Compare commits

..

55 Commits

Author SHA1 Message Date
Simon
99fe4c947c shooshtime fix 2026-03-23 13:46:55 +00:00
Simon
90ce9c684b pornhd3x 2026-03-23 11:32:22 +00:00
Simon
9021521c00 fixes 2026-03-22 17:26:12 +00:00
Simon
fbe04fc752 upgrades 2026-03-22 15:56:25 +00:00
Simon
52f108da8e tiktok group 2026-03-22 12:46:30 +00:00
Simon
50ea0e73b7 pimpbunny fix 2026-03-22 12:27:46 +00:00
Simon
a2d31d90a1 more debug info 2026-03-21 22:29:45 +00:00
Simon
43594a6cfe hottub skill 2026-03-21 21:27:38 +00:00
Simon
7b66e5b28a debugging and single provider compime 2026-03-21 21:18:43 +00:00
Simon
05ea90405b globe for status 2026-03-21 20:21:43 +00:00
Simon
9bba981796 status changes 2026-03-21 19:29:30 +00:00
Simon
cecc1f994b status updated 2026-03-21 19:15:35 +00:00
Simon
75b7241803 hentaihaven fix 2026-03-21 17:53:07 +00:00
Simon
1b32df0c35 pimpbunny fix 2026-03-20 22:08:02 +00:00
Simon
259a07686d noodlemagazine fix 2026-03-20 21:05:18 +00:00
Simon
46cd348148 pimpbunny changes 2026-03-20 21:02:47 +00:00
Simon
dd7c4ec6a1 noodlemagazine thumb proxy 2026-03-20 13:52:06 +00:00
Simon
99e4a77507 no embed in video element 2026-03-19 19:04:49 +00:00
Simon
2b26019a66 vrporn 2026-03-18 22:54:51 +00:00
Simon
f88b789f25 yesporn 2026-03-18 21:48:05 +00:00
Simon
21ef0ebf17 hsex page >1 fix 2026-03-18 12:56:11 +00:00
Simon
ce1afd9873 status upgrade 2026-03-18 12:13:28 +00:00
Simon
ce781e2099 hsex 2026-03-18 11:22:48 +00:00
Simon
a66f44c747 heavyfetish and other changes 2026-03-17 21:04:11 +00:00
Simon
9ca9e820d9 remove embed 2026-03-17 09:58:48 +00:00
Simon
0563a7231a pimpbunny updates 2026-03-17 09:53:34 +00:00
Simon
3c3af70ed6 thumb updates 2026-03-17 09:44:38 +00:00
Simon
7680a93fab pimpbunny thumb 2026-03-17 09:17:28 +00:00
Simon
3a2e77436e swap to curl-cffi 2026-03-17 08:41:48 +00:00
Simon
9172941ac6 fixes 2026-03-17 01:12:52 +00:00
Simon
a977381b3b porndish fix 2026-03-17 00:57:50 +00:00
Simon
0d20fc7a7e docker update 2026-03-17 00:31:10 +00:00
Simon
0c11959d94 porndish 2026-03-17 00:24:29 +00:00
Simon
f8a09b0e97 normalize queries 2026-03-16 19:46:00 +00:00
Simon
9751c25b95 shooshtime 2026-03-16 19:37:05 +00:00
Simon
1f99eec5a3 fix 2 electric boogaloo 2026-03-16 00:16:07 +00:00
Simon
448efeff1e hanime thumbnail fix 2026-03-15 23:47:32 +00:00
Simon
0137313c6e porn4fans fix 2026-03-13 12:53:33 +00:00
Simon
6a62582c09 porn4fans fix 2026-03-13 12:13:04 +00:00
Simon
2e1223e519 fix? 2026-03-10 19:21:42 +00:00
Simon
96926563b8 dynamic base url 2026-03-10 18:45:32 +00:00
Simon
2ad131f38f noodlemagazine proxy implementation 2026-03-10 18:34:06 +00:00
Simon
efb1eb3c91 isLive implemented 2026-03-10 17:54:16 +00:00
Simon
967d1e8143 removed spankbang from archive 2026-03-10 17:48:45 +00:00
Simon
9d7146e705 updated wreq emulation 2026-03-10 16:15:45 +00:00
Simon
8b54eeac81 upgraded wreq 2026-03-10 16:07:48 +00:00
Simon
41373bf937 spankbang fix 2026-03-10 16:07:40 +00:00
Simon
c7866a1702 spankbang 2026-03-10 15:17:28 +00:00
Simon
b875086761 tokyomotion added 2026-03-10 08:46:19 +00:00
Simon
c57ce2e243 porn4fans done 2026-03-10 08:15:53 +00:00
Simon
2ed001801a fixed viralxxxporn 2026-03-10 07:53:18 +00:00
Simon
716b775105 format fixes 2026-03-10 07:53:07 +00:00
Simon
4c1815e0fc fixed warnings 2026-03-08 22:26:35 +00:00
Simon
9fea043888 fixed aspect ratio 2026-03-08 21:32:12 +00:00
Simon
1cb9c325b4 added ascpect_ratio to xfree 2026-03-08 21:31:30 +00:00
74 changed files with 20048 additions and 1547 deletions

3
.gitignore vendored
View File

@@ -3,7 +3,7 @@
# will have compiled files and executables # will have compiled files and executables
debug/ debug/
target/ target/
.testing/ .*/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
@@ -17,4 +17,3 @@ Cargo.lock
*.db *.db
migrations/.keep migrations/.keep
.vscode

View File

@@ -2,6 +2,10 @@
name = "hottub" name = "hottub"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
build = "build.rs"
[features]
debug = []
[dependencies] [dependencies]
cute = "0.3.0" cute = "0.3.0"
@@ -16,8 +20,8 @@ ntex-files = "2.0.0"
serde = "1.0.228" serde = "1.0.228"
serde_json = "1.0.145" serde_json = "1.0.145"
tokio = { version = "1.49", features = ["full"] } tokio = { version = "1.49", features = ["full"] }
wreq = { version = "5.3.0", features = ["full", "cookies", "multipart"] } wreq = { version = "6.0.0-rc.26", features = ["cookies", "multipart", "json"] }
wreq-util = "2" wreq-util = "3.0.0-rc.10"
percent-encoding = "2.3.2" percent-encoding = "2.3.2"
capitalize = "0.3.4" capitalize = "0.3.4"
url = "2.5.7" url = "2.5.7"
@@ -32,6 +36,7 @@ dashmap = "6.1.0"
lru = "0.16.3" lru = "0.16.3"
rand = "0.10.0" rand = "0.10.0"
chrono = "0.4.44" chrono = "0.4.44"
md5 = "0.8.0"
[lints.rust] [lints.rust]
unexpected_cfgs = "allow" unexpected_cfgs = "allow"

View File

@@ -1,4 +1,4 @@
FROM debian FROM ubuntu:24.04
# FROM consol/debian-xfce-vnc:latest # FROM consol/debian-xfce-vnc:latest
# Switch to root user to install additional software # Switch to root user to install additional software
USER 0 USER 0
@@ -28,5 +28,6 @@ RUN apt install -yq libssl-dev \
sudo \ sudo \
&& apt-get clean && apt-get clean
USER 1000 RUN python3 -m pip install --break-system-packages --no-cache-dir curl_cffi
USER 1000

323
SKILL.md Normal file
View File

@@ -0,0 +1,323 @@
---
name: hottub
description: Work on the Hottub Rust server. Use this skill when you need the real build/run commands, compile-time single-provider builds, runtime env vars, API and proxy endpoint trigger examples, or yt-dlp verification steps for returned media URLs.
---
# Hottub
Hottub is a Rust `ntex` server. The main entrypoints are:
- `src/main.rs`: server startup, env loading, root redirect, `/api`, `/proxy`, static files
- `src/api.rs`: `/api/status`, `/api/videos`, `/api/test`, `/api/proxies`
- `src/proxy.rs`: `/proxy/...` redirect and media/image proxy routes
- `src/providers/mod.rs`: provider registry, compile-time provider selection, channel metadata
- `src/util/requester.rs`: outbound HTTP, Burp proxy support, FlareSolverr fallback
## Build and run
Default local run:
```bash
cargo run
```
Run with compiled-in debug logs:
```bash
cargo run --features debug
```
Compile a single-provider binary:
```bash
HOT_TUB_PROVIDER=hsex cargo build
```
Single-provider binary with debug logs:
```bash
HOT_TUB_PROVIDER=hsex cargo run --features debug
```
Notes:
- `HOT_TUB_PROVIDER` is the preferred compile-time selector.
- `HOTTUB_PROVIDER` is also supported as a fallback alias.
- Single-provider builds register only that provider at compile time, so other providers are not constructed and their init paths do not run.
- In a single-provider build, `/api/videos` requests with `"channel": "all"` are remapped to the compiled provider.
- The server binds to `0.0.0.0:18080`.
Useful checks:
```bash
cargo check -q
HOT_TUB_PROVIDER=hsex cargo check -q
HOT_TUB_PROVIDER=hsex cargo check -q --features debug
```
## Environment
Runtime env vars:
- `DATABASE_URL` required. SQLite path, for example `hottub.db`.
- `RUST_LOG` optional. Defaults to `warn` if unset.
- `PROXY` optional. Any value other than `"0"` enables proxy mode in the shared requester.
- `BURP_URL` optional. Outbound HTTP proxy used when `PROXY` is enabled.
- `FLARE_URL` optional but strongly recommended for provider work. Used for FlareSolverr fallback and some providers that call it directly.
- `DOMAIN` optional. Used for the `/` redirect target.
- `DISCORD_WEBHOOK` optional. Enables `/api/test` and provider error reporting to Discord.
Build-time env vars:
- `HOT_TUB_PROVIDER` optional. Compile only one provider into the binary.
- `HOTTUB_PROVIDER` optional fallback alias for the same purpose.
Practical `.env` baseline:
```dotenv
DATABASE_URL=hottub.db
RUST_LOG=info
PROXY=0
BURP_URL=http://127.0.0.1:8081
FLARE_URL=http://127.0.0.1:8191/v1
DOMAIN=127.0.0.1:18080
DISCORD_WEBHOOK=
```
## Endpoint surface
Root:
- `GET /`
- Returns `302 Found`
- Redirects to `hottub://source?url=<DOMAIN-or-request-host>`
Status API:
- `GET /api/status`
- `POST /api/status`
- Returns the server status and channel list
- The `User-Agent` matters because channel visibility can depend on parsed client version
Videos API:
- `POST /api/videos`
- Main provider execution endpoint
- Body is JSON matching `VideosRequest` in `src/videos.rs`
Diagnostics:
- `GET /api/test`
- Sends a Discord test error if `DISCORD_WEBHOOK` is configured
- `GET /api/proxies`
- Returns the current outbound proxy snapshot
Proxy endpoints:
- Redirect proxies:
- `GET|POST /proxy/sxyprn/{endpoint}*`
- `GET|POST /proxy/javtiful/{endpoint}*`
- `GET|POST /proxy/spankbang/{endpoint}*`
- `GET|POST /proxy/porndish/{endpoint}*`
- `GET|POST /proxy/pimpbunny/{endpoint}*`
- Media/image proxies:
- `GET|POST /proxy/noodlemagazine/{endpoint}*`
- `GET|POST /proxy/noodlemagazine-thumb/{endpoint}*`
- `GET|POST /proxy/hanime-cdn/{endpoint}*`
- `GET|POST /proxy/hqporner-thumb/{endpoint}*`
- `GET|POST /proxy/porndish-thumb/{endpoint}*`
- `GET|POST /proxy/pimpbunny-thumb/{endpoint}*`
Everything else under `/` is served from `static/`.
## How to trigger endpoints
Verify the root redirect:
```bash
curl -i http://127.0.0.1:18080/
```
Fetch status with a Hot Tub-like user agent:
```bash
curl -s \
-H 'User-Agent: Hot%20Tub/22c CFNetwork/1494.0.7 Darwin/23.4.0' \
http://127.0.0.1:18080/api/status | jq
```
Equivalent `POST /api/status`:
```bash
curl -s -X POST http://127.0.0.1:18080/api/status | jq
```
Minimal videos request:
```bash
curl -s http://127.0.0.1:18080/api/videos \
-H 'Content-Type: application/json' \
-H 'User-Agent: Hot%20Tub/22c CFNetwork/1494.0.7 Darwin/23.4.0' \
-d '{"channel":"hsex","sort":"date","page":1,"perPage":10}' | jq
```
Use `"all"` against a normal multi-provider build:
```bash
curl -s http://127.0.0.1:18080/api/videos \
-H 'Content-Type: application/json' \
-d '{"channel":"all","sort":"date","page":1,"perPage":10}' | jq
```
Use `"all"` against a single-provider build:
```bash
HOT_TUB_PROVIDER=hsex cargo run --features debug
curl -s http://127.0.0.1:18080/api/videos \
-H 'Content-Type: application/json' \
-d '{"channel":"all","sort":"date","page":1,"perPage":10}' | jq
```
Literal query behavior:
- Quoted queries are treated as literal substring filters after provider fetch.
- Leading `#` is stripped before matching.
Example:
```bash
curl -s http://127.0.0.1:18080/api/videos \
-H 'Content-Type: application/json' \
-d '{"channel":"hsex","query":"\"teacher\"","page":1,"perPage":10}' | jq
```
Trigger the Discord test route:
```bash
curl -i http://127.0.0.1:18080/api/test
```
Inspect proxy state:
```bash
curl -s http://127.0.0.1:18080/api/proxies | jq
```
Trigger a redirect proxy and inspect the `Location` header:
```bash
curl -I 'http://127.0.0.1:18080/proxy/spankbang/some/provider/path'
```
Trigger a media proxy directly:
```bash
curl -I 'http://127.0.0.1:18080/proxy/noodlemagazine/some/media/path'
```
## Videos request fields
Commonly useful request keys:
- `channel`
- `sort`
- `query`
- `page`
- `perPage`
- `featured`
- `category`
- `sites`
- `all_provider_sites`
- `filter`
- `language`
- `networks`
- `stars`
- `categories`
- `duration`
- `sexuality`
Most provider debugging only needs:
```json
{
"channel": "hsex",
"sort": "date",
"query": null,
"page": 1,
"perPage": 10
}
```
## Recommended provider-debug workflow
1. Build only the provider you care about.
2. Run with `--features debug`.
3. Hit `/api/status` to confirm only the expected channel is present.
4. Hit `/api/videos` with either the provider id or `"all"`.
5. Inspect `.items[0].url`, `.items[0].formats`, `.items[0].thumb`, and any local `/proxy/...` URLs.
6. Verify the media URL with `yt-dlp`.
Example:
```bash
HOT_TUB_PROVIDER=hsex cargo run --features debug
curl -s http://127.0.0.1:18080/api/status | jq '.channels[].id'
curl -s http://127.0.0.1:18080/api/videos \
-H 'Content-Type: application/json' \
-d '{"channel":"all","page":1,"perPage":1}' | tee /tmp/hottub-video.json | jq
```
## yt-dlp verification
Use `yt-dlp` to prove that a returned video URL or format is actually consumable.
Check the primary item URL:
```bash
URL="$(jq -r '.items[0].url' /tmp/hottub-video.json)"
yt-dlp -v --simulate "$URL"
```
Prefer the first explicit format when present:
```bash
FORMAT_URL="$(jq -r '.items[0].formats[0].url' /tmp/hottub-video.json)"
yt-dlp -v -F "$FORMAT_URL"
yt-dlp -v --simulate "$FORMAT_URL"
```
If the format contains required HTTP headers, pass them through:
```bash
yt-dlp -v --simulate \
--add-header 'Referer: https://example.com/' \
--add-header 'User-Agent: Mozilla/5.0 ...' \
"$FORMAT_URL"
```
If you want to build the command from JSON:
```bash
FORMAT_URL="$(jq -r '.items[0].formats[0].url' /tmp/hottub-video.json)"
mapfile -t HDRS < <(
jq -r '.items[0].formats[0].http_headers // {} | to_entries[] | "--add-header=\(.key): \(.value)"' \
/tmp/hottub-video.json
)
yt-dlp -v --simulate "${HDRS[@]}" "$FORMAT_URL"
```
For local proxy URLs returned by Hottub, verify the server endpoint directly:
```bash
LOCAL_URL="$(jq -r '.items[0].formats[0].url // .items[0].url' /tmp/hottub-video.json)"
yt-dlp -v --simulate "$LOCAL_URL"
```
## Interaction rules
- Prefer compile-time single-provider builds for provider work.
- Prefer `/api/status` before `/api/videos` so you know what channels the current binary exposes.
- When reproducing client-specific issues, send a realistic `User-Agent`.
- When debugging fetch failures, enable `debug` and set `FLARE_URL`.
- When debugging outbound request behavior, set `PROXY=1` and `BURP_URL=...`.
- Use `/api/test` only when you intentionally want a Discord notification.

View File

@@ -1,380 +0,0 @@
use std::vec;
use std::env;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{decode, ICodedDataTrait};
use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::videos::ServerOptions;
use crate::videos::{VideoItem};
use crate::DbPool;
use std::collections::HashMap;
use wreq::Client;
use wreq_util::Emulation;
// `error_chain!` generates this module's `Error` and `Result` types plus
// automatic `From` conversions for the listed foreign error types, so
// `?` works directly on I/O and wreq HTTP errors below.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(wreq::Error);
    }
}
/// Scraping provider for spankbang.com listing, search, and video pages.
#[derive(Debug, Clone)]
pub struct SpankbangProvider {
    // Base site URL; always carries a trailing slash so paths append cleanly.
    url: String,
}
impl SpankbangProvider {
pub fn new() -> Self {
SpankbangProvider {
url: "https://spankbang.com/".to_string()
}
}
async fn get(&self, cache:VideoCache, pool: DbPool, page: u8, sort: String) -> Result<Vec<VideoItem>> {
let url = format!("{}{}/{}/", self.url, sort, page);
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
// println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = Client::builder()
.emulation(Emulation::Firefox136)
.cert_verification(false)
.build()?;
let response = client.get(url.clone()).send().await?;
let mut cookies_string = String::new();
if let Some(_) = response.headers().get_all("set-cookie").iter().next() {
for _ in response.headers().get_all("set-cookie").iter() {
let mut cookies_map = HashMap::new();
for value in response.headers().get_all("set-cookie").iter() {
if let Ok(cookie_str) = value.to_str() {
if let Some((k, v)) = cookie_str.split_once('=') {
let key = k.trim();
let val = v.split(';').next().unwrap_or("").trim();
cookies_map.insert(key.to_string(), val.to_string());
}
}
}
cookies_string = cookies_map
.iter()
.map(|(k, v)| format!("{}={}", k, v))
.collect::<Vec<String>>()
.join("; ");
}
}
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client, cookies_string, pool.clone()).await;
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response, &client,String::new(), pool.clone()).await
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
async fn query(&self, cache: VideoCache, pool: DbPool, page: u8, query: &str) -> Result<Vec<VideoItem>> {
let url = format!("{}s/{}/{}/", self.url, query.replace(" ", "+"), page);
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
// println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = Client::builder()
.emulation(Emulation::Firefox136)
.cert_verification(false)
.build()?;
let response = client.get(url.clone()).send().await?;
let mut cookies_string = String::new();
if let Some(_) = response.headers().get_all("set-cookie").iter().next() {
for _ in response.headers().get_all("set-cookie").iter() {
let mut cookies_map = HashMap::new();
for value in response.headers().get_all("set-cookie").iter() {
if let Ok(cookie_str) = value.to_str() {
if let Some((k, v)) = cookie_str.split_once('=') {
let key = k.trim();
let val = v.split(';').next().unwrap_or("").trim();
cookies_map.insert(key.to_string(), val.to_string());
}
}
}
cookies_string = cookies_map
.iter()
.map(|(k, v)| format!("{}={}", k, v))
.collect::<Vec<String>>()
.join("; ");
}
}
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client, cookies_string, pool.clone()).await;
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response, &client, String::new(), pool.clone()).await
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
async fn get_video_url(&self, url:String, client:&Client, cookies: String, pool: DbPool) -> Result<String> {
let mut conn = pool.get().expect("couldn't get db connection from pool");
let db_result = db::get_video(&mut conn,url.clone());
drop(conn);
match db_result {
Ok(Some(video_url)) => {
return Ok(video_url);
}
Ok(None) => (),
Err(e) => {
println!("Error fetching video from database: {}", e);
// return Err(format!("Error fetching video from database: {}", e).into());
}
}
let response = client.get(url.clone()).header("Cookie", cookies.clone()).send().await?;
let mut response = response;
while response.status().as_u16() == 429 {
// println!("Received 429 Too Many Requests. Waiting 10 seconds before retrying...");
ntex::time::sleep(ntex::time::Seconds(60)).await;
response = client.get(url.clone()).header("Cookie", cookies.clone()).send().await?;
}
if response.status().is_success() {
let text = response.text().await?;
let lines = text.split("\n").collect::<Vec<&str>>();
let url_line = lines.iter()
.find(|s| s.trim_start().starts_with("<source src=") && s.contains("type=\"video/mp4\""))
.unwrap_or(&"");
let new_url = url_line.split("src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
let mut conn = pool.get().expect("couldn't get db connection from pool");
let _ = db::insert_video(&mut conn, &url, &new_url);
drop(conn);
return Ok(new_url)
}
Err(Error::from("Failed to get video URL"))
}
async fn parse_video_item(
&self,
mut html: String,
client: &Client,
cookies: String,
pool: DbPool
) -> Result<VideoItem> {
if html.contains("<!-- Video list block -->") {
html = html.split("<!-- Video list block -->").collect::<Vec<&str>>()[0].to_string();
}
let vid = html.split("\n").collect::<Vec<&str>>();
if vid.len() > 200 {
return Err("Video item has too many lines".into());
}
// for (index ,line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line);
// }
let title_line = vid.iter()
.find(|s| s.trim_start().starts_with("<a href=") && s.contains("title="))
.unwrap_or(&"");
let mut title = title_line.split("title=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let thumb_line = vid.iter()
.find(|s| s.trim_start().starts_with("data-src=") && s.contains(".jpg\""))
.unwrap_or(&"");
let thumb = thumb_line.split("data-src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
// let preview_line = vid.iter()
// .find(|s: &&&str| s.trim_start().starts_with("<source data-src=") && s.contains("mp4"))
// .unwrap_or(&"");
// let mut preview = "".to_string();
// if vid[15].contains("data-preview=\""){
// preview = vid[15].split("data-preview=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
// }
// else{
// preview = preview_line.split("data-src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
// }
let duration_str = vid[64].split("m").collect::<Vec<&str>>()[0];
let duration: u32 = duration_str.parse::<u32>().unwrap_or(0) * 60;
// let view_and_rating_str: Vec<&str> = vid.iter().copied().filter(|s| s.contains("<span class=\"md:text-body-md\">")).collect();
// let views_str = view_and_rating_str[0].split(">").collect::<Vec<&str>>()[1].split("K<").collect::<Vec<&str>>()[0];
// let views = (views_str.parse::<f32>().unwrap_or(0.0) * 1000.0) as u32;
// let rate_str = view_and_rating_str[1].split(">").collect::<Vec<&str>>()[1].split("%<").collect::<Vec<&str>>()[0];
// let rating = rate_str.parse::<f32>().unwrap_or(0.0);
let url_part = vid.iter().find(|s| s.contains("<a href=\"/")).unwrap().split("<a href=\"/").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0];
let url = match self.get_video_url(self.url.clone() + url_part, client, cookies, pool).await {
Ok(video_url) => video_url,
Err(e) => {
print!("Error fetching video URL: {}", e);
return Err("Failed to get video URL".into());
}
};
let id = url_part.split("/").collect::<Vec<&str>>()[0].to_string();
// let quality_str = match vid[25].contains("<"){
// true => vid[25].split(">").collect::<Vec<&str>>()[1].split("<").collect::<Vec<&str>>()[0],
// false => "SD",
// };
// let quality = match quality_str{
// "HD" => "1080",
// "4k" => "2160",
// "SD" => "720",
// _ => "1080",
// };
let video_item = VideoItem::new(id, title, url.clone().to_string(), "spankbang".to_string(), thumb, duration)
// .views(views)
// .rating(rating)
// .formats(vec![format])
// .preview(preview)
;
Ok(video_item)
}
async fn get_video_items_from_html(&self, html: String, client: &Client, cookies:String, pool: DbPool) -> Vec<VideoItem> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let items: Vec<VideoItem> = Vec::new();
let split_html = html.split("\"video-list").collect::<Vec<&str>>();
if split_html.len() < 2 {
println!("Could not find video-list in HTML");
return items;
}
let video_listing_content = format!("{}{}", split_html[1], split_html.get(2).unwrap_or(&""));
let raw_videos_vec = video_listing_content
.split("data-testid=\"video-item\"")
.collect::<Vec<&str>>();
if raw_videos_vec.len() < 2 {
println!("Could not find video-item in HTML");
return items;
}
let raw_videos = raw_videos_vec[1..].to_vec();
let futures = raw_videos.into_iter().map(|el| self.parse_video_item(el.to_string(), client, cookies.clone(), pool.clone()));
let results: Vec<Result<VideoItem>> = join_all(futures).await;
let video_items: Vec<VideoItem> = results
.into_iter()
.filter_map(Result::ok)
.collect();
return video_items;
}
}
impl Provider for SpankbangProvider {
    /// Provider entry point: dispatch to search (`query`) or listing
    /// (`get`) and swallow failures into an empty result set.
    async fn get_videos(
        &self,
        cache: VideoCache,
        pool: DbPool,
        mut sort: String,
        query: Option<String>,
        page: String,
        per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        // Accepted for interface parity only; this provider ignores them.
        let _ = options;
        let _ = per_page;
        // Spankbang has no plain "date" listing; map it to trending.
        if sort == "date" {
            sort = "trending_videos".to_string();
        }
        // Malformed page numbers default to the first page.
        let page = page.parse::<u8>().unwrap_or(1);
        let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
            Some(q) => self.query(cache, pool, page, &q).await,
            None => self.get(cache, pool, page, sort).await,
        };
        videos.unwrap_or_else(|e| {
            println!("Error fetching videos: {}", e);
            vec![]
        })
    }
}

350
build.rs Normal file
View File

@@ -0,0 +1,350 @@
use std::env;
use std::fs;
use std::path::PathBuf;
struct ProviderDef {
id: &'static str,
module: &'static str,
ty: &'static str,
}
const PROVIDERS: &[ProviderDef] = &[
ProviderDef {
id: "all",
module: "all",
ty: "AllProvider",
},
ProviderDef {
id: "perverzija",
module: "perverzija",
ty: "PerverzijaProvider",
},
ProviderDef {
id: "hanime",
module: "hanime",
ty: "HanimeProvider",
},
ProviderDef {
id: "pornhub",
module: "pornhub",
ty: "PornhubProvider",
},
ProviderDef {
id: "pornhd3x",
module: "pornhd3x",
ty: "Pornhd3xProvider",
},
ProviderDef {
id: "spankbang",
module: "spankbang",
ty: "SpankbangProvider",
},
ProviderDef {
id: "rule34video",
module: "rule34video",
ty: "Rule34videoProvider",
},
ProviderDef {
id: "redtube",
module: "redtube",
ty: "RedtubeProvider",
},
ProviderDef {
id: "okporn",
module: "okporn",
ty: "OkpornProvider",
},
ProviderDef {
id: "pornhat",
module: "pornhat",
ty: "PornhatProvider",
},
ProviderDef {
id: "perfectgirls",
module: "perfectgirls",
ty: "PerfectgirlsProvider",
},
ProviderDef {
id: "okxxx",
module: "okxxx",
ty: "OkxxxProvider",
},
ProviderDef {
id: "homoxxx",
module: "homoxxx",
ty: "HomoxxxProvider",
},
ProviderDef {
id: "missav",
module: "missav",
ty: "MissavProvider",
},
ProviderDef {
id: "xxthots",
module: "xxthots",
ty: "XxthotsProvider",
},
ProviderDef {
id: "yesporn",
module: "yesporn",
ty: "YespornProvider",
},
ProviderDef {
id: "sxyprn",
module: "sxyprn",
ty: "SxyprnProvider",
},
ProviderDef {
id: "porn00",
module: "porn00",
ty: "Porn00Provider",
},
ProviderDef {
id: "youjizz",
module: "youjizz",
ty: "YoujizzProvider",
},
ProviderDef {
id: "paradisehill",
module: "paradisehill",
ty: "ParadisehillProvider",
},
ProviderDef {
id: "porn4fans",
module: "porn4fans",
ty: "Porn4fansProvider",
},
ProviderDef {
id: "pornmz",
module: "pornmz",
ty: "PornmzProvider",
},
ProviderDef {
id: "porndish",
module: "porndish",
ty: "PorndishProvider",
},
ProviderDef {
id: "shooshtime",
module: "shooshtime",
ty: "ShooshtimeProvider",
},
ProviderDef {
id: "pornzog",
module: "pornzog",
ty: "PornzogProvider",
},
ProviderDef {
id: "omgxxx",
module: "omgxxx",
ty: "OmgxxxProvider",
},
ProviderDef {
id: "beeg",
module: "beeg",
ty: "BeegProvider",
},
ProviderDef {
id: "tnaflix",
module: "tnaflix",
ty: "TnaflixProvider",
},
ProviderDef {
id: "tokyomotion",
module: "tokyomotion",
ty: "TokyomotionProvider",
},
ProviderDef {
id: "viralxxxporn",
module: "viralxxxporn",
ty: "ViralxxxpornProvider",
},
ProviderDef {
id: "vrporn",
module: "vrporn",
ty: "VrpornProvider",
},
ProviderDef {
id: "rule34gen",
module: "rule34gen",
ty: "Rule34genProvider",
},
ProviderDef {
id: "xxdbx",
module: "xxdbx",
ty: "XxdbxProvider",
},
ProviderDef {
id: "xfree",
module: "xfree",
ty: "XfreeProvider",
},
ProviderDef {
id: "hqporner",
module: "hqporner",
ty: "HqpornerProvider",
},
ProviderDef {
id: "pmvhaven",
module: "pmvhaven",
ty: "PmvhavenProvider",
},
ProviderDef {
id: "noodlemagazine",
module: "noodlemagazine",
ty: "NoodlemagazineProvider",
},
ProviderDef {
id: "pimpbunny",
module: "pimpbunny",
ty: "PimpbunnyProvider",
},
ProviderDef {
id: "javtiful",
module: "javtiful",
ty: "JavtifulProvider",
},
ProviderDef {
id: "hypnotube",
module: "hypnotube",
ty: "HypnotubeProvider",
},
ProviderDef {
id: "freepornvideosxxx",
module: "freepornvideosxxx",
ty: "FreepornvideosxxxProvider",
},
ProviderDef {
id: "heavyfetish",
module: "heavyfetish",
ty: "HeavyfetishProvider",
},
ProviderDef {
id: "hsex",
module: "hsex",
ty: "HsexProvider",
},
ProviderDef {
id: "sextb",
module: "sextb",
ty: "SextbProvider",
},
ProviderDef {
id: "hentaihaven",
module: "hentaihaven",
ty: "HentaihavenProvider",
},
ProviderDef {
id: "chaturbate",
module: "chaturbate",
ty: "ChaturbateProvider",
},
];
/// Build script: generate the provider module declarations, registry,
/// metadata dispatch, and compile-time selection constant that the crate
/// pulls in via `include!(concat!(env!("OUT_DIR"), ...))`.
fn main() {
    // Re-run codegen when this script or the selection env vars change.
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-env-changed=HOT_TUB_PROVIDER");
    println!("cargo:rerun-if-env-changed=HOTTUB_PROVIDER");
    // Declare the custom cfgs so rustc's `unexpected_cfgs` lint is satisfied.
    println!("cargo:rustc-check-cfg=cfg(hottub_single_provider)");
    let provider_cfg_values = PROVIDERS
        .iter()
        .map(|provider| format!("\"{}\"", provider.id))
        .collect::<Vec<_>>()
        .join(", ");
    println!("cargo:rustc-check-cfg=cfg(hottub_provider, values({provider_cfg_values}))");
    // HOT_TUB_PROVIDER is the preferred selector; HOTTUB_PROVIDER is a
    // fallback alias. Blank values count as "not set".
    let selected = env::var("HOT_TUB_PROVIDER")
        .or_else(|_| env::var("HOTTUB_PROVIDER"))
        .ok()
        .map(|value| value.trim().to_string())
        .filter(|value| !value.is_empty());
    let providers = match selected.as_deref() {
        Some(selected_id) => {
            // Unknown ids abort the build with a readable message.
            let provider = PROVIDERS
                .iter()
                .find(|provider| provider.id == selected_id)
                .unwrap_or_else(|| {
                    panic!("Unknown provider `{selected_id}` from HOT_TUB_PROVIDER/HOTTUB_PROVIDER")
                });
            // Expose the selection to the crate as cfg flags.
            println!("cargo:rustc-cfg=hottub_single_provider");
            println!("cargo:rustc-cfg=hottub_provider=\"{selected_id}\"");
            vec![provider]
        }
        None => PROVIDERS.iter().collect(),
    };
    let out_dir = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR"));
    let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR"));
    // provider_modules.rs: one `#[path = "..."] pub mod <name>;` per
    // selected provider, pointing at its absolute source path.
    let modules = providers
        .iter()
        .map(|provider| {
            let module_path = manifest_dir
                .join("src/providers")
                .join(format!("{}.rs", provider.module));
            format!(
                "#[path = r#\"{}\"#]\npub mod {};",
                module_path.display(),
                provider.module
            )
        })
        .collect::<Vec<_>>()
        .join("\n");
    fs::write(out_dir.join("provider_modules.rs"), format!("{modules}\n"))
        .expect("write provider_modules.rs");
    // provider_registry.rs: a block expression that fills a map `m` with
    // each provider boxed as `DynProvider` (names supplied by the includer).
    let registry = providers
        .iter()
        .map(|provider| {
            format!(
                "m.insert(\"{id}\", Arc::new({module}::{ty}::new()) as DynProvider);",
                id = provider.id,
                module = provider.module,
                ty = provider.ty
            )
        })
        .collect::<Vec<_>>()
        .join("\n");
    fs::write(
        out_dir.join("provider_registry.rs"),
        format!("{{\n{registry}\n}}\n"),
    )
    .expect("write provider_registry.rs");
    // provider_metadata_fn.rs: a `match id` body mapping channel ids to
    // `CHANNEL_METADATA`; "hottub" is an alias for "all".
    let metadata_arms = providers
        .iter()
        .map(|provider| {
            if provider.id == "all" {
                format!(
                    "\"all\" | \"hottub\" => Some({module}::CHANNEL_METADATA),",
                    module = provider.module
                )
            } else {
                format!(
                    "\"{id}\" => Some({module}::CHANNEL_METADATA),",
                    id = provider.id,
                    module = provider.module
                )
            }
        })
        .collect::<Vec<_>>()
        .join("\n");
    fs::write(
        out_dir.join("provider_metadata_fn.rs"),
        format!("match id {{\n{metadata_arms}\n_ => None,\n}}\n"),
    )
    .expect("write provider_metadata_fn.rs");
    // provider_selection.rs: constant recording the compile-time choice so
    // runtime code can remap "all" requests in single-provider builds.
    let selection = match selected.as_deref() {
        Some(selected_id) => format!(
            "pub const COMPILE_TIME_SELECTED_PROVIDER: Option<&str> = Some(\"{selected_id}\");"
        ),
        None => "pub const COMPILE_TIME_SELECTED_PROVIDER: Option<&str> = None;".to_string(),
    };
    fs::write(
        out_dir.join("provider_selection.rs"),
        format!("{selection}\n"),
    )
    .expect("write provider_selection.rs");
}

View File

@@ -1,6 +1,6 @@
use crate::providers::{ use crate::providers::{
ALL_PROVIDERS, DynProvider, panic_payload_to_string, report_provider_error, ALL_PROVIDERS, DynProvider, build_status_response, panic_payload_to_string,
run_provider_guarded, report_provider_error, resolve_provider_for_build, run_provider_guarded,
}; };
use crate::util::cache::VideoCache; use crate::util::cache::VideoCache;
use crate::util::discord::send_discord_error_report; use crate::util::discord::send_discord_error_report;
@@ -83,6 +83,53 @@ impl Ord for ClientVersion {
} }
} }
fn normalize_query(raw_query: Option<&str>) -> (Option<String>, Option<String>) {
    // Canonicalize a raw search query: trim surrounding whitespace, strip any
    // run of leading '#' markers, and detect a fully-quoted query ("…" or
    // '…') which requests a literal substring match. Returns
    // (normalized query, lowercased literal needle); both are None when no
    // usable text remains.
    let raw = match raw_query {
        Some(r) => r,
        None => return (None, None),
    };
    let mut text = raw.trim();
    if text.is_empty() {
        return (None, None);
    }
    // Drop every leading '#' (and the whitespace that follows each one).
    while let Some(rest) = text.strip_prefix('#') {
        text = rest.trim_start();
    }
    if text.is_empty() {
        return (None, None);
    }
    // A query wrapped entirely in matching quotes switches on literal mode.
    // Slicing at byte 1 and len-1 is safe: both boundary bytes are ASCII quotes.
    let quoted = text.len() >= 2
        && ((text.starts_with('"') && text.ends_with('"'))
            || (text.starts_with('\'') && text.ends_with('\'')));
    let mut literal = None;
    if quoted {
        let inner = text[1..text.len() - 1].trim();
        // An empty quoted query falls back to being treated as plain text.
        if !inner.is_empty() {
            text = inner;
            literal = Some(inner.to_ascii_lowercase());
        }
    }
    (Some(text.to_string()), literal)
}
fn video_matches_literal_query(video: &VideoItem, literal_query: &str) -> bool {
    // True when the literal needle occurs (case-insensitively) in the video
    // title, the uploader name, or any tag. `literal_query` is expected to
    // already be ASCII-lowercased (as produced by `normalize_query`).
    let hit = |text: &str| text.to_ascii_lowercase().contains(literal_query);
    if hit(&video.title) {
        return true;
    }
    if let Some(uploader) = video.uploader.as_deref() {
        if hit(uploader) {
            return true;
        }
    }
    match video.tags.as_ref() {
        Some(tags) => tags.iter().any(|tag| hit(tag)),
        None => false,
    }
}
pub fn config(cfg: &mut web::ServiceConfig) { pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service( cfg.service(
web::resource("/status") web::resource("/status")
@@ -99,6 +146,8 @@ pub fn config(cfg: &mut web::ServiceConfig) {
} }
async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> { async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
#[cfg(feature = "debug")]
let trace_id = crate::util::flow_debug::next_trace_id("status");
let clientversion: ClientVersion = match req.headers().get("User-Agent") { let clientversion: ClientVersion = match req.headers().get("User-Agent") {
Some(v) => match v.to_str() { Some(v) => match v.to_str() {
Ok(useragent) => ClientVersion::parse(useragent) Ok(useragent) => ClientVersion::parse(useragent)
@@ -112,6 +161,12 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
"Received status request with client version: {:?}", "Received status request with client version: {:?}",
clientversion clientversion
); );
crate::flow_debug!(
"trace={} status request host={} client={:?}",
trace_id,
req.connection_info().host(),
&clientversion
);
let host = req let host = req
.headers() .headers()
@@ -119,32 +174,69 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
.and_then(|h| h.to_str().ok()) .and_then(|h| h.to_str().ok())
.unwrap_or_default() .unwrap_or_default()
.to_string(); .to_string();
let public_url_base = format!("{}://{}", req.connection_info().scheme(), host);
let mut status = Status::new(); let mut status = Status::new();
#[cfg(feature = "debug")]
let mut channel_count = 0usize;
for (provider_name, provider) in ALL_PROVIDERS.iter() { for (provider_name, provider) in ALL_PROVIDERS.iter() {
crate::flow_debug!(
"trace={} status inspecting provider={}",
trace_id,
provider_name
);
let channel_result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { let channel_result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
provider.get_channel(clientversion.clone()) provider.get_channel(clientversion.clone())
})); }));
match channel_result { match channel_result {
Ok(Some(channel)) => status.add_channel(channel), Ok(Some(mut channel)) => {
if channel.favicon.starts_with('/') {
channel.favicon = format!("{}{}", public_url_base, channel.favicon);
}
#[cfg(feature = "debug")]
{
channel_count += 1;
}
crate::flow_debug!(
"trace={} status added channel id={} provider={}",
trace_id,
channel.id.as_str(),
provider_name
);
status.add_channel(channel)
}
Ok(None) => {} Ok(None) => {}
Err(payload) => { Err(payload) => {
let panic_msg = panic_payload_to_string(payload); let panic_msg = panic_payload_to_string(payload);
crate::flow_debug!(
"trace={} status provider panic provider={} panic={}",
trace_id,
provider_name,
&panic_msg
);
report_provider_error(provider_name, "status.get_channel", &panic_msg).await; report_provider_error(provider_name, "status.get_channel", &panic_msg).await;
} }
} }
} }
status.iconUrl = format!("http://{}/favicon.ico", host).to_string(); status.iconUrl = format!("{}/favicon.ico", public_url_base).to_string();
Ok(web::HttpResponse::Ok().json(&status)) let response = build_status_response(status);
crate::flow_debug!(
"trace={} status response channels={} groups={}",
trace_id,
channel_count,
response.channelGroups.len()
);
Ok(web::HttpResponse::Ok().json(&response))
} }
async fn videos_post( async fn videos_post(
mut video_request: web::types::Json<VideosRequest>, video_request: web::types::Json<VideosRequest>,
cache: web::types::State<VideoCache>, cache: web::types::State<VideoCache>,
pool: web::types::State<DbPool>, pool: web::types::State<DbPool>,
requester: web::types::State<Requester>, requester: web::types::State<Requester>,
req: HttpRequest, req: HttpRequest,
) -> Result<impl web::Responder, web::Error> { ) -> Result<impl web::Responder, web::Error> {
let trace_id = crate::util::flow_debug::next_trace_id("videos");
let clientversion: ClientVersion = match req.headers().get("User-Agent") { let clientversion: ClientVersion = match req.headers().get("User-Agent") {
Some(v) => match v.to_str() { Some(v) => match v.to_str() {
Ok(useragent) => ClientVersion::parse(useragent) Ok(useragent) => ClientVersion::parse(useragent)
@@ -153,12 +245,6 @@ async fn videos_post(
}, },
_ => ClientVersion::new(999, 0, "Hot%20Tub".to_string()), _ => ClientVersion::new(999, 0, "Hot%20Tub".to_string()),
}; };
match video_request.query.as_deref() {
Some(query) if query.starts_with("#") => {
video_request.query = Some(query.trim_start_matches("#").to_string());
}
_ => {}
}
let requester = requester.get_ref().clone(); let requester = requester.get_ref().clone();
// Ensure "videos" table exists with two string columns. // Ensure "videos" table exists with two string columns.
match pool.get() { match pool.get() {
@@ -188,16 +274,14 @@ async fn videos_post(
}, },
items: vec![], items: vec![],
}; };
let channel: String = video_request let requested_channel: String = video_request
.channel .channel
.as_deref() .as_deref()
.unwrap_or("all") .unwrap_or("all")
.to_string(); .to_string();
let channel = resolve_provider_for_build(requested_channel.as_str()).to_string();
let sort: String = video_request.sort.as_deref().unwrap_or("date").to_string(); let sort: String = video_request.sort.as_deref().unwrap_or("date").to_string();
let mut query: Option<String> = video_request.query.clone(); let (query, literal_query) = normalize_query(video_request.query.as_deref());
if video_request.query.as_deref() == Some("") {
query = None;
}
let page: u8 = video_request let page: u8 = video_request
.page .page
.as_ref() .as_ref()
@@ -245,12 +329,34 @@ async fn videos_post(
.to_string(); .to_string();
let duration = video_request.duration.as_deref().unwrap_or("").to_string(); let duration = video_request.duration.as_deref().unwrap_or("").to_string();
let sexuality = video_request.sexuality.as_deref().unwrap_or("").to_string(); let sexuality = video_request.sexuality.as_deref().unwrap_or("").to_string();
let public_url_base = format!(
"{}://{}",
req.connection_info().scheme(),
req.connection_info().host()
);
crate::flow_debug!(
"trace={} videos request requested_channel={} resolved_channel={} sort={} query={:?} page={} per_page={} filter={} category={} sites={} client={:?}",
trace_id,
&requested_channel,
&channel,
&sort,
&query,
page,
perPage,
&filter,
&category,
&sites,
&clientversion
);
let mut requester = requester;
requester.set_debug_trace_id(Some(trace_id.clone()));
let options = ServerOptions { let options = ServerOptions {
featured: Some(featured), featured: Some(featured),
category: Some(category), category: Some(category),
sites: Some(sites), sites: Some(sites),
filter: Some(filter), filter: Some(filter),
language: Some(language), language: Some(language),
public_url_base: Some(public_url_base),
requester: Some(requester), requester: Some(requester),
network: Some(network), network: Some(network),
stars: Some(stars), stars: Some(stars),
@@ -259,6 +365,12 @@ async fn videos_post(
sort: Some(sort.clone()), sort: Some(sort.clone()),
sexuality: Some(sexuality), sexuality: Some(sexuality),
}; };
crate::flow_debug!(
"trace={} videos provider dispatch provider={} literal_query={:?}",
trace_id,
&channel,
&literal_query
);
let mut video_items = run_provider_guarded( let mut video_items = run_provider_guarded(
&channel, &channel,
"videos_post.get_videos", "videos_post.get_videos",
@@ -273,6 +385,11 @@ async fn videos_post(
), ),
) )
.await; .await;
crate::flow_debug!(
"trace={} videos provider returned count={}",
trace_id,
video_items.len()
);
// There is a bug in Hottub38 that makes the client error for a 403-url even though formats work fine // There is a bug in Hottub38 that makes the client error for a 403-url even though formats work fine
if clientversion == ClientVersion::new(38, 0, "Hot%20Tub".to_string()) { if clientversion == ClientVersion::new(38, 0, "Hot%20Tub".to_string()) {
@@ -294,6 +411,18 @@ async fn videos_post(
.collect(); .collect();
} }
if let Some(literal_query) = literal_query.as_deref() {
#[cfg(feature = "debug")]
let before = video_items.len();
video_items.retain(|video| video_matches_literal_query(video, literal_query));
crate::flow_debug!(
"trace={} videos literal filter kept={} removed={}",
trace_id,
video_items.len(),
before.saturating_sub(video_items.len())
);
}
videos.items = video_items.clone(); videos.items = video_items.clone();
if video_items.len() == 0 { if video_items.len() == 0 {
videos.pageInfo = PageInfo { videos.pageInfo = PageInfo {
@@ -311,7 +440,15 @@ async fn videos_post(
let per_page_clone = perPage.to_string(); let per_page_clone = perPage.to_string();
let options_clone = options.clone(); let options_clone = options.clone();
let channel_clone = channel.clone(); let channel_clone = channel.clone();
#[cfg(feature = "debug")]
let prefetch_trace_id = trace_id.clone();
task::spawn_local(async move { task::spawn_local(async move {
crate::flow_debug!(
"trace={} videos prefetch spawn next_page={} provider={}",
prefetch_trace_id,
next_page,
&channel_clone
);
// if let AnyProvider::Spankbang(_) = provider_clone { // if let AnyProvider::Spankbang(_) = provider_clone {
// // Spankbang has a delay for the next page // // Spankbang has a delay for the next page
// ntex::time::sleep(ntex::time::Seconds(80)).await; // ntex::time::sleep(ntex::time::Seconds(80)).await;
@@ -345,11 +482,23 @@ async fn videos_post(
} }
} }
crate::flow_debug!(
"trace={} videos response items={} has_next={}",
trace_id,
videos.items.len(),
videos.pageInfo.hasNextPage
);
Ok(web::HttpResponse::Ok().json(&videos)) Ok(web::HttpResponse::Ok().json(&videos))
} }
pub fn get_provider(channel: &str) -> Option<DynProvider> { pub fn get_provider(channel: &str) -> Option<DynProvider> {
ALL_PROVIDERS.get(channel).cloned() let provider = ALL_PROVIDERS.get(channel).cloned();
crate::flow_debug!(
"provider lookup channel={} found={}",
channel,
provider.is_some()
);
provider
} }
pub async fn test() -> Result<impl web::Responder, web::Error> { pub async fn test() -> Result<impl web::Responder, web::Error> {
@@ -370,6 +519,7 @@ pub async fn test() -> Result<impl web::Responder, web::Error> {
pub async fn proxies() -> Result<impl web::Responder, web::Error> { pub async fn proxies() -> Result<impl web::Responder, web::Error> {
let proxies = all_proxies_snapshot().await.unwrap_or_default(); let proxies = all_proxies_snapshot().await.unwrap_or_default();
crate::flow_debug!("proxies endpoint snapshot_count={}", proxies.len());
let mut by_protocol: std::collections::BTreeMap<String, Vec<Proxy>> = let mut by_protocol: std::collections::BTreeMap<String, Vec<Proxy>> =
std::collections::BTreeMap::new(); std::collections::BTreeMap::new();
for proxy in proxies { for proxy in proxies {

View File

@@ -1,10 +1,17 @@
use crate::models::DBVideo;
use diesel::prelude::*; use diesel::prelude::*;
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "hentaihaven",
hottub_provider = "missav",
hottub_provider = "perverzija",
))]
pub fn get_video( pub fn get_video(
conn: &mut SqliteConnection, conn: &mut SqliteConnection,
video_id: String, video_id: String,
) -> Result<Option<String>, diesel::result::Error> { ) -> Result<Option<String>, diesel::result::Error> {
use crate::models::DBVideo;
use crate::schema::videos::dsl::*; use crate::schema::videos::dsl::*;
let result = videos let result = videos
.filter(id.eq(video_id)) .filter(id.eq(video_id))
@@ -16,11 +23,19 @@ pub fn get_video(
} }
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "hentaihaven",
hottub_provider = "missav",
hottub_provider = "perverzija",
))]
pub fn insert_video( pub fn insert_video(
conn: &mut SqliteConnection, conn: &mut SqliteConnection,
new_id: &str, new_id: &str,
new_url: &str, new_url: &str,
) -> Result<usize, diesel::result::Error> { ) -> Result<usize, diesel::result::Error> {
use crate::models::DBVideo;
use crate::schema::videos::dsl::*; use crate::schema::videos::dsl::*;
diesel::insert_into(videos) diesel::insert_into(videos)
.values(DBVideo { .values(DBVideo {
@@ -30,6 +45,13 @@ pub fn insert_video(
.execute(conn) .execute(conn)
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "hentaihaven",
hottub_provider = "missav",
hottub_provider = "perverzija",
))]
pub fn delete_video( pub fn delete_video(
conn: &mut SqliteConnection, conn: &mut SqliteConnection,
video_id: String, video_id: String,

View File

@@ -39,6 +39,11 @@ async fn main() -> std::io::Result<()> {
} }
} }
env_logger::init(); // You need this to actually see logs env_logger::init(); // You need this to actually see logs
crate::flow_debug!(
"startup begin rust_log={} debug_compiled={}",
std::env::var("RUST_LOG").unwrap_or_else(|_| "unset".to_string()),
cfg!(feature = "debug")
);
// set up database connection pool // set up database connection pool
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL"); let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
@@ -46,15 +51,23 @@ async fn main() -> std::io::Result<()> {
let pool = r2d2::Pool::builder() let pool = r2d2::Pool::builder()
.build(manager) .build(manager)
.expect("Failed to create pool."); .expect("Failed to create pool.");
crate::flow_debug!(
"database pool ready database_url={}",
crate::util::flow_debug::preview(&connspec, 96)
);
let mut requester = util::requester::Requester::new(); let mut requester = util::requester::Requester::new();
requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string()); let proxy_enabled = env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string();
requester.set_proxy(proxy_enabled);
crate::flow_debug!("requester initialized proxy_enabled={}", proxy_enabled);
let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new() let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new()
.max_size(100_000) .max_size(100_000)
.to_owned(); .to_owned();
crate::flow_debug!("video cache initialized max_size=100000");
thread::spawn(move || { thread::spawn(move || {
crate::flow_debug!("provider init thread spawned");
// Create a tiny runtime just for these async tasks // Create a tiny runtime just for these async tasks
let rt = tokio::runtime::Builder::new_current_thread() let rt = tokio::runtime::Builder::new_current_thread()
.enable_all() .enable_all()
@@ -62,10 +75,13 @@ async fn main() -> std::io::Result<()> {
.expect("build tokio runtime"); .expect("build tokio runtime");
rt.block_on(async move { rt.block_on(async move {
crate::flow_debug!("provider init begin");
providers::init_providers_now(); providers::init_providers_now();
crate::flow_debug!("provider init complete");
}); });
}); });
crate::flow_debug!("http server binding addr=0.0.0.0:18080 workers=8");
web::HttpServer::new(move || { web::HttpServer::new(move || {
web::App::new() web::App::new()
.state(pool.clone()) .state(pool.clone())

View File

@@ -14,6 +14,12 @@ use futures::stream::FuturesUnordered;
use std::fs; use std::fs;
use std::time::Duration; use std::time::Duration;
/// Channel-group metadata for the "all" aggregator provider: the group id it
/// is listed under and its descriptive tags. Presumably consumed when the
/// status response's channel groups are built (see `build_status_response`)
/// — TODO confirm against the registry/metadata lookup.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "meta-search",
        tags: &["aggregator", "multi-site", "search"],
    };
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -22,7 +28,6 @@ error_chain! {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct AllProvider {} pub struct AllProvider {}
impl AllProvider { impl AllProvider {
@@ -158,7 +163,7 @@ impl Provider for AllProvider {
name: "All".to_string(), name: "All".to_string(),
description: "Query from all sites of this Server".to_string(), description: "Query from all sites of this Server".to_string(),
premium: false, premium: false,
favicon: "https://hottub.spacemoehre.de/favicon.ico".to_string(), favicon: "/favicon.ico".to_string(),
status: "active".to_string(), status: "active".to_string(),
categories: vec![], categories: vec![],
options: vec![ChannelOption { options: vec![ChannelOption {

View File

@@ -14,6 +14,12 @@ use std::thread;
use std::time::Duration; use std::time::Duration;
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["mainstream", "clips", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -8,6 +8,12 @@ use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "live-cams",
tags: &["live", "cams", "amateur"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -293,6 +299,7 @@ impl ChaturbateProvider {
thumb, thumb,
0, 0,
) )
.is_live(true)
.views(views as u32) .views(views as u32)
.uploader(username.clone()) .uploader(username.clone())
.uploader_url(video_url.clone()) .uploader_url(video_url.clone())

View File

@@ -13,6 +13,12 @@ use std::sync::{Arc, RwLock};
use std::thread; use std::thread;
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "studio-network",
tags: &["tube", "networked", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -12,6 +12,12 @@ use crate::status::*;
use crate::util::cache::VideoCache; use crate::util::cache::VideoCache;
use crate::videos::{self, ServerOptions, VideoItem}; use crate::videos::{self, ServerOptions, VideoItem};
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "hentai-animation",
tags: &["hentai", "anime", "premium"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -31,7 +37,6 @@ struct HanimeSearchRequest {
page: u8, page: u8,
} }
#[allow(dead_code)]
impl HanimeSearchRequest { impl HanimeSearchRequest {
pub fn new() -> Self { pub fn new() -> Self {
HanimeSearchRequest { HanimeSearchRequest {
@@ -45,26 +50,10 @@ impl HanimeSearchRequest {
page: 0, page: 0,
} }
} }
pub fn tags(mut self, tags: Vec<String>) -> Self {
self.tags = tags;
self
}
pub fn search_text(mut self, search_text: String) -> Self { pub fn search_text(mut self, search_text: String) -> Self {
self.search_text = search_text; self.search_text = search_text;
self self
} }
pub fn tags_mode(mut self, tags_mode: String) -> Self {
self.tags_mode = tags_mode;
self
}
pub fn brands(mut self, brands: Vec<String>) -> Self {
self.brands = brands;
self
}
pub fn blacklist(mut self, blacklist: Vec<String>) -> Self {
self.blacklist = blacklist;
self
}
pub fn order_by(mut self, order_by: String) -> Self { pub fn order_by(mut self, order_by: String) -> Self {
self.order_by = order_by; self.order_by = order_by;
self self
@@ -114,16 +103,11 @@ struct HanimeSearchResult {
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[allow(dead_code)] pub struct HanimeProvider;
pub struct HanimeProvider {
url: String,
}
impl HanimeProvider { impl HanimeProvider {
pub fn new() -> Self { pub fn new() -> Self {
HanimeProvider { HanimeProvider
url: "https://hanime.tv/".to_string(),
}
} }
fn build_channel(&self, _clientversion: ClientVersion) -> Channel { fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
@@ -213,9 +197,10 @@ impl HanimeProvider {
drop(conn); drop(conn);
let id = hit.id.to_string(); let id = hit.id.to_string();
let title = hit.name; let title = hit.name;
let thumb = hit.cover_url.replace( let thumb = crate::providers::build_proxy_url(
"https://hanime-cdn.com", &options,
"https://hottub.spacemoehre.de/proxy/hanime-cdn", "hanime-cdn",
&crate::providers::strip_url_scheme(&hit.cover_url),
); );
let duration = (hit.duration_in_ms / 1000) as u32; // Convert ms to seconds let duration = (hit.duration_in_ms / 1000) as u32; // Convert ms to seconds
let channel = "hanime".to_string(); // Placeholder, adjust as needed let channel = "hanime".to_string(); // Placeholder, adjust as needed

1261
src/providers/heavyfetish.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -16,6 +16,12 @@ use std::vec;
use titlecase::Titlecase; use titlecase::Titlecase;
use wreq::Version; use wreq::Version;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "hentai-animation",
tags: &["hentai", "anime", "curated"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -195,7 +201,12 @@ impl HentaihavenProvider {
let block = match html let block = match html
.split("previouspostslink") .split("previouspostslink")
.next() .next()
.and_then(|s| s.split("vraven_manga_list").nth(1)) .and_then(|s| {
s.split("vraven_manga_list").nth(1).or_else(|| {
s.find(r#"<div class="page-content-listing item-big_thumbnail">"#)
.map(|idx| &s[idx..])
})
})
{ {
Some(b) => b, Some(b) => b,
None => { None => {

View File

@@ -14,6 +14,12 @@ use std::vec;
use wreq::Client; use wreq::Client;
use wreq_util::Emulation; use wreq_util::Emulation;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "gay-male",
tags: &["gay", "male", "tube"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -15,6 +15,12 @@ use std::sync::{Arc, RwLock};
use std::{thread, vec}; use std::{thread, vec};
use titlecase::Titlecase; use titlecase::Titlecase;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "studio-network",
tags: &["studio", "hd", "scenes"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -188,7 +194,9 @@ impl HqpornerProvider {
.await .await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?; .map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let video_items = self.get_video_items_from_html(text, &mut requester).await; let video_items = self
.get_video_items_from_html(text, &mut requester, &options)
.await;
if !video_items.is_empty() { if !video_items.is_empty() {
cache.insert(video_url, video_items.clone()); cache.insert(video_url, video_items.clone());
} }
@@ -234,7 +242,9 @@ impl HqpornerProvider {
.await .await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?; .map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let video_items = self.get_video_items_from_html(text, &mut requester).await; let video_items = self
.get_video_items_from_html(text, &mut requester, &options)
.await;
if !video_items.is_empty() { if !video_items.is_empty() {
cache.insert(video_url, video_items.clone()); cache.insert(video_url, video_items.clone());
} }
@@ -245,6 +255,7 @@ impl HqpornerProvider {
&self, &self,
html: String, html: String,
requester: &mut Requester, requester: &mut Requester,
options: &ServerOptions,
) -> Vec<VideoItem> { ) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") { if html.is_empty() || html.contains("404 Not Found") {
return vec![]; return vec![];
@@ -273,7 +284,7 @@ impl HqpornerProvider {
let Some(seg) = iter.next() else { let Some(seg) = iter.next() else {
break; break;
}; };
in_flight.push(self.get_video_item(seg, requester.clone())); in_flight.push(self.get_video_item(seg, requester.clone(), options));
} }
let Some(result) = in_flight.next().await else { let Some(result) = in_flight.next().await else {
@@ -312,7 +323,12 @@ impl HqpornerProvider {
items items
} }
async fn get_video_item(&self, seg: String, mut requester: Requester) -> Result<VideoItem> { async fn get_video_item(
&self,
seg: String,
mut requester: Requester,
options: &ServerOptions,
) -> Result<VideoItem> {
let video_url = format!( let video_url = format!(
"{}{}", "{}{}",
self.url, self.url,
@@ -351,7 +367,7 @@ impl HqpornerProvider {
format!("https://{}", thumb_raw.trim_start_matches('/')) format!("https://{}", thumb_raw.trim_start_matches('/'))
}; };
let thumb = match thumb_abs.strip_prefix("https://") { let thumb = match thumb_abs.strip_prefix("https://") {
Some(path) => format!("https://hottub.spacemoehre.de/proxy/hqporner-thumb/{path}"), Some(path) => crate::providers::build_proxy_url(options, "hqporner-thumb", path),
None => thumb_abs, None => thumb_abs,
}; };
let raw_duration = seg let raw_duration = seg

1050
src/providers/hsex.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -16,6 +16,12 @@ use std::{thread, vec};
use titlecase::Titlecase; use titlecase::Titlecase;
use wreq::Version; use wreq::Version;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "fetish-kink",
tags: &["hypnosis", "fetish", "sissy"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -17,6 +17,12 @@ use std::vec;
use titlecase::Titlecase; use titlecase::Titlecase;
use wreq::Version; use wreq::Version;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "jav",
tags: &["jav", "asian", "streaming"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -155,7 +161,7 @@ impl JavtifulProvider {
return Ok(vec![]); return Ok(vec![]);
} }
let video_items: Vec<VideoItem> = self let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), &mut requester) .get_video_items_from_html(text.clone(), &mut requester, &options)
.await; .await;
if !video_items.is_empty() { if !video_items.is_empty() {
cache.remove(&video_url); cache.remove(&video_url);
@@ -223,7 +229,7 @@ impl JavtifulProvider {
return Ok(vec![]); return Ok(vec![]);
} }
let video_items: Vec<VideoItem> = self let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), &mut requester) .get_video_items_from_html(text.clone(), &mut requester, &options)
.await; .await;
if !video_items.is_empty() { if !video_items.is_empty() {
cache.remove(&video_url); cache.remove(&video_url);
@@ -238,6 +244,7 @@ impl JavtifulProvider {
&self, &self,
html: String, html: String,
requester: &mut Requester, requester: &mut Requester,
options: &ServerOptions,
) -> Vec<VideoItem> { ) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") { if html.is_empty() || html.contains("404 Not Found") {
return vec![]; return vec![];
@@ -269,7 +276,7 @@ impl JavtifulProvider {
.split("card ") .split("card ")
.skip(1) .skip(1)
.filter(|seg| !seg.contains("SPONSOR")) .filter(|seg| !seg.contains("SPONSOR"))
.map(|el| self.get_video_item(el.to_string(), requester.clone())); .map(|el| self.get_video_item(el.to_string(), requester.clone(), options));
join_all(futures) join_all(futures)
.await .await
@@ -300,7 +307,12 @@ impl JavtifulProvider {
.collect() .collect()
} }
async fn get_video_item(&self, seg: String, mut requester: Requester) -> Result<VideoItem> { async fn get_video_item(
&self,
seg: String,
mut requester: Requester,
options: &ServerOptions,
) -> Result<VideoItem> {
let video_url = seg let video_url = seg
.split(" href=\"") .split(" href=\"")
.nth(1) .nth(1)
@@ -350,7 +362,9 @@ impl JavtifulProvider {
.unwrap_or("") .unwrap_or("")
.to_string(); .to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32; let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let (tags, formats, views) = self.extract_media(&video_url, &mut requester).await?; let (tags, formats, views) = self
.extract_media(&video_url, &mut requester, options)
.await?;
if preview.len() == 0 { if preview.len() == 0 {
preview = format!("https://trailers.jav.si/preview/{id}.mp4"); preview = format!("https://trailers.jav.si/preview/{id}.mp4");
@@ -367,6 +381,7 @@ impl JavtifulProvider {
&self, &self,
url: &str, url: &str,
requester: &mut Requester, requester: &mut Requester,
options: &ServerOptions,
) -> Result<(Vec<String>, Vec<VideoFormat>, u32)> { ) -> Result<(Vec<String>, Vec<VideoFormat>, u32)> {
let text = requester let text = requester
.get(url, Some(Version::HTTP_2)) .get(url, Some(Version::HTTP_2))
@@ -413,7 +428,11 @@ impl JavtifulProvider {
.unwrap_or(0); .unwrap_or(0);
let quality = "1080p".to_string(); let quality = "1080p".to_string();
let video_url = url.replace("javtiful.com", "hottub.spacemoehre.de/proxy/javtiful"); let video_url = crate::providers::build_proxy_url(
options,
"javtiful",
&crate::providers::strip_url_scheme(url),
);
Ok(( Ok((
tags, tags,
vec![VideoFormat::new(video_url, quality, "video/mp4".into())], vec![VideoFormat::new(video_url, quality, "video/mp4".into())],

View File

@@ -16,6 +16,12 @@ use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
use wreq::Version; use wreq::Version;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "jav",
tags: &["jav", "asian", "uncensored"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -9,196 +9,49 @@ use std::sync::Arc;
use crate::{ use crate::{
DbPool, DbPool,
api::ClientVersion, api::ClientVersion,
status::Channel, status::{Channel, ChannelGroup, ChannelView, Status, StatusResponse},
util::{cache::VideoCache, discord::send_discord_error_report, requester::Requester}, util::{cache::VideoCache, discord::send_discord_error_report, requester::Requester},
videos::{ServerOptions, VideoItem}, videos::{ServerOptions, VideoItem},
}; };
pub mod all; include!(concat!(env!("OUT_DIR"), "/provider_selection.rs"));
pub mod hanime; include!(concat!(env!("OUT_DIR"), "/provider_modules.rs"));
pub mod perverzija;
pub mod pmvhaven;
pub mod pornhub;
// pub mod spankbang;
pub mod homoxxx;
pub mod okporn;
pub mod okxxx;
pub mod perfectgirls;
pub mod pornhat;
pub mod redtube;
pub mod rule34video;
// pub mod hentaimoon;
pub mod beeg;
pub mod missav;
pub mod omgxxx;
pub mod paradisehill;
pub mod porn00;
pub mod pornzog;
pub mod sxyprn;
pub mod tnaflix;
pub mod viralxxxporn;
pub mod xfree;
pub mod xxthots;
pub mod youjizz;
// pub mod pornxp;
pub mod chaturbate;
pub mod freepornvideosxxx;
pub mod hentaihaven;
pub mod hqporner;
pub mod hypnotube;
pub mod javtiful;
pub mod noodlemagazine;
pub mod pimpbunny;
pub mod rule34gen;
pub mod xxdbx;
// pub mod tube8;
// convenient alias // convenient alias
pub type DynProvider = Arc<dyn Provider>; pub type DynProvider = Arc<dyn Provider>;
/// Static grouping metadata attached to a provider channel at compile time.
/// Each provider module exposes a `CHANNEL_METADATA` constant of this type.
#[derive(Clone, Copy)]
pub struct ProviderChannelMetadata {
    // Channel-group identifier; display names live in channel_group_title.
    pub group_id: &'static str,
    // Descriptive tags; only the first three are exposed to clients.
    pub tags: &'static [&'static str],
}
pub static ALL_PROVIDERS: Lazy<HashMap<&'static str, DynProvider>> = Lazy::new(|| { pub static ALL_PROVIDERS: Lazy<HashMap<&'static str, DynProvider>> = Lazy::new(|| {
let mut m = HashMap::default(); let mut m = HashMap::default();
m.insert("all", Arc::new(all::AllProvider::new()) as DynProvider); include!(concat!(env!("OUT_DIR"), "/provider_registry.rs"));
m.insert(
"perverzija",
Arc::new(perverzija::PerverzijaProvider::new()) as DynProvider,
);
m.insert(
"hanime",
Arc::new(hanime::HanimeProvider::new()) as DynProvider,
);
m.insert(
"pornhub",
Arc::new(pornhub::PornhubProvider::new()) as DynProvider,
);
m.insert(
"rule34video",
Arc::new(rule34video::Rule34videoProvider::new()) as DynProvider,
);
m.insert(
"redtube",
Arc::new(redtube::RedtubeProvider::new()) as DynProvider,
);
m.insert(
"okporn",
Arc::new(okporn::OkpornProvider::new()) as DynProvider,
);
m.insert(
"pornhat",
Arc::new(pornhat::PornhatProvider::new()) as DynProvider,
);
m.insert(
"perfectgirls",
Arc::new(perfectgirls::PerfectgirlsProvider::new()) as DynProvider,
);
m.insert(
"okxxx",
Arc::new(okxxx::OkxxxProvider::new()) as DynProvider,
);
m.insert(
"homoxxx",
Arc::new(homoxxx::HomoxxxProvider::new()) as DynProvider,
);
m.insert(
"missav",
Arc::new(missav::MissavProvider::new()) as DynProvider,
);
m.insert(
"xxthots",
Arc::new(xxthots::XxthotsProvider::new()) as DynProvider,
);
m.insert(
"sxyprn",
Arc::new(sxyprn::SxyprnProvider::new()) as DynProvider,
);
m.insert(
"porn00",
Arc::new(porn00::Porn00Provider::new()) as DynProvider,
);
m.insert(
"youjizz",
Arc::new(youjizz::YoujizzProvider::new()) as DynProvider,
);
m.insert(
"paradisehill",
Arc::new(paradisehill::ParadisehillProvider::new()) as DynProvider,
);
m.insert(
"pornzog",
Arc::new(pornzog::PornzogProvider::new()) as DynProvider,
);
m.insert(
"omgxxx",
Arc::new(omgxxx::OmgxxxProvider::new()) as DynProvider,
);
m.insert("beeg", Arc::new(beeg::BeegProvider::new()) as DynProvider);
m.insert(
"tnaflix",
Arc::new(tnaflix::TnaflixProvider::new()) as DynProvider,
);
m.insert(
"viralxxxporn",
Arc::new(viralxxxporn::ViralxxxpornProvider::new()) as DynProvider,
);
// m.insert("pornxp", Arc::new(pornxp::PornxpProvider::new()) as DynProvider);
m.insert(
"rule34gen",
Arc::new(rule34gen::Rule34genProvider::new()) as DynProvider,
);
m.insert(
"xxdbx",
Arc::new(xxdbx::XxdbxProvider::new()) as DynProvider,
);
m.insert(
"xfree",
Arc::new(xfree::XfreeProvider::new()) as DynProvider,
);
m.insert(
"hqporner",
Arc::new(hqporner::HqpornerProvider::new()) as DynProvider,
);
m.insert(
"pmvhaven",
Arc::new(pmvhaven::PmvhavenProvider::new()) as DynProvider,
);
m.insert(
"noodlemagazine",
Arc::new(noodlemagazine::NoodlemagazineProvider::new()) as DynProvider,
);
m.insert(
"pimpbunny",
Arc::new(pimpbunny::PimpbunnyProvider::new()) as DynProvider,
);
m.insert(
"javtiful",
Arc::new(javtiful::JavtifulProvider::new()) as DynProvider,
);
m.insert(
"hypnotube",
Arc::new(hypnotube::HypnotubeProvider::new()) as DynProvider,
);
m.insert(
"freepornvideosxxx",
Arc::new(freepornvideosxxx::FreepornvideosxxxProvider::new()) as DynProvider,
);
m.insert(
"hentaihaven",
Arc::new(hentaihaven::HentaihavenProvider::new()) as DynProvider,
);
m.insert(
"chaturbate",
Arc::new(chaturbate::ChaturbateProvider::new()) as DynProvider,
);
// m.insert("tube8", Arc::new(tube8::Tube8Provider::new()) as DynProvider);
// add more here as you migrate them
m m
}); });
pub fn init_providers_now() { pub fn init_providers_now() {
// Idempotent & thread-safe: runs the Lazy init exactly once. // Idempotent & thread-safe: runs the Lazy init exactly once.
crate::flow_debug!(
"provider init selection={:?}",
compile_time_selected_provider()
);
Lazy::force(&ALL_PROVIDERS); Lazy::force(&ALL_PROVIDERS);
} }
/// Returns the provider id baked in by the build script for a
/// single-provider build, or `None` for the full multi-provider build.
pub fn compile_time_selected_provider() -> Option<&'static str> {
    // COMPILE_TIME_SELECTED_PROVIDER is defined by the generated
    // provider_selection.rs include at the top of this module.
    COMPILE_TIME_SELECTED_PROVIDER
}
/// Maps a requested channel id to the effective provider for this build.
/// In single-provider builds the "all" meta-channel is redirected to the
/// compiled-in provider; every other request passes through unchanged.
pub fn resolve_provider_for_build<'a>(channel: &'a str) -> &'a str {
    if channel == "all" {
        if let Some(selected) = compile_time_selected_provider() {
            return selected;
        }
    }
    channel
}
pub fn panic_payload_to_string(payload: Box<dyn std::any::Any + Send>) -> String { pub fn panic_payload_to_string(payload: Box<dyn std::any::Any + Send>) -> String {
if let Some(s) = payload.downcast_ref::<&str>() { if let Some(s) = payload.downcast_ref::<&str>() {
return (*s).to_string(); return (*s).to_string();
@@ -213,10 +66,29 @@ pub async fn run_provider_guarded<F>(provider_name: &str, context: &str, fut: F)
where where
F: Future<Output = Vec<VideoItem>>, F: Future<Output = Vec<VideoItem>>,
{ {
crate::flow_debug!(
"provider guard enter provider={} context={}",
provider_name,
context
);
match AssertUnwindSafe(fut).catch_unwind().await { match AssertUnwindSafe(fut).catch_unwind().await {
Ok(videos) => videos, Ok(videos) => {
crate::flow_debug!(
"provider guard exit provider={} context={} videos={}",
provider_name,
context,
videos.len()
);
videos
}
Err(payload) => { Err(payload) => {
let panic_msg = panic_payload_to_string(payload); let panic_msg = panic_payload_to_string(payload);
crate::flow_debug!(
"provider guard panic provider={} context={} panic={}",
provider_name,
context,
&panic_msg
);
let _ = send_discord_error_report( let _ = send_discord_error_report(
format!("Provider panic: {}", provider_name), format!("Provider panic: {}", provider_name),
None, None,
@@ -260,8 +132,21 @@ pub fn requester_or_default(
context: &str, context: &str,
) -> Requester { ) -> Requester {
match options.requester.clone() { match options.requester.clone() {
Some(requester) => requester, Some(requester) => {
crate::flow_debug!(
"provider requester existing provider={} context={} trace={}",
provider_name,
context,
requester.debug_trace_id().unwrap_or("none")
);
requester
}
None => { None => {
crate::flow_debug!(
"provider requester fallback provider={} context={}",
provider_name,
context
);
report_provider_error_background( report_provider_error_background(
provider_name, provider_name,
context, context,
@@ -272,6 +157,210 @@ pub fn requester_or_default(
} }
} }
/// Removes a leading `http://` or `https://` scheme (plus any leftover
/// leading slashes, covering protocol-relative `//host/...` inputs) and
/// returns the host-and-path portion. Inputs with any other scheme are
/// returned unchanged.
pub fn strip_url_scheme(url: &str) -> String {
    let without_scheme = if let Some(rest) = url.strip_prefix("https://") {
        rest
    } else if let Some(rest) = url.strip_prefix("http://") {
        rest
    } else {
        url
    };
    without_scheme.trim_start_matches('/').to_string()
}
/// Builds a `/proxy/{proxy}/{target}` URL, prefixed with the configured
/// public base URL when one is set (trailing/leading slashes normalized).
/// `target` is expected to be scheme-less (see `strip_url_scheme`).
pub fn build_proxy_url(options: &ServerOptions, proxy: &str, target: &str) -> String {
    let path = target.trim_start_matches('/');
    match options
        .public_url_base
        .as_deref()
        .map(|base| base.trim_end_matches('/'))
    {
        Some(base) if !base.is_empty() => format!("{base}/proxy/{proxy}/{path}"),
        // No usable base configured: emit a server-relative URL.
        _ => format!("/proxy/{proxy}/{path}"),
    }
}
/// Looks up the compile-time channel metadata for `id`. The lookup body
/// is generated by the build script into OUT_DIR so that single-provider
/// builds only embed the selected provider's metadata.
fn channel_metadata_for(id: &str) -> Option<ProviderChannelMetadata> {
    include!(concat!(env!("OUT_DIR"), "/provider_metadata_fn.rs"))
}
/// Maps a channel-group id to its human-readable display title.
/// Unknown ids fall back to "Other".
fn channel_group_title(group_id: &str) -> &'static str {
    const TITLES: &[(&str, &str)] = &[
        ("meta-search", "Meta Search"),
        ("mainstream-tube", "Mainstream Tube"),
        ("tiktok", "Tiktok"),
        ("studio-network", "Studio & Network"),
        ("amateur-homemade", "Amateur & Homemade"),
        ("onlyfans", "OnlyFans"),
        ("chinese", "Chinese"),
        ("jav", "JAV"),
        ("fetish-kink", "Fetish & Kink"),
        ("hentai-animation", "Hentai & Animation"),
        ("ai", "AI"),
        ("gay-male", "Gay & Male"),
        ("live-cams", "Live Cams"),
        ("pmv-compilation", "PMV & Compilation"),
    ];
    TITLES
        .iter()
        .find(|(id, _)| *id == group_id)
        .map(|(_, title)| *title)
        .unwrap_or("Other")
}
/// Optional SF-Symbols-style icon name for a channel group; only the
/// regional groups ("jav", "chinese") currently carry one.
fn channel_group_system_image(group_id: &str) -> Option<&'static str> {
    if matches!(group_id, "jav" | "chinese") {
        Some("globe")
    } else {
        None
    }
}
/// Display rank for a channel group; lower sorts first, unknown groups
/// sink to the bottom (99).
///
/// NOTE(review): "amateur-homemade" has a display title but no explicit
/// rank here (it falls through to 99) — confirm that is intentional.
fn channel_group_order(group_id: &str) -> usize {
    const ORDERED: &[&str] = &[
        "meta-search",
        "mainstream-tube",
        "tiktok",
        "studio-network",
        "onlyfans",
        "chinese",
        "jav",
        "fetish-kink",
        "hentai-animation",
        "ai",
        "gay-male",
        "live-cams",
        "pmv-compilation",
    ];
    ORDERED
        .iter()
        .position(|id| *id == group_id)
        .unwrap_or(99)
}
/// Converts a raw status `Channel` into the API-facing `ChannelView`,
/// attaching compile-time group/tag metadata and any per-channel
/// yt-dlp compatibility command.
pub fn decorate_channel(channel: Channel) -> ChannelView {
    // Build-script-generated lookup; None for channels without metadata.
    let metadata = channel_metadata_for(&channel.id);
    // pimpbunny streams require yt-dlp's unsafe-extension compat switch.
    let ytdlp_command = match channel.id.as_str() {
        "pimpbunny" => Some("yt-dlp --compat-options allow-unsafe-ext".to_string()),
        _ => None,
    };
    ChannelView {
        id: channel.id,
        name: channel.name,
        description: channel.description,
        premium: channel.premium,
        favicon: channel.favicon,
        status: channel.status,
        categories: channel.categories,
        options: channel.options,
        nsfw: channel.nsfw,
        // camelCase field names match the client-facing JSON schema.
        groupKey: metadata.map(|value| value.group_id.to_string()),
        // Filled in later by assign_channel_sort_order.
        sortOrder: None,
        // Expose at most three descriptive tags per channel.
        tags: metadata.map(|value| {
            value
                .tags
                .iter()
                .take(3)
                .map(|tag| (*tag).to_string())
                .collect()
        }),
        ytdlpCommand: ytdlp_command,
        cacheDuration: channel.cacheDuration,
    }
}
/// Derives the client-facing channel-group index: one `ChannelGroup`
/// per distinct `groupKey` seen in `channels`, ordered by display rank
/// then id, each listing its member channel ids in final sort order.
pub fn build_channel_groups(channels: &[ChannelView]) -> Vec<ChannelGroup> {
    // Distinct group ids in display order (rank, then id as tiebreak).
    let mut group_ids: Vec<String> = channels
        .iter()
        .filter_map(|channel| channel.groupKey.clone())
        .collect();
    group_ids.sort_by(|a, b| {
        (channel_group_order(a), a.as_str()).cmp(&(channel_group_order(b), b.as_str()))
    });
    group_ids.dedup();
    group_ids
        .into_iter()
        .map(|group_id| {
            // Members of this group, ordered by (sortOrder, name, id);
            // channels without a sortOrder sink to the end.
            let mut members: Vec<&ChannelView> = channels
                .iter()
                .filter(|channel| channel.groupKey.as_deref() == Some(group_id.as_str()))
                .collect();
            members.sort_by_key(|channel| {
                (
                    channel.sortOrder.unwrap_or(u32::MAX),
                    channel.name.clone(),
                    channel.id.clone(),
                )
            });
            ChannelGroup {
                title: channel_group_title(&group_id).to_string(),
                systemImage: channel_group_system_image(&group_id).map(str::to_string),
                channelIds: members
                    .into_iter()
                    .map(|channel| channel.id.clone())
                    .collect(),
                id: group_id,
            }
        })
        .collect()
}
/// Assigns a global 1-based `sortOrder` to every channel, ranked by
/// (group display order, group id, lowercased name, lowercased id).
/// Channels without a group sort as the empty group id.
fn assign_channel_sort_order(channels: &mut [ChannelView]) {
    // Capture (original index, group, lowercased name/id) into a shadow
    // list so sorting does not move the channels themselves.
    let mut ordered = channels
        .iter()
        .enumerate()
        .map(|(index, channel)| {
            (
                index,
                channel.groupKey.clone(),
                channel.name.to_ascii_lowercase(),
                channel.id.to_ascii_lowercase(),
            )
        })
        .collect::<Vec<_>>();
    ordered.sort_by(|a, b| {
        let a_group = a.1.as_deref().unwrap_or("");
        let b_group = b.1.as_deref().unwrap_or("");
        (channel_group_order(a_group), a_group, &a.2, &a.3).cmp(&(
            channel_group_order(b_group),
            b_group,
            &b.2,
            &b.3,
        ))
    });
    // Write the 1-based rank back through the remembered indices.
    for (sort_index, (channel_index, _, _, _)) in ordered.into_iter().enumerate() {
        channels[channel_index].sortOrder = Some((sort_index + 1) as u32);
    }
}
/// Assembles the public status payload: decorates every channel with
/// group metadata, assigns a global sort order, and derives the
/// channel-group index consumed by clients.
pub fn build_status_response(status: Status) -> StatusResponse {
    let mut channels = status
        .channels
        .into_iter()
        .map(decorate_channel)
        .collect::<Vec<_>>();
    // Sort order must be assigned before grouping so each group lists
    // its members in final display order.
    assign_channel_sort_order(&mut channels);
    let channelGroups = build_channel_groups(&channels);
    crate::flow_debug!(
        "status response build channels={} groups={}",
        channels.len(),
        channelGroups.len()
    );
    StatusResponse {
        id: status.id,
        name: status.name,
        subtitle: status.subtitle,
        description: status.description,
        iconUrl: status.iconUrl,
        color: status.color,
        status: status.status,
        notices: status.notices,
        channels,
        channelGroups,
        subscription: status.subscription,
        nsfw: status.nsfw,
        categories: status.categories,
        options: status.options,
        filtersFooter: status.filtersFooter,
    }
}
#[async_trait] #[async_trait]
pub trait Provider: Send + Sync { pub trait Provider: Send + Sync {
async fn get_videos( async fn get_videos(
@@ -305,3 +394,125 @@ pub trait Provider: Send + Sync {
}) })
} }
} }
// Unit tests for channel decoration, grouping, and the serialized status
// response. Compiled out of single-provider builds, where most channel
// metadata is absent.
#[cfg(all(test, not(hottub_single_provider)))]
mod tests {
    use super::*;
    use crate::status::ChannelOption;

    // Minimal active-channel fixture; only id/name vary per test.
    fn base_channel(id: &str) -> Channel {
        Channel {
            id: id.to_string(),
            name: id.to_string(),
            description: String::new(),
            premium: false,
            favicon: String::new(),
            status: "active".to_string(),
            categories: vec![],
            options: Vec::<ChannelOption>::new(),
            nsfw: true,
            cacheDuration: None,
        }
    }

    // decorate_channel attaches groupKey and (up to three) tags, and
    // leaves sortOrder/ytdlpCommand unset for ordinary channels.
    #[test]
    fn decorates_channel_with_group_and_tags() {
        let channel = decorate_channel(base_channel("hsex"));
        assert_eq!(channel.groupKey.as_deref(), Some("chinese"));
        assert_eq!(channel.sortOrder, None);
        assert_eq!(channel.ytdlpCommand, None);
        assert_eq!(
            channel.tags.as_deref(),
            Some(
                &[
                    "amateur".to_string(),
                    "chinese".to_string(),
                    "homemade".to_string(),
                ][..]
            )
        );
    }

    // Groups come back in display order (meta-search first).
    #[test]
    fn builds_group_index() {
        let channels = vec![
            decorate_channel(base_channel("all")),
            decorate_channel(base_channel("hsex")),
            decorate_channel(base_channel("missav")),
        ];
        let groups = build_channel_groups(&channels);
        assert_eq!(groups[0].id, "meta-search");
        assert_eq!(groups[1].id, "chinese");
        assert_eq!(groups[2].id, "jav");
    }

    // pimpbunny is the one channel that carries a yt-dlp compat command.
    #[test]
    fn decorates_pimpbunny_with_ytdlp_command() {
        let channel = decorate_channel(base_channel("pimpbunny"));
        assert_eq!(
            channel.ytdlpCommand.as_deref(),
            Some("yt-dlp --compat-options allow-unsafe-ext")
        );
    }

    // Pins recent metadata regrouping so a regression is caught here.
    #[test]
    fn reflects_updated_group_moves() {
        assert_eq!(
            decorate_channel(base_channel("perverzija"))
                .groupKey
                .as_deref(),
            Some("studio-network")
        );
        assert_eq!(
            decorate_channel(base_channel("rule34gen"))
                .groupKey
                .as_deref(),
            Some("ai")
        );
    }

    // End-to-end check on the serialized JSON shape clients consume.
    #[test]
    fn status_response_uses_documented_group_keys() {
        let mut status = Status::new();
        status.channels = vec![
            base_channel("missav"),
            base_channel("hsex"),
            base_channel("all"),
            base_channel("pimpbunny"),
        ];
        let json = serde_json::to_value(build_status_response(status)).expect("valid status json");
        let channels = json["channels"].as_array().expect("channels array");
        let all_channel = channels
            .iter()
            .find(|channel| channel["id"] == "all")
            .expect("all channel present");
        assert_eq!(all_channel["groupKey"], "meta-search");
        // Legacy "group" field must not reappear in the payload.
        assert!(all_channel.get("group").is_none());
        assert!(all_channel["sortOrder"].is_number());
        let groups = json["channelGroups"].as_array().expect("group array");
        let meta_group = groups
            .iter()
            .find(|group| group["id"] == "meta-search")
            .expect("meta group present");
        assert_eq!(meta_group["channelIds"], serde_json::json!(["all"]));
        // Groups reference channels by id only; no embedded objects.
        assert!(meta_group.get("channels").is_none());
        let chinese_group = groups
            .iter()
            .find(|group| group["id"] == "chinese")
            .expect("chinese group present");
        assert_eq!(chinese_group["systemImage"], "globe");
        let pimpbunny_channel = channels
            .iter()
            .find(|channel| channel["id"] == "pimpbunny")
            .expect("pimpbunny channel present");
        assert_eq!(
            pimpbunny_channel["ytdlpCommand"],
            "yt-dlp --compat-options allow-unsafe-ext"
        );
    }
}

View File

@@ -4,18 +4,24 @@ use crate::providers::Provider;
use crate::status::*; use crate::status::*;
use crate::util::cache::VideoCache; use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number; use crate::util::parse_abbreviated_number;
use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds; use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoFormat, VideoItem}; use crate::videos::{ServerOptions, VideoFormat, VideoItem};
use async_trait::async_trait; use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::net::IpAddr;
use url::Url;
use std::vec; use std::vec;
use titlecase::Titlecase; use titlecase::Titlecase;
use wreq::Version; use wreq::Version;
/// Compile-time grouping/tag metadata for this provider's channel,
/// consumed by the build-script-generated registry when decorating
/// the channel list for clients.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["search", "mixed", "user-upload"],
    };
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -82,7 +88,7 @@ impl NoodlemagazineProvider {
.await .await
.unwrap_or_default(); .unwrap_or_default();
let items = self.get_video_items_from_html(text, requester).await; let items = self.get_video_items_from_html(text, &options);
if items.is_empty() { if items.is_empty() {
Ok(old_items) Ok(old_items)
@@ -119,7 +125,7 @@ impl NoodlemagazineProvider {
.await .await
.unwrap_or_default(); .unwrap_or_default();
let items = self.get_video_items_from_html(text, requester).await; let items = self.get_video_items_from_html(text, &options);
if items.is_empty() { if items.is_empty() {
Ok(old_items) Ok(old_items)
@@ -130,11 +136,7 @@ impl NoodlemagazineProvider {
} }
} }
async fn get_video_items_from_html( fn get_video_items_from_html(&self, html: String, options: &ServerOptions) -> Vec<VideoItem> {
&self,
html: String,
requester: Requester,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") { if html.is_empty() || html.contains("404 Not Found") {
return vec![]; return vec![];
} }
@@ -152,22 +154,103 @@ impl NoodlemagazineProvider {
None => return vec![], None => return vec![],
}; };
let raw_videos = list list.split("<div class=\"item\">")
.split("<div class=\"item\">")
.skip(1) .skip(1)
.map(|s| s.to_string()); .filter_map(|segment| {
self.get_video_item(segment.to_string(), options).ok()
let futures = raw_videos.map(|v| self.get_video_item(v, requester.clone())); })
let results = join_all(futures).await; .collect()
results.into_iter().filter_map(Result::ok).collect()
} }
async fn get_video_item( fn proxy_url(&self, options: &ServerOptions, video_url: &str) -> String {
&self, crate::providers::build_proxy_url(
video_segment: String, options,
requester: Requester, "noodlemagazine",
) -> Result<VideoItem> { &crate::providers::strip_url_scheme(video_url),
)
}
/// Turns a scraped thumbnail reference into an absolute URL.
///
/// Handles the forms seen in listing markup: already-absolute
/// `http(s)://` URLs, protocol-relative `//host/...` references,
/// site-absolute `/path` references, and bare relative paths.
/// Returns an empty string for blank input.
fn normalize_thumb_url(&self, thumb: &str) -> String {
    let thumb = thumb.trim();
    if thumb.is_empty() {
        return String::new();
    }
    if thumb.starts_with("http://") || thumb.starts_with("https://") {
        return thumb.to_string();
    }
    if thumb.starts_with("//") {
        // Protocol-relative reference: force https.
        return format!("https:{thumb}");
    }
    // Join with the provider base URL; trim the base's trailing '/' so a
    // site-absolute "/path" never produces a doubled slash.
    let base = self.url.trim_end_matches('/');
    if thumb.starts_with('/') {
        return format!("{base}{thumb}");
    }
    format!("{base}/{thumb}")
}
/// True when `path` ends (case-insensitively) with a known raster-image
/// extension; used to keep non-image URLs out of the thumb proxy.
fn has_allowed_image_extension(path: &str) -> bool {
    const ALLOWED: [&str; 6] = [".jpg", ".jpeg", ".png", ".webp", ".avif", ".gif"];
    let lowered = path.to_ascii_lowercase();
    ALLOWED.iter().any(|ext| lowered.ends_with(ext))
}
/// SSRF guard for the thumb proxy: rejects "localhost" and any literal
/// IP in a private, loopback, link-local, or otherwise non-public range.
/// Plain hostnames (non-IP) are allowed through.
fn is_disallowed_thumb_host(host: &str) -> bool {
    if host.eq_ignore_ascii_case("localhost") {
        return true;
    }
    let Ok(ip) = host.parse::<IpAddr>() else {
        // Not an IP literal — leave hostname policy to the proxy itself.
        return false;
    };
    match ip {
        IpAddr::V4(v4) => {
            v4.is_private()
                || v4.is_loopback()
                || v4.is_link_local()
                || v4.is_broadcast()
                || v4.is_documentation()
                || v4.is_unspecified()
        }
        IpAddr::V6(v6) => {
            v6.is_loopback()
                || v6.is_unspecified()
                || v6.is_multicast()
                || v6.is_unique_local()
                || v6.is_unicast_link_local()
        }
    }
}
/// True when `url` is a parseable https URL whose host passes the SSRF
/// guard and whose path has an allowed image extension.
fn is_allowed_thumb_url(&self, url: &str) -> bool {
    let Ok(parsed) = Url::parse(url) else {
        return false;
    };
    if parsed.scheme() != "https" {
        return false;
    }
    match parsed.host_str() {
        Some(host) => {
            !Self::is_disallowed_thumb_host(host)
                && Self::has_allowed_image_extension(parsed.path())
        }
        None => false,
    }
}
/// Normalizes and validates a scraped thumb reference, then rewrites it
/// through the noodlemagazine-thumb proxy. Returns an empty string when
/// the thumb is blank or fails validation, so callers render no image.
fn proxied_thumb(&self, options: &ServerOptions, thumb: &str) -> String {
    let normalized = self.normalize_thumb_url(thumb);
    if normalized.is_empty() {
        return String::new();
    }
    if !self.is_allowed_thumb_url(&normalized) {
        return String::new();
    }
    let target = crate::providers::strip_url_scheme(&normalized);
    crate::providers::build_proxy_url(options, "noodlemagazine-thumb", &target)
}
fn get_video_item(&self, video_segment: String, options: &ServerOptions) -> Result<VideoItem> {
let href = video_segment let href = video_segment
.split("<a href=\"") .split("<a href=\"")
.nth(1) .nth(1)
@@ -217,54 +300,24 @@ impl NoodlemagazineProvider {
.and_then(|s| s.split('<').next()) .and_then(|s| s.split('<').next())
.and_then(|v| parse_abbreviated_number(v.trim())) .and_then(|v| parse_abbreviated_number(v.trim()))
.unwrap_or(0); .unwrap_or(0);
let proxy_url = self.proxy_url(options, &video_url);
let formats = self let proxied_thumb = self.proxied_thumb(options, &thumb);
.extract_media(&video_url, requester)
.await
.ok_or_else(|| Error::from("media extraction failed"))?;
Ok(VideoItem::new( Ok(VideoItem::new(
id, id,
title, title,
video_url, proxy_url.clone(),
"noodlemagazine".into(), "noodlemagazine".into(),
thumb, proxied_thumb,
duration, duration,
) )
.views(views) .views(views)
.formats(formats)) .formats(vec![
} VideoFormat::new(proxy_url, "auto".into(), "video/mp4".into())
.format_id("auto".into())
async fn extract_media( .format_note("proxied".into())
&self, .http_header("Referer".into(), video_url),
video_url: &String, ]))
mut requester: Requester,
) -> Option<Vec<VideoFormat>> {
let text = requester
.get(video_url, Some(Version::HTTP_2))
.await
.unwrap_or_default();
let json_str = text.split("window.playlist = ").nth(1)?.split(';').next()?;
let json: serde_json::Value = serde_json::from_str(json_str).ok()?;
let sources = json["sources"].as_array()?;
let mut formats = vec![];
for s in sources {
let file = s["file"].as_str()?.to_string();
let label = s["label"].as_str().unwrap_or("unknown").to_string();
formats.push(
VideoFormat::new(file, label.clone(), "video/mp4".into())
.format_id(label.clone())
.format_note(label.clone())
.http_header("Referer".into(), video_url.clone()),
);
}
Some(formats.into_iter().rev().collect())
} }
} }
@@ -300,3 +353,104 @@ impl Provider for NoodlemagazineProvider {
Some(self.build_channel(clientversion)) Some(self.build_channel(clientversion))
} }
} }
// Tests for the listing parser and proxy rewriting. All parsing is done
// synchronously from fixture HTML — no network requests are made.
#[cfg(test)]
mod tests {
    use super::NoodlemagazineProvider;
    use crate::videos::ServerOptions;

    // ServerOptions fixture with only the public base URL populated.
    fn options() -> ServerOptions {
        ServerOptions {
            featured: None,
            category: None,
            sites: None,
            filter: None,
            language: None,
            public_url_base: Some("https://example.com".to_string()),
            requester: None,
            network: None,
            stars: None,
            categories: None,
            duration: None,
            sort: None,
            sexuality: None,
        }
    }

    // Watch-page URLs are rewritten onto the configured proxy base.
    #[test]
    fn rewrites_video_pages_to_hottub_proxy() {
        let provider = NoodlemagazineProvider::new();
        let options = options();
        assert_eq!(
            provider.proxy_url(&options, "https://noodlemagazine.com/watch/-123_456"),
            "https://example.com/proxy/noodlemagazine/noodlemagazine.com/watch/-123_456"
        );
    }

    // The listing parser builds items (URL, thumb, single proxied format)
    // purely from listing HTML — no per-video detail fetch.
    #[test]
    fn parses_listing_without_detail_page_requests() {
        let provider = NoodlemagazineProvider::new();
        let options = options();
        let html = r#"
<div class="list_videos" id="list_videos">
<div class="item">
<a href="/watch/-123_456">
<img data-src="https://noodlemagazine.com/thumbs/test.jpg" />
</a>
<div class="title">sample &amp; title</div>
<svg><use></use></svg>#clock-o"></use></svg>12:34<
<svg><use></use></svg>#eye"></use></svg>1.2K<
</div>
>Show more</div>
"#;
        let items = provider.get_video_items_from_html(html.to_string(), &options);
        assert_eq!(items.len(), 1);
        assert_eq!(
            items[0].url,
            "https://example.com/proxy/noodlemagazine/noodlemagazine.com/watch/-123_456"
        );
        assert_eq!(
            items[0].thumb,
            "https://example.com/proxy/noodlemagazine-thumb/noodlemagazine.com/thumbs/test.jpg"
        );
        assert_eq!(items[0].formats.as_ref().map(|f| f.len()), Some(1));
    }

    // Thumb validation: https CDN images pass; non-image paths yield an
    // empty thumb rather than a broken proxy URL.
    #[test]
    fn keeps_https_cdn_thumbs_but_drops_non_images() {
        let provider = NoodlemagazineProvider::new();
        let options = options();
        let html = r#"
<div class="list_videos" id="list_videos">
<div class="item">
<a href="/watch/-123_456">
<img data-src="https://cdn.example/thumb.jpg" />
</a>
<div class="title">sample</div>
<svg><use></use></svg>#clock-o"></use></svg>12:34<
<svg><use></use></svg>#eye"></use></svg>1.2K<
</div>
<div class="item">
<a href="/watch/-555_666">
<img data-src="https://noodlemagazine.com/watch/not-an-image" />
</a>
<div class="title">sample 2</div>
<svg><use></use></svg>#clock-o"></use></svg>00:42<
<svg><use></use></svg>#eye"></use></svg>123<
</div>
>Show more</div>
"#;
        let items = provider.get_video_items_from_html(html.to_string(), &options);
        assert_eq!(items.len(), 2);
        assert_eq!(
            items[0].thumb,
            "https://example.com/proxy/noodlemagazine-thumb/cdn.example/thumb.jpg"
        );
        assert!(items[1].thumb.is_empty());
    }
}

View File

@@ -14,6 +14,12 @@ use std::vec;
use wreq::Client; use wreq::Client;
use wreq_util::Emulation; use wreq_util::Emulation;
/// Compile-time grouping/tag metadata for this provider's channel,
/// consumed by the build-script-generated registry when decorating
/// the channel list for clients.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["tube", "hd", "mixed"],
    };
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -15,6 +15,12 @@ use std::vec;
use wreq::Client; use wreq::Client;
use wreq_util::Emulation; use wreq_util::Emulation;
/// Compile-time grouping/tag metadata for this provider's channel,
/// consumed by the build-script-generated registry when decorating
/// the channel list for clients.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["tube", "mixed", "search"],
    };
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -9,10 +9,17 @@ use crate::{status::*, util};
use async_trait::async_trait; use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use scraper::{Html, Selector};
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use std::thread; use std::thread;
use std::vec; use std::vec;
/// Compile-time grouping/tag metadata for this provider's channel,
/// consumed by the build-script-generated registry when decorating
/// the channel list for clients.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "studio-network",
        tags: &["studio", "networks", "models"],
    };
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -586,6 +593,28 @@ impl OmgxxxProvider {
return None; return None;
} }
/// Extracts (href, display-title) pairs from the `div.models a` anchors
/// of one listing segment. Titles have their inner whitespace collapsed
/// to single spaces; anchors with a missing href or an empty title are
/// skipped.
fn extract_tag_entries(&self, video_segment: &str) -> Vec<(String, String)> {
    let fragment = Html::parse_fragment(video_segment);
    let selector = Selector::parse("div.models a").expect("valid omgxxx models selector");
    let mut entries = Vec::new();
    for anchor in fragment.select(&selector) {
        let Some(href) = anchor.value().attr("href") else {
            continue;
        };
        // Join text nodes, then collapse runs of whitespace.
        let raw_text = anchor.text().collect::<Vec<_>>().join(" ");
        let title = raw_text.split_whitespace().collect::<Vec<_>>().join(" ");
        if !title.is_empty() {
            entries.push((href.to_string(), title));
        }
    }
    entries
}
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> { fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
if html.is_empty() { if html.is_empty() {
println!("HTML is empty"); println!("HTML is empty");
@@ -751,64 +780,50 @@ impl OmgxxxProvider {
let site_id = self let site_id = self
.get_site_id_from_name(site_name) .get_site_id_from_name(site_name)
.unwrap_or("".to_string()); .unwrap_or("".to_string());
let mut tags = match video_segment.contains("class=\"models\">") { let mut tags = Vec::new();
true => video_segment for (href, tag_title) in self.extract_tag_entries(video_segment) {
.split("class=\"models\">") if href.contains("/models/") {
.collect::<Vec<&str>>() let model_id = href
.get(1) .split("/models/")
.copied() .nth(1)
.unwrap_or_default() .unwrap_or_default()
.split("</div>") .split('/')
.collect::<Vec<&str>>() .next()
.get(0) .unwrap_or_default()
.copied() .to_string();
.unwrap_or_default() if !model_id.is_empty() {
.split("href=\"")
.collect::<Vec<&str>>()[1..]
.into_iter()
.map(|s| {
Self::push_unique( Self::push_unique(
&self.stars, &self.stars,
FilterOption { FilterOption {
id: s id: model_id,
.split("/") title: tag_title.clone(),
.collect::<Vec<&str>>()
.get(4)
.copied()
.unwrap_or_default()
.to_string(),
title: s
.split(">")
.collect::<Vec<&str>>()
.get(1)
.copied()
.unwrap_or_default()
.split("<")
.collect::<Vec<&str>>()
.get(0)
.copied()
.unwrap_or_default()
.trim()
.to_string(),
}, },
); );
s.split(">") }
.collect::<Vec<&str>>() }
.get(1) if href.contains("/sites/") {
.copied() let site_id = href
.unwrap_or_default() .split("/sites/")
.split("<") .nth(1)
.collect::<Vec<&str>>() .unwrap_or_default()
.get(0) .split('/')
.copied() .next()
.unwrap_or_default() .unwrap_or_default()
.trim() .to_string();
.to_string() if !site_id.is_empty() {
}) Self::push_unique(
.collect::<Vec<String>>() &self.sites,
.to_vec(), FilterOption {
false => vec![], id: site_id,
}; title: tag_title.clone(),
},
);
}
}
if !tags.iter().any(|existing| existing == &tag_title) {
tags.push(tag_title);
}
}
if !site_id.is_empty() { if !site_id.is_empty() {
Self::push_unique( Self::push_unique(
&self.sites, &self.sites,
@@ -817,7 +832,9 @@ impl OmgxxxProvider {
title: site_name.to_string(), title: site_name.to_string(),
}, },
); );
tags.push(site_name.to_string()); if !tags.iter().any(|existing| existing == site_name) {
tags.push(site_name.to_string());
}
} }
let video_item = VideoItem::new( let video_item = VideoItem::new(
@@ -837,6 +854,160 @@ impl OmgxxxProvider {
} }
} }
// Tests for tag/model/site extraction from omgxxx listing HTML, using
// both a hand-built fixture and a shape captured from the live site.
#[cfg(test)]
mod tests {
    use super::*;

    // Provider fixture with one pre-known site so site-name lookups resolve.
    fn test_provider() -> OmgxxxProvider {
        OmgxxxProvider {
            url: "https://www.omg.xxx".to_string(),
            sites: Arc::new(RwLock::new(vec![FilterOption {
                id: "clubsweethearts".to_string(),
                title: "Club Sweethearts".to_string(),
            }])),
            networks: Arc::new(RwLock::new(vec![])),
            stars: Arc::new(RwLock::new(vec![])),
        }
    }

    // Model and site anchors become tags (no empty strings), and model
    // entries are registered into the shared stars filter list.
    #[test]
    fn parses_model_and_site_tags_without_empty_strings() {
        let provider = test_provider();
        let html = r##"
<div class="list-videos">
<div class="item">
<a href="https://www.omg.xxx/videos/4290034/example-video/" title="[Club Sweethearts] Example Video"></a>
<img loading="lazy" data-src="https://cdn.example/thumb.jpg" />
<span class="duration">Duration 12:34</span>
<div class="views">1.2M</div>
<div class="thumb" data-preview="https://cdn.example/preview.mp4"></div>
<div class="models">
<a class="models__item thumb_cs" href="https://www.omg.xxx/sites/clubsweethearts/" style="order: 0;">
<svg class="icon icon-tv"><use xlink:href="#icon-tv"></use></svg>
<span>Club Sweethearts</span>
</a>
<a class="models__item thumb_model" href="https://www.omg.xxx/models/oliver-trunk/" style="order: 0;">
<svg class="icon icon-star"><use xlink:href="#icon-star"></use></svg>
<span>Oliver Trunk</span>
</a>
<a class="models__item thumb_model" href="https://www.omg.xxx/models/sara-bork/" style="order: 0;">
<svg class="icon icon-star"><use xlink:href="#icon-star"></use></svg>
<span>Sara Bork</span>
</a>
</div>
</div>
</div>
"##
        .to_string();
        let items = provider.get_video_items_from_html(html);
        assert_eq!(items.len(), 1);
        assert_eq!(
            items[0].tags,
            Some(vec![
                "Club Sweethearts".to_string(),
                "Oliver Trunk".to_string(),
                "Sara Bork".to_string()
            ])
        );
        assert!(
            items[0]
                .tags
                .as_ref()
                .unwrap()
                .iter()
                .all(|tag| !tag.is_empty())
        );
        let stars = provider.stars.read().unwrap().clone();
        assert!(
            stars
                .iter()
                .any(|tag| tag.id == "oliver-trunk" && tag.title == "Oliver Trunk")
        );
        assert!(
            stars
                .iter()
                .any(|tag| tag.id == "sara-bork" && tag.title == "Sara Bork")
        );
    }

    // Live-site item shape (nested item-info wrapper): site and model
    // anchors still populate tags plus the sites/stars filter lists.
    #[test]
    fn parses_live_item_shape_with_channel_and_pornstar_info() {
        let provider = test_provider();
        let html = r##"
<div class="list-videos">
<div class="item">
<a href="https://www.omg.xxx/videos/93763302/step-daughter-vol-2-scene-3/" target="_blank" title="Step Daughter Vol.2 Scene 3">
<div class="img thumb__img" data-preview="https://cast.omg.xxx/preview/93763302.mp4">
<img loading="lazy" class="thumb lazyloaded" src="https://img.omg.xxx/93763000/93763302/medium@2x/1.jpg" data-src="https://img.omg.xxx/93763000/93763302/medium@2x/1.jpg" alt="Step Daughter Vol.2 Scene 3" width="0" height="0">
<span class="duration"> Full Video 26:44 </span>
<span class="js-favourites thumb-favourites" data-action="add" data-type="video" data-object_id="93763302">
<svg class="icon icon-heart-plus"><use xlink:href="#icon-heart-plus"></use></svg>
<svg class="icon icon-trashcan"><use xlink:href="#icon-trashcan"></use></svg>
</span>
</div>
</a>
<div class="item-info">
<a href="https://www.omg.xxx/videos/93763302/step-daughter-vol-2-scene-3/" title="Step Daughter Vol.2 Scene 3">
<strong class="title"> Step Daughter Vol.2 Scene 3 </strong>
</a>
<div class="models is-truncated">
<a class="models__item thumb_cs" href="https://www.omg.xxx/sites/family-sinners/" style="order: 0;">
<svg class="icon icon-tv"><use xlink:href="#icon-tv"></use></svg>
<span>Family Sinners</span>
</a>
<a class="models__item" href="https://www.omg.xxx/models/vienna-rose/" style="order: 0;">
<svg class="icon icon-star"><use xlink:href="#icon-star"></use></svg>
<span>Vienna Rose</span>
</a>
<a class="models__item" href="https://www.omg.xxx/models/mark-wood/" style="order: 1;">
<svg class="icon icon-star"><use xlink:href="#icon-star"></use></svg>
<span>Mark Wood</span>
</a>
</div>
<div class="wrap">
<div class="rating positive "> 100% </div>
<div class="views">4.8K</div>
</div>
</div>
</div>
</div>
"##
        .to_string();
        let items = provider.get_video_items_from_html(html);
        assert_eq!(items.len(), 1);
        assert_eq!(
            items[0].tags,
            Some(vec![
                "Family Sinners".to_string(),
                "Vienna Rose".to_string(),
                "Mark Wood".to_string()
            ])
        );
        let sites = provider.sites.read().unwrap().clone();
        assert!(
            sites
                .iter()
                .any(|tag| tag.id == "family-sinners" && tag.title == "Family Sinners")
        );
        let stars = provider.stars.read().unwrap().clone();
        assert!(
            stars
                .iter()
                .any(|tag| tag.id == "vienna-rose" && tag.title == "Vienna Rose")
        );
        assert!(
            stars
                .iter()
                .any(|tag| tag.id == "mark-wood" && tag.title == "Mark Wood")
        );
    }
}
#[async_trait] #[async_trait]
impl Provider for OmgxxxProvider { impl Provider for OmgxxxProvider {
async fn get_videos( async fn get_videos(

View File

@@ -10,6 +10,12 @@ use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["tube", "mixed", "movies"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -15,6 +15,12 @@ use std::vec;
use wreq::Client; use wreq::Client;
use wreq_util::Emulation; use wreq_util::Emulation;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "studio-network",
tags: &["glamour", "softcore", "solo"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -17,6 +17,12 @@ use wreq::Client;
use wreq::Version; use wreq::Version;
use wreq_util::Emulation; use wreq_util::Emulation;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "studio-network",
tags: &["regional", "amateur", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -2,6 +2,7 @@ use crate::DbPool;
use crate::api::ClientVersion; use crate::api::ClientVersion;
use crate::providers::Provider; use crate::providers::Provider;
use crate::status::*; use crate::status::*;
use crate::util::parse_abbreviated_number;
use crate::util::cache::VideoCache; use crate::util::cache::VideoCache;
use crate::util::discord::{format_error_chain, send_discord_error_report}; use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::util::requester::Requester; use crate::util::requester::Requester;
@@ -10,13 +11,19 @@ use crate::videos::{ServerOptions, VideoFormat, VideoItem};
use async_trait::async_trait; use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use std::{thread, vec}; use std::{thread, vec};
use titlecase::Titlecase; use titlecase::Titlecase;
use url::Url;
use wreq::Version; use wreq::Version;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "onlyfans",
tags: &["creator", "onlyfans", "amateur"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -39,6 +46,10 @@ pub struct PimpbunnyProvider {
} }
impl PimpbunnyProvider { impl PimpbunnyProvider {
const FIREFOX_USER_AGENT: &'static str =
"Mozilla/5.0 (X11; Linux x86_64; rv:147.0) Gecko/20100101 Firefox/147.0";
const HTML_ACCEPT: &'static str =
"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
pub fn new() -> Self { pub fn new() -> Self {
let provider = Self { let provider = Self {
url: "https://pimpbunny.com".to_string(), url: "https://pimpbunny.com".to_string(),
@@ -167,11 +178,304 @@ impl PimpbunnyProvider {
} }
} }
fn is_allowed_thumb_url(url: &str) -> bool {
let Some(url) = Url::parse(url).ok() else {
return false;
};
if url.scheme() != "https" {
return false;
}
let Some(host) = url.host_str() else {
return false;
};
matches!(host, "pimpbunny.com" | "www.pimpbunny.com")
&& url.path().starts_with("/contents/videos_screenshots/")
}
fn proxied_thumb(&self, options: &ServerOptions, thumb: &str) -> String {
if thumb.is_empty() || !Self::is_allowed_thumb_url(thumb) {
return thumb.to_string();
}
crate::providers::build_proxy_url(
options,
"pimpbunny-thumb",
&crate::providers::strip_url_scheme(thumb),
)
}
fn is_allowed_detail_url(url: &str) -> bool {
let Some(url) = Url::parse(url).ok() else {
return false;
};
if url.scheme() != "https" {
return false;
}
let Some(host) = url.host_str() else {
return false;
};
matches!(host, "pimpbunny.com" | "www.pimpbunny.com")
&& !url.path().starts_with("/contents/videos_screenshots/")
}
fn proxied_video(&self, options: &ServerOptions, page_url: &str) -> String {
if page_url.is_empty() || !Self::is_allowed_detail_url(page_url) {
return page_url.to_string();
}
crate::providers::build_proxy_url(
options,
"pimpbunny",
&crate::providers::strip_url_scheme(page_url),
)
}
fn root_referer(&self) -> String {
format!("{}/", self.url.trim_end_matches('/'))
}
fn sort_by(sort: &str) -> &'static str {
match sort {
"best rated" => "rating",
"most viewed" => "video_viewed",
_ => "post_date",
}
}
fn build_search_path_query(query: &str, separator: &str) -> String {
query.split_whitespace().collect::<Vec<_>>().join(separator)
}
fn append_archive_query(url: String, sort: &str) -> String {
let separator = if url.contains('?') { '&' } else { '?' };
format!("{url}{separator}sort_by={}", Self::sort_by(sort))
}
fn page_family_referer(&self, request_url: &str) -> String {
let Some(url) = Url::parse(request_url).ok() else {
return self.root_referer();
};
let path = url.path();
let referer_path = if path.starts_with("/videos/") {
"/videos/".to_string()
} else if path.starts_with("/search/") {
let parts: Vec<_> = path.trim_matches('/').split('/').collect();
if parts.len() >= 2 {
format!("/search/{}/", parts[1])
} else {
"/search/".to_string()
}
} else if path.starts_with("/categories/") {
let parts: Vec<_> = path.trim_matches('/').split('/').collect();
if parts.len() >= 2 {
format!("/categories/{}/", parts[1])
} else {
"/categories/".to_string()
}
} else if path.starts_with("/onlyfans-models/") {
let parts: Vec<_> = path.trim_matches('/').split('/').collect();
if parts.len() >= 2 {
format!("/onlyfans-models/{}/", parts[1])
} else {
"/onlyfans-models/".to_string()
}
} else {
"/".to_string()
};
format!("{}{}", self.url.trim_end_matches('/'), referer_path)
}
fn build_browse_url(&self, page: u8, sort: &str) -> String {
let base = if page <= 1 {
format!("{}/videos/", self.url)
} else {
format!("{}/videos/{page}/", self.url)
};
Self::append_archive_query(base, sort)
}
fn build_search_url(&self, query: &str, page: u8, sort: &str) -> String {
let path_query = Self::build_search_path_query(query, "-");
let base = if page <= 1 {
format!("{}/search/{path_query}/", self.url)
} else {
format!("{}/search/{path_query}/{page}/", self.url)
};
Self::append_archive_query(base, sort)
}
fn build_common_archive_url(&self, archive_path: &str, page: u8, sort: &str) -> String {
let canonical = format!(
"{}/{}",
self.url.trim_end_matches('/'),
archive_path.trim_start_matches('/')
);
let base = if page <= 1 {
canonical
} else {
format!("{}/{}", canonical.trim_end_matches('/'), page)
};
let base = if base.ends_with('/') {
base
} else {
format!("{base}/")
};
Self::append_archive_query(base, sort)
}
fn navigation_headers(
referer: Option<&str>,
sec_fetch_site: &'static str,
) -> Vec<(String, String)> {
let mut headers = vec![
(
"User-Agent".to_string(),
Self::FIREFOX_USER_AGENT.to_string(),
),
("Accept".to_string(), Self::HTML_ACCEPT.to_string()),
("Accept-Language".to_string(), "en-US,en;q=0.9".to_string()),
("Cache-Control".to_string(), "no-cache".to_string()),
("Pragma".to_string(), "no-cache".to_string()),
("Priority".to_string(), "u=0, i".to_string()),
("Connection".to_string(), "keep-alive".to_string()),
("TE".to_string(), "trailers".to_string()),
("Sec-Fetch-Dest".to_string(), "document".to_string()),
("Sec-Fetch-Mode".to_string(), "navigate".to_string()),
("Sec-Fetch-Site".to_string(), sec_fetch_site.to_string()),
("Sec-Fetch-User".to_string(), "?1".to_string()),
("Upgrade-Insecure-Requests".to_string(), "1".to_string()),
];
if let Some(referer) = referer {
headers.push(("Referer".to_string(), referer.to_string()));
}
headers
}
fn headers_with_cookies(
&self,
requester: &Requester,
request_url: &str,
referer: Option<&str>,
sec_fetch_site: &'static str,
) -> Vec<(String, String)> {
let mut headers = Self::navigation_headers(referer, sec_fetch_site);
if let Some(cookie) = requester.cookie_header_for_url(request_url) {
headers.push(("Cookie".to_string(), cookie));
}
headers
}
fn is_cloudflare_challenge(html: &str) -> bool {
html.contains("cf-turnstile-response")
|| html.contains("Performing security verification")
|| html.contains("__cf_chl_rt_tk")
|| html.contains("cUPMDTk:\"")
|| html.contains("Just a moment...")
}
fn extract_challenge_path(html: &str) -> Option<String> {
html.split("cUPMDTk:\"")
.nth(1)
.and_then(|s| s.split('"').next())
.map(str::to_string)
.or_else(|| {
html.split("__cf_chl_rt_tk=")
.nth(1)
.and_then(|s| s.split('"').next())
.map(|token| format!("/?__cf_chl_rt_tk={token}"))
})
}
fn absolute_site_url(&self, path_or_url: &str) -> String {
if path_or_url.starts_with("http://") || path_or_url.starts_with("https://") {
path_or_url.to_string()
} else {
format!(
"{}/{}",
self.url.trim_end_matches('/'),
path_or_url.trim_start_matches('/')
)
}
}
async fn fetch_html(
&self,
requester: &mut Requester,
request_url: &str,
referer: Option<&str>,
sec_fetch_site: &'static str,
) -> Result<String> {
let headers = self.headers_with_cookies(requester, request_url, referer, sec_fetch_site);
let response = requester
.get_raw_with_headers(request_url, headers.clone())
.await
.map_err(Error::from)?;
let status = response.status();
let body = response.text().await.map_err(Error::from)?;
if status.is_success() || status.as_u16() == 404 {
return Ok(body);
}
if status.as_u16() == 403 && Self::is_cloudflare_challenge(&body) {
if let Some(challenge_path) = Self::extract_challenge_path(&body) {
let challenge_url = self.absolute_site_url(&challenge_path);
let challenge_headers = self.headers_with_cookies(
requester,
&challenge_url,
Some(request_url),
"same-origin",
);
let _ = requester
.get_raw_with_headers(&challenge_url, challenge_headers)
.await;
}
}
let retry_headers =
self.headers_with_cookies(requester, request_url, referer, sec_fetch_site);
requester
.get_with_headers(request_url, retry_headers, Some(Version::HTTP_11))
.await
.map_err(|e| Error::from(format!("{e}")))
}
async fn warm_root_session(&self, requester: &mut Requester) {
let root_url = self.root_referer();
let _ = self
.fetch_html(requester, &root_url, None, "none")
.await;
}
async fn warm_root_session_for_base(base: &str, requester: &mut Requester) {
let root_url = format!("{}/", base.trim_end_matches('/'));
let _ = requester
.get_with_headers(
&root_url,
Self::navigation_headers(None, "none"),
Some(Version::HTTP_11),
)
.await;
}
async fn load_stars(base: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> { async fn load_stars(base: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = Requester::new(); let mut requester = Requester::new();
Self::warm_root_session_for_base(base, &mut requester).await;
let request_url = format!("{base}/onlyfans-models/?models_per_page=20");
let headers = {
let root_url = format!("{}/", base.trim_end_matches('/'));
let mut headers = Self::navigation_headers(Some(&root_url), "same-origin");
if let Some(cookie) = requester.cookie_header_for_url(&request_url) {
headers.push(("Cookie".to_string(), cookie));
}
headers
};
let text = requester let text = requester
.get( .get_with_headers(
&format!("{base}/onlyfans-models/?models_per_page=20"), &request_url,
headers,
Some(Version::HTTP_2), Some(Version::HTTP_2),
) )
.await .await
@@ -213,9 +517,20 @@ impl PimpbunnyProvider {
async fn load_categories(base: &str, cats: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> { async fn load_categories(base: &str, cats: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = Requester::new(); let mut requester = Requester::new();
Self::warm_root_session_for_base(base, &mut requester).await;
let request_url = format!("{base}/categories/?items_per_page=120");
let headers = {
let root_url = format!("{}/", base.trim_end_matches('/'));
let mut headers = Self::navigation_headers(Some(&root_url), "same-origin");
if let Some(cookie) = requester.cookie_header_for_url(&request_url) {
headers.push(("Cookie".to_string(), cookie));
}
headers
};
let text = requester let text = requester
.get( .get_with_headers(
&format!("{base}/categories/?items_per_page=120"), &request_url,
headers,
Some(Version::HTTP_2), Some(Version::HTTP_2),
) )
.await .await
@@ -256,15 +571,7 @@ impl PimpbunnyProvider {
sort: &str, sort: &str,
options: ServerOptions, options: ServerOptions,
) -> Result<Vec<VideoItem>> { ) -> Result<Vec<VideoItem>> {
let sort_string = match sort { let video_url = self.build_browse_url(page, sort);
"best rated" => "&sort_by=rating",
"most viewed" => "&sort_by=video_viewed",
_ => "&sort_by=post_date",
};
let video_url = format!(
"{}/videos/{}/?videos_per_page=32{}",
self.url, page, sort_string
);
let old_items = match cache.get(&video_url) { let old_items = match cache.get(&video_url) {
Some((time, items)) => { Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 { if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
@@ -279,7 +586,17 @@ impl PimpbunnyProvider {
}; };
let mut requester = let mut requester =
crate::providers::requester_or_default(&options, module_path!(), "missing_requester"); crate::providers::requester_or_default(&options, module_path!(), "missing_requester");
let text = match requester.get(&video_url, Some(Version::HTTP_11)).await { self.warm_root_session(&mut requester).await;
let referer = self.page_family_referer(&video_url);
let text = match self
.fetch_html(
&mut requester,
&video_url,
Some(&referer),
"same-origin",
)
.await
{
Ok(text) => text, Ok(text) => text,
Err(e) => { Err(e) => {
crate::providers::report_provider_error( crate::providers::report_provider_error(
@@ -291,9 +608,7 @@ impl PimpbunnyProvider {
return Ok(old_items); return Ok(old_items);
} }
}; };
let video_items: Vec<VideoItem> = self let video_items = self.get_video_items_from_html(text.clone(), &options);
.get_video_items_from_html(text.clone(), &mut requester)
.await;
if !video_items.is_empty() { if !video_items.is_empty() {
cache.remove(&video_url); cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone()); cache.insert(video_url.clone(), video_items.clone());
@@ -311,27 +626,17 @@ impl PimpbunnyProvider {
options: ServerOptions, options: ServerOptions,
) -> Result<Vec<VideoItem>> { ) -> Result<Vec<VideoItem>> {
let search_string = query.trim().to_string(); let search_string = query.trim().to_string();
let sort = options.sort.as_deref().unwrap_or("");
let mut video_url = format!( let mut video_url = self.build_search_url(&search_string, page, sort);
"{}/search/{}/?mode=async&function=get_block&block_id=list_videos_videos_list_search_result&videos_per_page=32&from_videos={}",
self.url,
search_string.replace(" ", "-"),
page
);
let sort_string = match options.sort.as_deref().unwrap_or("") {
"best rated" => "&sort_by=rating",
"most viewed" => "&sort_by=video_viewed",
_ => "&sort_by=post_date",
};
if let Ok(stars) = self.stars.read() { if let Ok(stars) = self.stars.read() {
if let Some(star) = stars if let Some(star) = stars
.iter() .iter()
.find(|s| s.title.to_ascii_lowercase() == search_string.to_ascii_lowercase()) .find(|s| s.title.to_ascii_lowercase() == search_string.to_ascii_lowercase())
{ {
video_url = format!( video_url = self.build_common_archive_url(
"{}/onlyfans-models/{}/{}/?videos_per_page=20{}", &format!("/onlyfans-models/{}/", star.id),
self.url, star.id, page, sort_string page,
sort,
); );
} }
} else { } else {
@@ -346,10 +651,8 @@ impl PimpbunnyProvider {
.iter() .iter()
.find(|c| c.title.to_ascii_lowercase() == search_string.to_ascii_lowercase()) .find(|c| c.title.to_ascii_lowercase() == search_string.to_ascii_lowercase())
{ {
video_url = format!( video_url =
"{}/categories/{}/{}/?videos_per_page=20{}", self.build_common_archive_url(&format!("/categories/{}/", cat.id), page, sort);
self.url, cat.id, page, sort_string
);
} }
} else { } else {
crate::providers::report_provider_error_background( crate::providers::report_provider_error_background(
@@ -375,8 +678,17 @@ impl PimpbunnyProvider {
let mut requester = let mut requester =
crate::providers::requester_or_default(&options, module_path!(), "missing_requester"); crate::providers::requester_or_default(&options, module_path!(), "missing_requester");
println!("Fetching URL: {}", video_url); self.warm_root_session(&mut requester).await;
let text = match requester.get(&video_url, Some(Version::HTTP_2)).await { let referer = self.page_family_referer(&video_url);
let text = match self
.fetch_html(
&mut requester,
&video_url,
Some(&referer),
"same-origin",
)
.await
{
Ok(text) => text, Ok(text) => text,
Err(e) => { Err(e) => {
crate::providers::report_provider_error( crate::providers::report_provider_error(
@@ -388,9 +700,7 @@ impl PimpbunnyProvider {
return Ok(old_items); return Ok(old_items);
} }
}; };
let video_items: Vec<VideoItem> = self let video_items = self.get_video_items_from_html(text.clone(), &options);
.get_video_items_from_html(text.clone(), &mut requester)
.await;
if !video_items.is_empty() { if !video_items.is_empty() {
cache.remove(&video_url); cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone()); cache.insert(video_url.clone(), video_items.clone());
@@ -400,11 +710,7 @@ impl PimpbunnyProvider {
Ok(video_items) Ok(video_items)
} }
async fn get_video_items_from_html( fn get_video_items_from_html(&self, html: String, options: &ServerOptions) -> Vec<VideoItem> {
&self,
html: String,
requester: &mut Requester,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") { if html.is_empty() || html.contains("404 Not Found") {
return vec![]; return vec![];
} }
@@ -418,19 +724,42 @@ impl PimpbunnyProvider {
None => return vec![], None => return vec![],
}; };
let futures = block block
.split("<div class=\"col\">") .split("<div class=\"col\">")
.skip(1) .skip(1)
.map(|el| self.get_video_item(el.to_string(), requester.clone())); .filter_map(|el| self.get_video_item(el.to_string(), options).ok())
join_all(futures)
.await
.into_iter() .into_iter()
.filter_map(Result::ok)
.collect() .collect()
} }
async fn get_video_item(&self, seg: String, mut requester: Requester) -> Result<VideoItem> { fn extract_duration_from_segment(&self, seg: &str) -> u32 {
for token in seg.split(|ch: char| ch == '<' || ch == '>' || ch.is_whitespace()) {
let candidate = token.trim();
if candidate.is_empty() || !candidate.contains(':') {
continue;
}
if let Some(parsed) = parse_time_to_seconds(candidate) {
return parsed as u32;
}
}
0
}
fn extract_views_from_segment(&self, seg: &str) -> u32 {
let Some(before_views) = seg.split("Views").next() else {
return 0;
};
let candidate = before_views
.split(|ch: char| ch == '<' || ch == '>' || ch.is_whitespace())
.filter(|value| !value.trim().is_empty())
.next_back()
.unwrap_or("")
.trim_matches(|ch: char| ch == '(' || ch == ')' || ch == ',');
parse_abbreviated_number(candidate).unwrap_or(0)
}
fn get_video_item(&self, seg: String, options: &ServerOptions) -> Result<VideoItem> {
let video_url = seg let video_url = seg
.split(" href=\"") .split(" href=\"")
.nth(1) .nth(1)
@@ -485,64 +814,22 @@ impl PimpbunnyProvider {
.and_then(|s| s.split('"').next()) .and_then(|s| s.split('"').next())
.unwrap_or("") .unwrap_or("")
.to_string(); .to_string();
let proxy_url = self.proxied_video(options, &video_url);
let (tags, formats, views, duration) = let views = self.extract_views_from_segment(&seg);
self.extract_media(&video_url, &mut requester).await?; let duration = self.extract_duration_from_segment(&seg);
let formats = vec![
VideoFormat::new(proxy_url.clone(), "auto".into(), "video/mp4".into())
.format_id("auto".into())
.format_note("proxied".into()),
];
Ok( Ok(
VideoItem::new(id, title, video_url, "pimpbunny".into(), thumb, duration) VideoItem::new(id, title, proxy_url, "pimpbunny".into(), thumb, duration)
.formats(formats) .formats(formats)
.tags(tags)
.preview(preview) .preview(preview)
.views(views), .views(views),
) )
} }
async fn extract_media(
&self,
url: &str,
requester: &mut Requester,
) -> Result<(Vec<String>, Vec<VideoFormat>, u32, u32)> {
let text = requester
.get(url, Some(Version::HTTP_2))
.await
.map_err(|e| Error::from(format!("{}", e)))?;
let json_str = text
.split("application/ld+json\">")
.nth(1)
.and_then(|s| s.split("</script>").next())
.ok_or_else(|| ErrorKind::Parse("ld+json".into()))?;
let json: serde_json::Value = serde_json::from_str(json_str)?;
let video_url = json["contentUrl"].as_str().unwrap_or("").to_string();
let quality = video_url
.split('_')
.last()
.and_then(|s| s.split('.').next())
.unwrap_or("")
.to_string();
let views = json["interactionStatistic"]
.as_array()
.and_then(|a| a.first())
.and_then(|v| v["userInteractionCount"].as_str())
.and_then(|v| v.parse().ok())
.unwrap_or(0);
let duration = json["duration"]
.as_str()
.map(|d| parse_time_to_seconds(&d.replace(['P', 'T', 'H', 'M', 'S'], "")).unwrap_or(0))
.unwrap_or(0) as u32;
Ok((
vec![],
vec![VideoFormat::new(video_url, quality, "video/mp4".into())],
views,
duration,
))
}
} }
#[async_trait] #[async_trait]
@@ -558,6 +845,7 @@ impl Provider for PimpbunnyProvider {
options: ServerOptions, options: ServerOptions,
) -> Vec<VideoItem> { ) -> Vec<VideoItem> {
let page = page.parse::<u8>().unwrap_or(1); let page = page.parse::<u8>().unwrap_or(1);
let thumb_options = options.clone();
let res = match query { let res = match query {
Some(q) => self.to_owned().query(cache, page, &q, options).await, Some(q) => self.to_owned().query(cache, page, &q, options).await,
@@ -568,9 +856,226 @@ impl Provider for PimpbunnyProvider {
eprintln!("pimpbunny error: {e}"); eprintln!("pimpbunny error: {e}");
vec![] vec![]
}) })
.into_iter()
.map(|mut item| {
if !item.thumb.is_empty() {
item.thumb = self.proxied_thumb(&thumb_options, &item.thumb);
}
item
})
.collect()
} }
fn get_channel(&self, v: ClientVersion) -> Option<Channel> { fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
Some(self.build_channel(v)) Some(self.build_channel(v))
} }
} }
#[cfg(test)]
mod tests {
use super::PimpbunnyProvider;
use crate::videos::ServerOptions;
use std::sync::{Arc, RwLock};
fn test_provider() -> PimpbunnyProvider {
PimpbunnyProvider {
url: "https://pimpbunny.com".to_string(),
stars: Arc::new(RwLock::new(vec![])),
categories: Arc::new(RwLock::new(vec![])),
}
}
#[test]
fn rewrites_allowed_thumbs_to_proxy_urls() {
let provider = test_provider();
let options = ServerOptions {
featured: None,
category: None,
sites: None,
filter: None,
language: None,
public_url_base: Some("https://example.com".to_string()),
requester: None,
network: None,
stars: None,
categories: None,
duration: None,
sort: None,
sexuality: None,
};
let proxied = provider.proxied_thumb(
&options,
"https://pimpbunny.com/contents/videos_screenshots/517000/517329/800x450/1.jpg",
);
assert_eq!(
proxied,
"https://example.com/proxy/pimpbunny-thumb/pimpbunny.com/contents/videos_screenshots/517000/517329/800x450/1.jpg"
);
}
#[test]
fn rewrites_video_pages_to_redirect_proxy() {
let provider = test_provider();
let options = ServerOptions {
featured: None,
category: None,
sites: None,
filter: None,
language: None,
public_url_base: Some("https://example.com".to_string()),
requester: None,
network: None,
stars: None,
categories: None,
duration: None,
sort: None,
sexuality: None,
};
let proxied = provider.proxied_video(
&options,
"https://pimpbunny.com/videos/example-video/",
);
assert_eq!(
proxied,
"https://example.com/proxy/pimpbunny/pimpbunny.com/videos/example-video/"
);
}
#[test]
fn parses_listing_without_detail_requests() {
let provider = test_provider();
let options = ServerOptions {
featured: None,
category: None,
sites: None,
filter: None,
language: None,
public_url_base: Some("https://example.com".to_string()),
requester: None,
network: None,
stars: None,
categories: None,
duration: None,
sort: None,
sexuality: None,
};
let html = r#"
<div class="videos_videos_list"></div>
<div class="videos_videos_list">
<div class="col">
<a href="https://pimpbunny.com/videos/example-video/">
<div class="card-thumbnail">
<img src="https://pimpbunny.com/contents/videos_screenshots/517000/517329/800x450/1.jpg" data-preview="https://pimpbunny.com/preview.mp4" />
</div>
<div class="card-title">sample &amp; title</div>
<div>12:34</div>
<div>1.2K Views</div>
</a>
</div>
-pagination-wrapper
"#;
let items = provider.get_video_items_from_html(html.to_string(), &options);
assert_eq!(items.len(), 1);
assert_eq!(
items[0].url,
"https://example.com/proxy/pimpbunny/pimpbunny.com/videos/example-video/"
);
assert_eq!(items[0].duration, 754);
assert_eq!(items[0].views, Some(1200));
assert_eq!(items[0].formats.as_ref().map(|f| f.len()), Some(1));
}
#[test]
fn extracts_cloudflare_challenge_path() {
let html = r#"
<script type="text/javascript">
(function(){
window._cf_chl_opt = {
cUPMDTk:"/?mode=async&function=get_block&block_id=videos_videos_list&videos_per_page=8&sort_by=post_date&from=1&__cf_chl_tk=test-token"
};
}());
</script>
"#;
assert!(PimpbunnyProvider::is_cloudflare_challenge(html));
assert_eq!(
PimpbunnyProvider::extract_challenge_path(html).as_deref(),
Some(
"/?mode=async&function=get_block&block_id=videos_videos_list&videos_per_page=8&sort_by=post_date&from=1&__cf_chl_tk=test-token"
)
);
}
#[test]
fn builds_async_browse_url_instead_of_numbered_videos_path() {
let provider = test_provider();
assert_eq!(
provider.build_browse_url(1, "most recent"),
"https://pimpbunny.com/videos/?sort_by=post_date"
);
assert_eq!(
provider.build_browse_url(2, "most recent"),
"https://pimpbunny.com/videos/2/?sort_by=post_date"
);
}
#[test]
fn builds_search_url_with_query_and_pagination() {
let provider = test_provider();
assert_eq!(
provider.build_search_url("adriana chechik", 1, "most viewed"),
"https://pimpbunny.com/search/adriana-chechik/?sort_by=video_viewed"
);
assert_eq!(
provider.build_search_url("adriana chechik", 3, "most viewed"),
"https://pimpbunny.com/search/adriana-chechik/3/?sort_by=video_viewed"
);
}
#[test]
fn builds_common_archive_url_with_async_block() {
let provider = test_provider();
assert_eq!(
provider.build_common_archive_url("/categories/amateur/", 1, "best rated"),
"https://pimpbunny.com/categories/amateur/?sort_by=rating"
);
assert_eq!(
provider.build_common_archive_url("/categories/amateur/", 4, "best rated"),
"https://pimpbunny.com/categories/amateur/4/?sort_by=rating"
);
}
#[test]
fn derives_page_family_referer() {
let provider = test_provider();
assert_eq!(
provider.page_family_referer("https://pimpbunny.com/videos/2/?sort_by=post_date"),
"https://pimpbunny.com/videos/"
);
assert_eq!(
provider.page_family_referer(
"https://pimpbunny.com/categories/blowjob/2/?sort_by=post_date"
),
"https://pimpbunny.com/categories/blowjob/"
);
assert_eq!(
provider.page_family_referer(
"https://pimpbunny.com/search/adriana-chechik/3/?sort_by=video_viewed"
),
"https://pimpbunny.com/search/adriana-chechik/"
);
assert_eq!(
provider.page_family_referer(
"https://pimpbunny.com/onlyfans-models/momoitenshi/3/?sort_by=post_date"
),
"https://pimpbunny.com/onlyfans-models/momoitenshi/"
);
}
}

View File

@@ -14,6 +14,12 @@ use std::sync::{Arc, RwLock};
use std::vec; use std::vec;
use url::form_urlencoded::Serializer; use url::form_urlencoded::Serializer;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "pmv-compilation",
tags: &["pmv", "music", "compilation"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -11,6 +11,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["tube", "hd", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

650
src/providers/porn4fans.rs Normal file
View File

@@ -0,0 +1,650 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{ICodedDataTrait, decode};
use regex::Regex;
use scraper::{Html, Selector};
use std::collections::HashSet;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "onlyfans",
tags: &["creator", "premium", "clips"],
};
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
#[derive(Debug, Clone)]
pub struct Porn4fansProvider {
url: String,
}
#[derive(Debug, Clone)]
struct Porn4fansCard {
id: String,
title: String,
page_url: String,
thumb: String,
duration: u32,
views: Option<u32>,
rating: Option<f32>,
}
impl Porn4fansProvider {
pub fn new() -> Self {
Self {
url: "https://www.porn4fans.com".to_string(),
}
}
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
Channel {
id: "porn4fans".to_string(),
name: "Porn4Fans".to_string(),
description: "OnlyFans porn videos.".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=www.porn4fans.com"
.to_string(),
status: "active".to_string(),
categories: vec![],
options: vec![],
nsfw: true,
cacheDuration: Some(1800),
}
}
fn sort_by(sort: &str) -> &'static str {
match sort {
"popular" => "video_viewed",
_ => "post_date",
}
}
fn build_latest_url(&self, page: u32, sort: &str) -> String {
format!(
"{}/latest-updates/?mode=async&function=get_block&block_id=custom_list_videos_latest_videos_list&sort_by={}&from={page}",
self.url,
Self::sort_by(sort)
)
}
fn build_latest_headers(&self) -> Vec<(String, String)> {
vec![(
"Referer".to_string(),
format!("{}/latest-updates/", self.url),
)]
}
fn build_search_path_query(query: &str, separator: &str) -> String {
query.split_whitespace().collect::<Vec<_>>().join(separator)
}
fn build_search_url(&self, query: &str, page: u32, sort: &str) -> String {
let query_param = Self::build_search_path_query(query, "+");
let path_query = Self::build_search_path_query(query, "-");
format!(
"{}/search/{path_query}/?mode=async&function=get_block&block_id=custom_list_videos_videos_list_search_result&q={query_param}&sort_by={}&from_videos={page}",
self.url,
Self::sort_by(sort)
)
}
fn build_search_headers(&self, query: &str) -> Vec<(String, String)> {
let path_query = Self::build_search_path_query(query, "-");
vec![(
"Referer".to_string(),
format!("{}/search/{path_query}/", self.url),
)]
}
/// Fetches the latest-videos listing for `page` under `sort`.
///
/// Cache contract: entries younger than five minutes are served directly;
/// a stale entry is kept as a fallback and is only replaced when the fresh
/// fetch parses at least one item. All failure modes report the error and
/// return the (possibly empty) stale items rather than an `Err`.
async fn get(
    &self,
    cache: VideoCache,
    page: u32,
    sort: &str,
    options: ServerOptions,
) -> Result<Vec<VideoItem>> {
    let video_url = self.build_latest_url(page, sort);
    let old_items = match cache.get(&video_url) {
        Some((time, items)) => {
            // Fresh enough: serve the cached page without a network request.
            if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                return Ok(items.clone());
            }
            // Stale: remember it so a failed refresh still returns data.
            items.clone()
        }
        None => vec![],
    };
    let mut requester =
        requester_or_default(&options, "porn4fans", "porn4fans.get.missing_requester");
    let text = match requester
        .get_with_headers(&video_url, self.build_latest_headers(), None)
        .await
    {
        Ok(text) => text,
        Err(e) => {
            // Degrade gracefully: report, then fall back to stale items.
            report_provider_error(
                "porn4fans",
                "get.request",
                &format!("url={video_url}; error={e}"),
            )
            .await;
            return Ok(old_items);
        }
    };
    if text.trim().is_empty() {
        report_provider_error(
            "porn4fans",
            "get.empty_response",
            &format!("url={video_url}"),
        )
        .await;
        return Ok(old_items);
    }
    let video_items = self.get_video_items_from_html(text, requester).await;
    if !video_items.is_empty() {
        // Only replace the cache entry on a non-empty parse, so a site
        // layout change does not wipe previously good results.
        cache.remove(&video_url);
        cache.insert(video_url.clone(), video_items.clone());
        return Ok(video_items);
    }
    Ok(old_items)
}
/// Search counterpart of `get`: identical cache/fallback behavior, but
/// hits the search endpoint built from `query`.
async fn query(
    &self,
    cache: VideoCache,
    page: u32,
    query: &str,
    sort: &str,
    options: ServerOptions,
) -> Result<Vec<VideoItem>> {
    let video_url = self.build_search_url(query, page, sort);
    let old_items = match cache.get(&video_url) {
        Some((time, items)) => {
            // Five-minute freshness window, same as `get`.
            if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                return Ok(items.clone());
            }
            items.clone()
        }
        None => vec![],
    };
    let mut requester =
        requester_or_default(&options, "porn4fans", "porn4fans.query.missing_requester");
    let text = match requester
        .get_with_headers(&video_url, self.build_search_headers(query), None)
        .await
    {
        Ok(text) => text,
        Err(e) => {
            report_provider_error(
                "porn4fans",
                "query.request",
                &format!("url={video_url}; error={e}"),
            )
            .await;
            return Ok(old_items);
        }
    };
    if text.trim().is_empty() {
        report_provider_error(
            "porn4fans",
            "query.empty_response",
            &format!("url={video_url}"),
        )
        .await;
        return Ok(old_items);
    }
    let video_items = self.get_video_items_from_html(text, requester).await;
    if !video_items.is_empty() {
        cache.remove(&video_url);
        cache.insert(video_url.clone(), video_items.clone());
        return Ok(video_items);
    }
    Ok(old_items)
}
/// Returns the slice of `text` between the first occurrence of `start`
/// and the next occurrence of `end` after it. `None` when `start` is
/// absent; if `end` is absent, everything after `start` is returned.
fn extract_between<'a>(text: &'a str, start: &str, end: &str) -> Option<&'a str> {
    let mut segments = text.split(start);
    segments.next()?; // discard everything before the first marker
    let after_start = segments.next()?;
    after_start.split(end).next()
}
/// Scans `attrs` in priority order and returns the first attribute whose
/// value (text up to the closing quote) is non-empty after trimming.
fn first_non_empty_attr(segment: &str, attrs: &[&str]) -> Option<String> {
    for attr in attrs {
        if let Some(raw) = Self::extract_between(segment, attr, "\"") {
            let trimmed = raw.trim();
            if !trimmed.is_empty() {
                return Some(trimmed.to_string());
            }
        }
    }
    None
}
/// Resolves absolute, protocol-relative, root-relative, and relative URLs
/// against the site root.
fn normalize_url(&self, url: &str) -> String {
    let absolute = url.starts_with("http://") || url.starts_with("https://");
    if absolute {
        url.to_string()
    } else if url.starts_with("//") {
        // Protocol-relative: the site is HTTPS.
        format!("https:{url}")
    } else if url.starts_with('/') {
        format!("{}{}", self.url, url)
    } else {
        format!("{}/{}", self.url, url.trim_start_matches("./"))
    }
}
/// Picks the best thumbnail attribute out of a card fragment and
/// normalizes it. Inline `data:image/` placeholders (lazy-load stubs)
/// yield an empty string so callers fall through to no thumbnail.
fn extract_thumb_url(&self, segment: &str) -> String {
    const THUMB_ATTRS: [&str; 5] = [
        "data-original=\"",
        "data-webp=\"",
        "srcset=\"",
        "src=\"",
        "poster=\"",
    ];
    let raw = Self::first_non_empty_attr(segment, &THUMB_ATTRS).unwrap_or_default();
    if raw.starts_with("data:image/") {
        String::new()
    } else {
        self.normalize_url(&raw)
    }
}
/// Unescapes JSON-style `\/` sequences and HTML `&amp;` entities.
fn decode_escaped_text(text: &str) -> String {
    let without_slashes = text.replace("\\/", "/");
    without_slashes.replace("&amp;", "&")
}
/// HTML-entity-decodes `text` (falling back to the raw input on decode
/// failure) and collapses every run of whitespace to a single space.
fn decode_html_text(text: &str) -> String {
    let decoded = decode(text.as_bytes())
        .to_string()
        .unwrap_or_else(|_| text.to_string());
    let words: Vec<&str> = decoded.split_whitespace().collect();
    words.join(" ").trim().to_string()
}
fn strip_tags(text: &str) -> String {
Regex::new(r"(?is)<[^>]+>")
.ok()
.map(|regex| regex.replace_all(text, "").to_string())
.unwrap_or_else(|| text.to_string())
}
/// Appends the trimmed `value` to `values` unless it is empty or already
/// present (compared ASCII-case-insensitively).
fn push_unique_tag(values: &mut Vec<String>, value: String) {
    let trimmed = value.trim();
    if trimmed.is_empty() {
        return;
    }
    let already_present = values
        .iter()
        .any(|existing| existing.eq_ignore_ascii_case(trimmed));
    if !already_present {
        values.push(trimmed.to_string());
    }
}
/// Parses the eye-icon view counter (e.g. "14K") out of a card fragment.
fn extract_views(text: &str) -> Option<u32> {
    let regex = Regex::new(r"(?i)<svg[^>]+icon-eye[^>]*>.*?</svg>\s*<span>([^<]+)</span>").ok()?;
    let captures = regex.captures(text)?;
    let raw = captures.get(1)?.as_str();
    parse_abbreviated_number(raw.trim())
}
/// Parses the like-icon percentage (e.g. "66%") out of a card fragment.
fn extract_rating(text: &str) -> Option<f32> {
    let regex =
        Regex::new(r"(?i)<svg[^>]+icon-like[^>]*>.*?</svg>\s*<span>([^<%]+)%</span>").ok()?;
    let captures = regex.captures(text)?;
    let raw = captures.get(1)?.as_str();
    raw.trim().parse::<f32>().ok()
}
/// Scans a video page for a direct `.mp4` URL under one of the known
/// player-config keys, trying the keys in priority order.
///
/// Returns the first match, or `None` when no key yields a URL.
fn extract_direct_video_url_from_page(text: &str) -> Option<String> {
    let decoded = Self::decode_escaped_text(text);
    for key in ["video_url", "video_alt_url", "contentUrl"] {
        let pattern = format!(
            r#"(?is)(?:^|[{{\s,])["']?{}["']?\s*[:=]\s*["'](?P<url>https?://[^"'<>]+?\.mp4)"#,
            regex::escape(key)
        );
        // A pattern that fails to compile must not abort the whole scan:
        // the previous `.ok()?` returned early and silently skipped the
        // remaining keys. Try the next key instead.
        let Ok(regex) = Regex::new(&pattern) else {
            continue;
        };
        if let Some(url) = regex
            .captures(&decoded)
            .and_then(|captures| captures.name("url"))
            .map(|value| value.as_str().to_string())
        {
            return Some(url);
        }
    }
    None
}
/// Extracts the cleaned, de-duplicated text of every element matching
/// `selector`; an invalid selector yields an empty list.
fn collect_texts(document: &Html, selector: &str) -> Vec<String> {
    let parsed = match Selector::parse(selector) {
        Ok(parsed) => parsed,
        Err(_) => return vec![],
    };
    let mut values = Vec::new();
    for element in document.select(&parsed) {
        let raw_text = element.text().collect::<Vec<_>>().join(" ");
        let cleaned = Self::decode_html_text(&Self::strip_tags(&raw_text));
        Self::push_unique_tag(&mut values, cleaned);
    }
    values
}
/// Pulls model names plus combined category/tag labels from a video page.
/// Tags are merged into the category list with case-insensitive dedup.
fn extract_page_models_and_categories(text: &str) -> (Vec<String>, Vec<String>) {
    let document = Html::parse_document(text);
    let models = Self::collect_texts(&document, ".player-models-list a[href*=\"/models/\"]");
    let mut categories =
        Self::collect_texts(&document, ".categories-row a[href*=\"/categories/\"]");
    let tags = Self::collect_texts(&document, ".tags-row a[href*=\"/tags/\"]");
    for tag in tags {
        Self::push_unique_tag(&mut categories, tag);
    }
    (models, categories)
}
/// Parses listing HTML into lightweight card structs using a single
/// anchor-matching regex (the site's cards are `<a class="item-link" …>`
/// elements). Duplicate video ids are skipped; parse failures on a
/// field degrade to defaults rather than dropping the card.
fn parse_video_cards_from_html(&self, html: &str) -> Vec<Porn4fansCard> {
    if html.trim().is_empty() {
        return vec![];
    }
    let Ok(link_re) = Regex::new(
        r#"(?is)<a[^>]+class="item-link"[^>]+href="(?P<href>[^"]+/video/(?P<id>\d+)/[^"]+)"[^>]+title="(?P<title>[^"]+)"[^>]*>(?P<body>.*?)</a>"#,
    ) else {
        return vec![];
    };
    let mut items = Vec::new();
    let mut seen = HashSet::new();
    for captures in link_re.captures_iter(html) {
        let Some(id) = captures.name("id").map(|m| m.as_str().to_string()) else {
            continue;
        };
        // The same video can appear in several page sections; keep the first.
        if !seen.insert(id.clone()) {
            continue;
        }
        let href = captures
            .name("href")
            .map(|m| self.normalize_url(m.as_str()))
            .unwrap_or_default();
        let title_raw = captures
            .name("title")
            .map(|m| m.as_str())
            .unwrap_or_default();
        // Titles come HTML-entity-encoded in the attribute; decode, but fall
        // back to the raw text if decoding fails.
        let title = decode(title_raw.as_bytes())
            .to_string()
            .unwrap_or_else(|_| title_raw.to_string());
        let body = captures
            .name("body")
            .map(|m| m.as_str())
            .unwrap_or_default();
        let thumb = self.extract_thumb_url(body);
        // Duration lives in a `<div class="duration">MM:SS<` fragment.
        let duration_raw = Self::extract_between(body, "<div class=\"duration\">", "<")
            .unwrap_or_default()
            .trim()
            .to_string();
        let duration = parse_time_to_seconds(&duration_raw).unwrap_or(0) as u32;
        let views = Self::extract_views(body).unwrap_or(0);
        let rating = Self::extract_rating(body);
        items.push(Porn4fansCard {
            id,
            title,
            page_url: href,
            thumb,
            duration,
            // Zero views is treated as "unknown" rather than literal zero.
            views: (views > 0).then_some(views),
            rating,
        });
    }
    items
}
/// Fetches a card's video page to resolve the direct mp4 URL plus model
/// and category metadata, then builds the final `VideoItem`.
///
/// Network failure is non-fatal: the item then points at the page URL
/// and carries no models/categories.
async fn enrich_video_card(
    &self,
    card: Porn4fansCard,
    mut requester: crate::util::requester::Requester,
) -> VideoItem {
    let direct_url = requester
        .get_with_headers(
            &card.page_url,
            vec![("Referer".to_string(), format!("{}/", self.url))],
            None,
        )
        .await
        .ok();
    let (direct_url, models, categories) = match direct_url {
        Some(text) => {
            // Fall back to the page URL when no direct mp4 is found.
            let url = Self::extract_direct_video_url_from_page(&text)
                .unwrap_or_else(|| card.page_url.clone());
            let (models, categories) = Self::extract_page_models_and_categories(&text);
            (url, models, categories)
        }
        None => (card.page_url.clone(), vec![], vec![]),
    };
    let mut item = VideoItem::new(
        card.id,
        card.title,
        direct_url,
        "porn4fans".to_string(),
        card.thumb,
        card.duration,
    );
    if let Some(views) = card.views {
        item = item.views(views);
    }
    if let Some(rating) = card.rating {
        item = item.rating(rating);
    }
    // Only the first listed model is surfaced as the uploader.
    if let Some(model) = models.first() {
        item = item.uploader(model.clone());
    }
    item = item.tags(categories);
    item
}
/// Parses the listing HTML into cards and enriches all of them
/// concurrently, each with its own clone of the requester.
async fn get_video_items_from_html(
    &self,
    html: String,
    requester: crate::util::requester::Requester,
) -> Vec<VideoItem> {
    let enrichments: Vec<_> = self
        .parse_video_cards_from_html(&html)
        .into_iter()
        .map(|card| self.enrich_video_card(card, requester.clone()))
        .collect();
    join_all(enrichments).await
}
}
#[async_trait]
impl Provider for Porn4fansProvider {
    /// Registry entry point: routes non-empty search queries to `query`
    /// and everything else to the listing fetch; errors are reported and
    /// resolved to an empty result.
    async fn get_videos(
        &self,
        cache: VideoCache,
        pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        // Unused by this provider; accepted for trait parity.
        let _ = (pool, per_page);
        let page = page.parse::<u32>().unwrap_or(1);
        let search_term = query.filter(|term| !term.trim().is_empty());
        let outcome = match search_term {
            Some(term) => self.query(cache, page, &term, &sort, options).await,
            None => self.get(cache, page, &sort, options).await,
        };
        match outcome {
            Ok(videos) => videos,
            Err(e) => {
                report_provider_error(
                    "porn4fans",
                    "get_videos",
                    &format!("page={page}; error={e}"),
                )
                .await;
                Vec::new()
            }
        }
    }
    /// Every client version sees the same channel definition.
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}
#[cfg(test)]
mod tests {
    use super::Porn4fansProvider;
    // URL builders must reproduce the site's async block endpoints exactly.
    #[test]
    fn builds_latest_url_with_custom_block_id() {
        let provider = Porn4fansProvider::new();
        assert_eq!(
            provider.build_latest_url(2, "new"),
            "https://www.porn4fans.com/latest-updates/?mode=async&function=get_block&block_id=custom_list_videos_latest_videos_list&sort_by=post_date&from=2"
        );
    }
    // Search URLs carry the query twice: dash-joined path, plus-joined `q`.
    #[test]
    fn builds_search_url_with_custom_block_id() {
        let provider = Porn4fansProvider::new();
        assert_eq!(
            provider.build_search_url("big black cock", 3, "popular"),
            "https://www.porn4fans.com/search/big-black-cock/?mode=async&function=get_block&block_id=custom_list_videos_videos_list_search_result&q=big+black+cock&sort_by=video_viewed&from_videos=3"
        );
    }
    // Fixture copied from a real search-result block: the lazy-load `src`
    // is a data: URI stub, so the thumb must come from `data-original`.
    #[test]
    fn parses_porn4fans_search_markup() {
        let provider = Porn4fansProvider::new();
        let html = r##"
        <div class="thumbs second grid-1" id="custom_list_videos_videos_list_search_result_items">
        <div class="item">
        <a class="item-link" href="https://www.porn4fans.com/video/10194/horny-police-officer-melztube-gets-banged-by-bbc/" title="Horny Police Officer Melztube Gets Banged By BBC">
        <div class="img-wrap">
        <div class="duration">23:47</div>
        <picture>
        <source srcset="https://www.porn4fans.com/contents/videos_screenshots/10000/10194/800x450/1.jpg" type="image/webp">
        <img class="thumb lazy-load" src="data:image/gif;base64,AAAA" data-original="https://www.porn4fans.com/contents/videos_screenshots/10000/10194/800x450/1.jpg" data-webp="https://www.porn4fans.com/contents/videos_screenshots/10000/10194/800x450/1.jpg" data-preview="https://www.porn4fans.com/get_file/3/9df8de1fc2da5dfcbf9a4ad512dc8f306c4997e60f/10000/10194/10194_preview_high.mp4/" alt="Horny Police Officer Melztube Gets Banged By BBC" />
        </picture>
        </div>
        <div class="video-text">Horny Police Officer Melztube Gets Banged By BBC</div>
        <ul class="video-items">
        <li class="video-item">
        <svg class="svg-icon icon-eye"><use xlink:href="#icon-eye"></use></svg>
        <span>14K</span>
        </li>
        <li class="video-item rating">
        <svg class="svg-icon icon-like"><use xlink:href="#icon-like"></use></svg>
        <span>66%</span>
        </li>
        <li class="video-item">
        <span>2 weeks ago</span>
        </li>
        </ul>
        </a>
        </div>
        </div>
        "##;
        let items = provider.parse_video_cards_from_html(html);
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].id, "10194");
        assert_eq!(
            items[0].page_url,
            "https://www.porn4fans.com/video/10194/horny-police-officer-melztube-gets-banged-by-bbc/"
        );
        assert_eq!(
            items[0].thumb,
            "https://www.porn4fans.com/contents/videos_screenshots/10000/10194/800x450/1.jpg"
        );
        assert_eq!(items[0].duration, 1427);
        assert_eq!(items[0].views, Some(14_000));
        assert_eq!(items[0].rating, Some(66.0));
    }
    // The flashvars block uses escaped slashes; the extractor must unescape
    // them and stop the match at `.mp4` (dropping the trailing slash).
    #[test]
    fn extracts_direct_video_url_from_video_page() {
        let html = r#"
        <script>
        var flashvars = {
        video_url: 'https:\/\/www.porn4fans.com\/get_file\/3\/9df8de1fc2da5dfcbf9a4ad512dc8f306c4997e60f\/10000\/10951\/10951.mp4\/',
        video_alt_url: 'https:\/\/www.porn4fans.com\/get_file\/3\/9df8de1fc2da5dfcbf9a4ad512dc8f306c4997e60f\/10000\/10951\/10951_720p.mp4\/'
        };
        </script>
        "#;
        assert_eq!(
            Porn4fansProvider::extract_direct_video_url_from_page(html).as_deref(),
            Some(
                "https://www.porn4fans.com/get_file/3/9df8de1fc2da5dfcbf9a4ad512dc8f306c4997e60f/10000/10951/10951.mp4"
            )
        );
    }
    // Categories and tags are merged into one list, in document order.
    #[test]
    fn extracts_models_and_categories_from_video_page() {
        let html = r#"
        <div class="player-models-list">
        <div class="player-model-item">
        <a href="/models/piper-rockelle/"><span class="player-model-name">Piper Rockelle</span></a>
        </div>
        </div>
        <ul class="categories-row">
        <li class="visible"><a href="/categories/striptease/">Striptease</a></li>
        <li class="visible"><a href="/categories/teen/">Teen</a></li>
        </ul>
        <ul class="tags-row">
        <li class="visible"><a href="/tags/bathroom/">Bathroom</a></li>
        </ul>
        "#;
        let (models, categories) = Porn4fansProvider::extract_page_models_and_categories(html);
        assert_eq!(models, vec!["Piper Rockelle".to_string()]);
        assert_eq!(
            categories,
            vec![
                "Striptease".to_string(),
                "Teen".to_string(),
                "Bathroom".to_string()
            ]
        );
    }
}

1482
src/providers/porndish.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -11,6 +11,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["tube", "hd", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

1380
src/providers/pornhd3x.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -12,6 +12,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["mainstream", "studio", "general"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

1241
src/providers/pornmz.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -11,6 +11,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["tube", "clips", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -12,6 +12,12 @@ use htmlentity::entity::{ICodedDataTrait, decode};
use serde_json::Value; use serde_json::Value;
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["mainstream", "legacy", "general"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -11,6 +11,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "ai",
tags: &["rule34", "ai-generated", "animation"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -13,6 +13,12 @@ use htmlentity::entity::{ICodedDataTrait, decode};
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "hentai-animation",
tags: &["rule34", "animation", "fandom"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

1336
src/providers/sextb.rs Normal file

File diff suppressed because it is too large Load Diff

1436
src/providers/shooshtime.rs Normal file

File diff suppressed because it is too large Load Diff

752
src/providers/spankbang.rs Normal file
View File

@@ -0,0 +1,752 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use scraper::{ElementRef, Html, Selector};
use url::form_urlencoded::byte_serialize;
/// Grouping/tag metadata consumed by the provider registry when
/// categorizing this channel in client listings.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["mainstream", "mixed", "search"],
    };
// Module error type: error_chain generates a `Result` alias wrapping I/O
// and HTTP-client errors so `?` works on both.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(wreq::Error);
    }
}
/// Scraper-backed provider for spankbang.com listing and search pages.
#[derive(Debug, Clone)]
pub struct SpankbangProvider {
    // Site root without a trailing slash; all request URLs derive from it.
    url: String,
}
impl SpankbangProvider {
/// Creates a provider rooted at the production site URL.
pub fn new() -> Self {
    let url = String::from("https://spankbang.com");
    Self { url }
}
/// Builds the static channel descriptor advertised to clients, including
/// the single-select sort filter the UI renders.
///
/// `_clientversion` is accepted for interface parity but unused here.
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
    Channel {
        id: "spankbang".to_string(),
        name: "SpankBang".to_string(),
        description: "Porn videos, trending searches, and featured scenes.".to_string(),
        premium: false,
        // Favicon served via Google's favicon proxy.
        favicon: "https://www.google.com/s2/favicons?sz=64&domain=spankbang.com".to_string(),
        status: "active".to_string(),
        categories: vec![],
        // These ids feed normalize_get_sort / normalize_query_sort.
        options: vec![ChannelOption {
            id: "sort".to_string(),
            title: "Sort".to_string(),
            description: "Sort the videos".to_string(),
            systemImage: "list.number".to_string(),
            colorName: "blue".to_string(),
            options: vec![
                FilterOption {
                    id: "trending".to_string(),
                    title: "Trending".to_string(),
                },
                FilterOption {
                    id: "upcoming".to_string(),
                    title: "Upcoming".to_string(),
                },
                FilterOption {
                    id: "new".to_string(),
                    title: "New".to_string(),
                },
                FilterOption {
                    id: "popular".to_string(),
                    title: "Popular".to_string(),
                },
                FilterOption {
                    id: "featured".to_string(),
                    title: "Featured".to_string(),
                },
            ],
            multiSelect: false,
        }],
        nsfw: true,
        // Client-side cache hint, in seconds.
        cacheDuration: Some(1800),
    }
}
/// Whitelists the listing sort key; unknown values fall back to "trending".
fn normalize_get_sort(sort: &str) -> &'static str {
    for allowed in ["upcoming", "new", "popular"] {
        if sort == allowed {
            return allowed;
        }
    }
    "trending"
}
/// Whitelists the search sort key; unknown values fall back to "trending".
fn normalize_query_sort(sort: &str) -> &'static str {
    for allowed in ["new", "popular", "featured"] {
        if sort == allowed {
            return allowed;
        }
    }
    "trending"
}
/// Percent-encodes each whitespace-separated word and joins them with a
/// literal `+`, matching the site's search-path shape.
fn encode_search_query(query: &str) -> String {
    let mut encoded_parts = Vec::new();
    for part in query.split_whitespace() {
        let encoded: String = byte_serialize(part.as_bytes()).collect();
        encoded_parts.push(encoded);
    }
    encoded_parts.join("+")
}
/// Builds the listing URL for the normalized sort. Pages after the first
/// get a `/{page}/` segment; "popular" additionally carries `?p=w`.
fn build_get_url(&self, page: u32, sort: &str) -> String {
    let (path, query) = match Self::normalize_get_sort(sort) {
        "upcoming" => ("upcoming", ""),
        "new" => ("new_videos", ""),
        "popular" => ("most_popular", "?p=w"),
        _ => ("trending_videos", ""),
    };
    if page > 1 {
        format!("{}/{path}/{page}/{query}", self.url)
    } else {
        format!("{}/{path}/{query}", self.url)
    }
}
/// Referer header sent with every listing/search request.
fn request_headers(&self) -> Vec<(String, String)> {
    let referer = format!("{}/", self.url);
    vec![("Referer".to_string(), referer)]
}
/// Builds the search URL; pages after the first add a `/{page}/` segment
/// and non-default sorts append an `?o=` parameter.
fn build_query_url(&self, query: &str, page: u32, sort: &str) -> String {
    let encoded_query = Self::encode_search_query(query);
    let mut url = format!("{}/s/{encoded_query}/", self.url);
    if page > 1 {
        url.push_str(&format!("{page}/"));
    }
    let sort_suffix = match Self::normalize_query_sort(sort) {
        "new" => "?o=new",
        "popular" => "?o=popular",
        "featured" => "?o=featured",
        _ => "",
    };
    url.push_str(sort_suffix);
    url
}
/// Resolves absolute, protocol-relative, root-relative, and relative URLs
/// against the site root; empty input stays empty.
fn normalize_url(&self, url: &str) -> String {
    if url.is_empty() {
        return String::new();
    }
    if url.starts_with("http://") || url.starts_with("https://") {
        return url.to_string();
    }
    if let Some(rest) = url.strip_prefix("//") {
        // Protocol-relative: the site is HTTPS.
        return format!("https://{rest}");
    }
    if url.starts_with('/') {
        return format!("{}{}", self.url, url);
    }
    format!("{}/{}", self.url, url.trim_start_matches("./"))
}
/// Rewrites a site URL into our `/proxy/spankbang/{path}` route, prefixed
/// with the public base URL when one is configured.
fn proxy_url(&self, proxy_base_url: &str, url: &str) -> String {
    let relative = url.strip_prefix(&self.url).unwrap_or(url);
    let path = relative.trim_start_matches('/');
    if proxy_base_url.is_empty() {
        format!("/proxy/spankbang/{path}")
    } else {
        let base = proxy_base_url.trim_end_matches('/');
        format!("{base}/proxy/spankbang/{path}")
    }
}
/// Decodes HTML entities, falling back to the raw input on decode failure.
fn decode_html(text: &str) -> String {
    match decode(text.as_bytes()).to_string() {
        Ok(decoded) => decoded,
        Err(_) => text.to_string(),
    }
}
/// Collapses every run of whitespace to a single space and trims the ends.
fn collapse_whitespace(text: &str) -> String {
    let words: Vec<&str> = text.split_whitespace().collect();
    words.join(" ")
}
/// Concatenated, whitespace-collapsed text content of an element.
fn text_of(element: &ElementRef<'_>) -> String {
    let joined = element.text().collect::<Vec<_>>().join(" ");
    Self::collapse_whitespace(&joined)
}
/// Parses a duration label into seconds, returning 0 when unparseable.
///
/// Two formats are handled: colon-separated clock form ("17:30", via
/// `parse_time_to_seconds`) and unit-suffixed form ("1h 17m 30s"). A bare
/// number with no units is treated as seconds.
fn parse_duration(text: &str) -> u32 {
    let raw = Self::collapse_whitespace(text);
    if raw.is_empty() {
        return 0;
    }
    if raw.contains(':') {
        // Clock form; out-of-range values collapse to 0 rather than panic.
        return parse_time_to_seconds(&raw)
            .and_then(|seconds| u32::try_from(seconds).ok())
            .unwrap_or(0);
    }
    // Unit form: accumulate digit runs, multiplying by the unit letter
    // that terminates each run. Unknown unit letters discard their digits.
    let mut total = 0;
    let mut digits = String::new();
    for ch in raw.chars() {
        if ch.is_ascii_digit() {
            digits.push(ch);
            continue;
        }
        if digits.is_empty() {
            continue;
        }
        let value = digits.parse::<u32>().unwrap_or(0);
        match ch.to_ascii_lowercase() {
            'h' => total += value * 3600,
            'm' => total += value * 60,
            's' => total += value,
            _ => {}
        }
        digits.clear();
    }
    // Bare-number fallback. NOTE(review): when units were matched, any
    // trailing unit-less digits ("1h30") are dropped — confirm intended.
    if total == 0 && !digits.is_empty() {
        digits.parse::<u32>().unwrap_or(0)
    } else {
        total
    }
}
/// Parses a percentage label like "96%" into a float; the site's "-"
/// placeholder and empty text yield `None`.
fn parse_rating(text: &str) -> Option<f32> {
    let collapsed = Self::collapse_whitespace(text);
    let cleaned = collapsed.trim_end_matches('%').trim();
    if cleaned.is_empty() || cleaned == "-" {
        None
    } else {
        cleaned.parse::<f32>().ok()
    }
}
/// Converts one scraped card element into a `VideoItem`.
///
/// Returns `None` for paywalled "SpankBang Gold" cards, cards without a
/// `data-id`/video link, or cards with no resolvable title. The video
/// link is rewritten through our proxy route; other fields degrade to
/// defaults when missing. Selectors are passed in precompiled so the
/// caller pays the parse cost once per page, not per card.
fn parse_card(
    &self,
    card: ElementRef<'_>,
    video_link_selector: &Selector,
    title_selector: &Selector,
    thumb_selector: &Selector,
    preview_selector: &Selector,
    length_selector: &Selector,
    views_selector: &Selector,
    rating_selector: &Selector,
    meta_link_selector: &Selector,
    proxy_base_url: &str,
) -> Option<VideoItem> {
    let card_html = card.html();
    let card_text = Self::collapse_whitespace(&card.text().collect::<Vec<_>>().join(" "));
    // Skip paywalled cards — the marker may sit in markup or visible text.
    if card_html.contains("SpankBang Gold") || card_text.contains("SpankBang Gold") {
        return None;
    }
    let id = card.value().attr("data-id")?.to_string();
    let href = card
        .select(video_link_selector)
        .find_map(|link| link.value().attr("href"))
        .map(ToString::to_string)?;
    let thumb = card
        .select(thumb_selector)
        .find_map(|img| img.value().attr("src"))
        .map(|src| self.normalize_url(src))
        .unwrap_or_default();
    // Hover-preview clip, lazy-loaded via data-src.
    let preview = card
        .select(preview_selector)
        .find_map(|source| source.value().attr("data-src"))
        .map(|src| self.normalize_url(src));
    let duration = card
        .select(length_selector)
        .next()
        .map(|element| Self::parse_duration(&Self::text_of(&element)))
        .unwrap_or(0);
    let views = card
        .select(views_selector)
        .next()
        .and_then(|element| parse_abbreviated_number(&Self::text_of(&element)));
    let rating = card
        .select(rating_selector)
        .next()
        .and_then(|element| Self::parse_rating(&Self::text_of(&element)));
    // Title preference: the `title` attribute of the title link, falling
    // back to the thumbnail's `alt` text.
    let title = card
        .select(title_selector)
        .next()
        .and_then(|link| link.value().attr("title"))
        .map(Self::decode_html)
        .unwrap_or_else(|| {
            card.select(thumb_selector)
                .next()
                .and_then(|img| img.value().attr("alt"))
                .map(Self::decode_html)
                .unwrap_or_default()
        });
    if title.is_empty() {
        return None;
    }
    let mut item = VideoItem::new(
        id,
        title,
        // Clients access the video through our proxy route, not directly.
        self.proxy_url(proxy_base_url, &href),
        "spankbang".to_string(),
        thumb,
        duration,
    );
    if let Some(views) = views {
        item = item.views(views);
    }
    if let Some(rating) = rating {
        item = item.rating(rating);
    }
    if let Some(preview) = preview {
        item = item.preview(preview);
    }
    // The badge link carries the uploader/pornstar name and profile URL.
    if let Some(meta_link) = card.select(meta_link_selector).next() {
        let uploader = Self::decode_html(&Self::text_of(&meta_link));
        if !uploader.is_empty() {
            item = item.uploader(uploader);
        }
        if let Some(meta_href) = meta_link.value().attr("href") {
            let uploader_url = self.normalize_url(meta_href);
            if !uploader_url.is_empty() {
                item = item.uploader_url(uploader_url);
            }
        }
    }
    Some(item)
}
/// Parses a full listing/search page into `VideoItem`s.
///
/// When multiple `video-list` containers exist (the header dropdown also
/// renders one), only the LAST container — the primary result list — is
/// scanned; otherwise cards are matched document-wide.
fn get_video_items_from_html(&self, html: String, proxy_base_url: &str) -> Vec<VideoItem> {
    let document = Html::parse_document(&html);
    // These selectors are compile-time constants; `unwrap` cannot fire
    // unless the literals themselves are invalid.
    let video_list_selector = Selector::parse(r#"[data-testid="video-list"]"#).unwrap();
    let card_selector = Selector::parse(r#"[data-testid="video-item"]"#).unwrap();
    let video_link_selector = Selector::parse(r#"a[href*="/video/"]"#).unwrap();
    let title_selector = Selector::parse(r#"a[title]"#).unwrap();
    let thumb_selector = Selector::parse("picture img, img").unwrap();
    let preview_selector = Selector::parse(r#"source[data-src]"#).unwrap();
    let length_selector = Selector::parse(r#"[data-testid="video-item-length"]"#).unwrap();
    let views_selector = Selector::parse(r#"[data-testid="views"]"#).unwrap();
    let rating_selector = Selector::parse(r#"[data-testid="rates"]"#).unwrap();
    let meta_link_selector =
        Selector::parse(r#"[data-testid="video-info-with-badge"] a[data-testid="title"]"#)
            .unwrap();
    let mut items = Vec::new();
    let roots = document.select(&video_list_selector).collect::<Vec<_>>();
    let cards = if let Some(root) = roots.last() {
        root.select(&card_selector).collect::<Vec<_>>()
    } else {
        document.select(&card_selector).collect::<Vec<_>>()
    };
    for card in cards {
        if let Some(item) = self.parse_card(
            card,
            &video_link_selector,
            &title_selector,
            &thumb_selector,
            &preview_selector,
            &length_selector,
            &views_selector,
            &rating_selector,
            &meta_link_selector,
            proxy_base_url,
        ) {
            items.push(item);
        }
    }
    items
}
/// Fetches the listing page for `page` under `sort`.
///
/// Cache contract: entries younger than five minutes are served directly;
/// a stale entry is kept as a fallback and only replaced when the fresh
/// fetch parses at least one item. All failures report the error and
/// return the (possibly empty) stale items rather than an `Err`.
async fn get(
    &self,
    cache: VideoCache,
    page: u32,
    sort: &str,
    options: ServerOptions,
) -> Result<Vec<VideoItem>> {
    let video_url = self.build_get_url(page, sort);
    let old_items = match cache.get(&video_url) {
        Some((time, items)) => {
            // Fresh enough: serve the cached page without a request.
            if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                return Ok(items.clone());
            }
            // Stale: remember it so a failed refresh still returns data.
            items.clone()
        }
        None => vec![],
    };
    let mut requester =
        requester_or_default(&options, "spankbang", "spankbang.get.missing_requester");
    let text = match requester
        .get_with_headers(&video_url, self.request_headers(), None)
        .await
    {
        Ok(text) => text,
        Err(e) => {
            report_provider_error(
                "spankbang",
                "get.request",
                &format!("url={video_url}; error={e}"),
            )
            .await;
            return Ok(old_items);
        }
    };
    if text.trim().is_empty() {
        report_provider_error(
            "spankbang",
            "get.empty_response",
            &format!("url={video_url}"),
        )
        .await;
        return Ok(old_items);
    }
    // Proxy rewrites need the externally reachable base URL, if configured.
    let proxy_base_url = options.public_url_base.as_deref().unwrap_or_default();
    let video_items = self.get_video_items_from_html(text, proxy_base_url);
    if !video_items.is_empty() {
        // Replace the cache only on a non-empty parse, so a site layout
        // change does not wipe previously good results.
        cache.remove(&video_url);
        cache.insert(video_url.clone(), video_items.clone());
        return Ok(video_items);
    }
    Ok(old_items)
}
/// Search counterpart of `get`: identical cache/fallback behavior, but
/// hits the search URL built from `query`.
async fn query(
    &self,
    cache: VideoCache,
    page: u32,
    query: &str,
    sort: &str,
    options: ServerOptions,
) -> Result<Vec<VideoItem>> {
    let video_url = self.build_query_url(query, page, sort);
    let old_items = match cache.get(&video_url) {
        Some((time, items)) => {
            // Five-minute freshness window, same as `get`.
            if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                return Ok(items.clone());
            }
            items.clone()
        }
        None => vec![],
    };
    let mut requester =
        requester_or_default(&options, "spankbang", "spankbang.query.missing_requester");
    let text = match requester
        .get_with_headers(&video_url, self.request_headers(), None)
        .await
    {
        Ok(text) => text,
        Err(e) => {
            report_provider_error(
                "spankbang",
                "query.request",
                &format!("url={video_url}; error={e}"),
            )
            .await;
            return Ok(old_items);
        }
    };
    if text.trim().is_empty() {
        report_provider_error(
            "spankbang",
            "query.empty_response",
            &format!("url={video_url}"),
        )
        .await;
        return Ok(old_items);
    }
    let proxy_base_url = options.public_url_base.as_deref().unwrap_or_default();
    let video_items = self.get_video_items_from_html(text, proxy_base_url);
    if !video_items.is_empty() {
        cache.remove(&video_url);
        cache.insert(video_url.clone(), video_items.clone());
        return Ok(video_items);
    }
    Ok(old_items)
}
}
#[async_trait]
impl Provider for SpankbangProvider {
    /// Registry entry point: routes non-empty search queries to `query`
    /// and everything else to the listing fetch; errors are reported and
    /// resolved to an empty result.
    async fn get_videos(
        &self,
        cache: VideoCache,
        pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        // Unused by this provider; accepted for trait parity.
        let _ = (pool, per_page);
        let page = page.parse::<u32>().unwrap_or(1);
        let search_term = query.filter(|term| !term.trim().is_empty());
        let outcome = match search_term {
            Some(term) => self.query(cache, page, &term, &sort, options).await,
            None => self.get(cache, page, &sort, options).await,
        };
        match outcome {
            Ok(videos) => videos,
            Err(e) => {
                report_provider_error(
                    "spankbang",
                    "get_videos",
                    &format!("page={page}; error={e}"),
                )
                .await;
                Vec::new()
            }
        }
    }
    /// Every client version sees the same channel definition.
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}
#[cfg(test)]
mod tests {
use super::SpankbangProvider;
#[test]
fn builds_top_level_urls() {
let provider = SpankbangProvider::new();
assert_eq!(
provider.build_get_url(1, "trending"),
"https://spankbang.com/trending_videos/"
);
assert_eq!(
provider.build_get_url(2, "upcoming"),
"https://spankbang.com/upcoming/2/"
);
assert_eq!(
provider.build_get_url(2, "new"),
"https://spankbang.com/new_videos/2/"
);
assert_eq!(
provider.build_get_url(2, "popular"),
"https://spankbang.com/most_popular/2/?p=w"
);
assert_eq!(
provider.build_get_url(1, "featured"),
"https://spankbang.com/trending_videos/"
);
}
#[test]
fn builds_search_urls_with_exact_sort_shape() {
let provider = SpankbangProvider::new();
assert_eq!(
provider.build_query_url("adriana chechik", 1, "trending"),
"https://spankbang.com/s/adriana+chechik/"
);
assert_eq!(
provider.build_query_url("adriana chechik", 2, "new"),
"https://spankbang.com/s/adriana+chechik/2/?o=new"
);
assert_eq!(
provider.build_query_url("adriana chechik", 2, "popular"),
"https://spankbang.com/s/adriana+chechik/2/?o=popular"
);
assert_eq!(
provider.build_query_url("adriana chechik", 2, "featured"),
"https://spankbang.com/s/adriana+chechik/2/?o=featured"
);
assert_eq!(
provider.build_query_url("無修正", 1, "trending"),
"https://spankbang.com/s/%E7%84%A1%E4%BF%AE%E6%AD%A3/"
);
assert_eq!(
provider.request_headers(),
vec![("Referer".to_string(), "https://spankbang.com/".to_string())]
);
}
#[test]
fn parses_cards_and_rewrites_to_proxy_url() {
    // Full-field parse of one SpankBang card: the detail link must be
    // rewritten onto this server's /proxy/spankbang/ endpoint, while thumb
    // and preview keep their CDN URLs and the HTML entity in the title
    // (&#39;) is decoded.
    let provider = SpankbangProvider::new();
    let html = r#"
    <div data-testid="video-item" data-id="6597754" class="js-video-item z-0 flex flex-col">
    <a href="/3xeuy/video/adriana+s+fleshlight+insertion" class="relative mb-1 overflow-hidden rounded bg-neutral-900">
    <picture>
    <img
    src="https://tbi.sb-cd.com/t/6597754/6/5/w:300/t6-enh/adriana-s-fleshlight-insertion.jpg"
    alt="Adriana&#39;s Fleshlight Insertion"
    />
    </picture>
    <video>
    <source data-src="https://tbv.sb-cd.com/t/6597754/6/5/td.mp4" type="video/mp4" />
    </video>
    <div data-testid="video-item-length">17m</div>
    </a>
    <div data-testid="video-info-with-badge">
    <div class="flex justify-between">
    <a data-testid="title" href="/76/pornstar/adriana+chechik/">
    <span>Adriana Chechik</span>
    </a>
    <span data-testid="views"><span></span><span>35K</span></span>
    <span data-testid="rates"><span></span><span>96%</span></span>
    </div>
    <p>
    <a href="/3xeuy/video/adriana+s+fleshlight+insertion" title="Adriana&#39;s Fleshlight Insertion">
    <span>Adriana&#39;s Fleshlight Insertion</span>
    </a>
    </p>
    </div>
    </div>
    "#;
    let items = provider.get_video_items_from_html(html.to_string(), "https://example.com");
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].id, "6597754");
    assert_eq!(items[0].title, "Adriana's Fleshlight Insertion");
    assert_eq!(
        items[0].url,
        "https://example.com/proxy/spankbang/3xeuy/video/adriana+s+fleshlight+insertion"
    );
    assert_eq!(
        items[0].thumb,
        "https://tbi.sb-cd.com/t/6597754/6/5/w:300/t6-enh/adriana-s-fleshlight-insertion.jpg"
    );
    assert_eq!(
        items[0].preview,
        Some("https://tbv.sb-cd.com/t/6597754/6/5/td.mp4".to_string())
    );
    // "17m" -> 1020 seconds; "35K" -> 35_000; "96%" -> 96.0.
    assert_eq!(items[0].duration, 1020);
    assert_eq!(items[0].views, Some(35_000));
    assert_eq!(items[0].rating, Some(96.0));
    assert_eq!(items[0].uploader, Some("Adriana Chechik".to_string()));
    // Uploader links stay absolute on spankbang.com (not proxied).
    assert_eq!(
        items[0].uploaderUrl,
        Some("https://spankbang.com/76/pornstar/adriana+chechik/".to_string())
    );
}
#[test]
fn skips_spankbang_gold_cards() {
    // Premium ("SpankBang Gold") cards must be dropped; only the free card
    // survives.
    let provider = SpankbangProvider::new();
    let html = r#"
    <div data-testid="video-item" data-id="1">
    <a href="/gold/video/locked">
    <picture>
    <img src="https://example.com/gold.jpg" alt="Gold video" />
    </picture>
    <div>SpankBang Gold</div>
    <div data-testid="video-item-length">10m</div>
    </a>
    <div data-testid="video-info-with-badge">
    <span data-testid="views"><span>1K</span></span>
    <p><a href="/gold/video/locked" title="Gold video"><span>Gold video</span></a></p>
    </div>
    </div>
    <div data-testid="video-item" data-id="2">
    <a href="/free/video/open">
    <picture>
    <img src="https://example.com/free.jpg" alt="Free video" />
    </picture>
    <div data-testid="video-item-length">5m</div>
    </a>
    <div data-testid="video-info-with-badge">
    <span data-testid="views"><span>2K</span></span>
    <p><a href="/free/video/open" title="Free video"><span>Free video</span></a></p>
    </div>
    </div>
    "#;
    let items = provider.get_video_items_from_html(html.to_string(), "https://example.com");
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].id, "2");
    assert_eq!(items[0].title, "Free video");
}
#[test]
fn prefers_primary_video_list_over_header_dropdown_cards() {
    // When the page contains several video-list containers (e.g. a header
    // dropdown plus the real results), only one list's cards may be
    // returned — here the parser yields the second list's card.
    let provider = SpankbangProvider::new();
    let html = r#"
    <div data-testid="video-list">
    <div data-testid="video-item" data-id="111">
    <a href="/wrong/video/header-card">
    <picture><img src="https://example.com/wrong.jpg" alt="Wrong header card" /></picture>
    <div data-testid="video-item-length">5m</div>
    </a>
    <div data-testid="video-info-with-badge">
    <span data-testid="views"><span>1K</span></span>
    <p><a href="/wrong/video/header-card" title="Wrong header card"><span>Wrong header card</span></a></p>
    </div>
    </div>
    </div>
    <div data-testid="video-list">
    <div data-testid="video-item" data-id="222">
    <a href="/right/video/adriana+chechik">
    <picture><img src="https://example.com/right.jpg" alt="Right result" /></picture>
    <div data-testid="video-item-length">17m</div>
    </a>
    <div data-testid="video-info-with-badge">
    <span data-testid="views"><span>35K</span></span>
    <span data-testid="rates"><span>96%</span></span>
    <p><a href="/right/video/adriana+chechik" title="Right result"><span>Right result</span></a></p>
    </div>
    </div>
    </div>
    "#;
    let items = provider.get_video_items_from_html(html.to_string(), "https://example.com");
    assert_eq!(items.len(), 1);
    assert_eq!(items[0].id, "222");
    assert_eq!(items[0].title, "Right result");
}
}

View File

@@ -15,6 +15,12 @@ use htmlentity::entity::{ICodedDataTrait, decode};
use scraper::{Html, Selector}; use scraper::{Html, Selector};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["tube", "community", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -162,7 +168,7 @@ impl SxyprnProvider {
}; };
// Pass a reference to options if needed, or reconstruct as needed // Pass a reference to options if needed, or reconstruct as needed
let video_items = match self let video_items = match self
.get_video_items_from_html(text.clone(), pool, requester) .get_video_items_from_html(text.clone(), pool, requester, &options)
.await .await
{ {
Ok(items) => items, Ok(items) => items,
@@ -247,7 +253,7 @@ impl SxyprnProvider {
}; };
let video_items = match self let video_items = match self
.get_video_items_from_html(text.clone(), pool, requester) .get_video_items_from_html(text.clone(), pool, requester, &options)
.await .await
{ {
Ok(items) => items, Ok(items) => items,
@@ -284,6 +290,7 @@ impl SxyprnProvider {
html: String, html: String,
_pool: DbPool, _pool: DbPool,
_requester: Requester, _requester: Requester,
options: &ServerOptions,
) -> Result<Vec<VideoItem>> { ) -> Result<Vec<VideoItem>> {
if html.is_empty() { if html.is_empty() {
return Ok(vec![]); return Ok(vec![]);
@@ -313,7 +320,8 @@ impl SxyprnProvider {
.ok_or_else(|| ErrorKind::Parse("failed to extract /post/ url".into()))? .ok_or_else(|| ErrorKind::Parse("failed to extract /post/ url".into()))?
.to_string(); .to_string();
let video_url = format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}", url); let video_url =
crate::providers::build_proxy_url(options, "sxyprn", &format!("post/{}", url));
// title parts // title parts
let title_parts = video_segment let title_parts = video_segment
@@ -421,7 +429,7 @@ impl SxyprnProvider {
.collect::<Vec<String>>(); .collect::<Vec<String>>();
let video_item_url = stream_urls.first().cloned().unwrap_or_else(|| { let video_item_url = stream_urls.first().cloned().unwrap_or_else(|| {
format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}", id) crate::providers::build_proxy_url(options, "sxyprn", &format!("post/{}", id))
}); });
let mut video_item = VideoItem::new( let mut video_item = VideoItem::new(

View File

@@ -12,6 +12,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["mainstream", "legacy", "studio"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -0,0 +1,530 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use regex::Regex;
use url::form_urlencoded::Serializer;
// Grouping/tag metadata consumed by the channel registry for this provider.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "jav",
        tags: &["japanese", "amateur", "jav"],
    };
// Provider-local error type; wraps I/O and HTTP (wreq) errors so `?` works
// on both inside the fetch paths.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(wreq::Error);
    }
}
/// Scraper for www.tokyomotion.net. `url` is the site root, used both to
/// build listing/search URLs and to absolutize relative links.
#[derive(Debug, Clone)]
pub struct TokyomotionProvider {
    url: String,
}
impl TokyomotionProvider {
pub fn new() -> Self {
Self {
url: "https://www.tokyomotion.net".to_string(),
}
}
    /// Static channel descriptor shown to clients: identity, favicon, and a
    /// single non-multi-select "sort" option whose ids are mapped onto the
    /// site's query codes by `sort_code_for_get`/`sort_code_for_query`.
    fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
        Channel {
            id: "tokyomotion".to_string(),
            name: "Tokyo Motion".to_string(),
            description: "Japanese porn videos.".to_string(),
            premium: false,
            // Favicon is fetched via Google's favicon service.
            favicon: "https://www.google.com/s2/favicons?sz=64&domain=www.tokyomotion.net"
                .to_string(),
            status: "active".to_string(),
            categories: vec![],
            options: vec![ChannelOption {
                id: "sort".to_string(),
                title: "Sort".to_string(),
                description: "Sort the videos".to_string(),
                systemImage: "list.number".to_string(),
                colorName: "blue".to_string(),
                options: vec![
                    FilterOption {
                        id: "being-watched".to_string(),
                        title: "Being Watched".to_string(),
                    },
                    FilterOption {
                        id: "most-recent".to_string(),
                        title: "Most Recent".to_string(),
                    },
                    FilterOption {
                        id: "most-viewed".to_string(),
                        title: "Most Viewed".to_string(),
                    },
                    FilterOption {
                        id: "most-commented".to_string(),
                        title: "Most Commented".to_string(),
                    },
                    FilterOption {
                        id: "top-rated".to_string(),
                        title: "Top Rated".to_string(),
                    },
                    FilterOption {
                        id: "top-favorites".to_string(),
                        title: "Top Favorites".to_string(),
                    },
                    FilterOption {
                        id: "longest".to_string(),
                        title: "Longest".to_string(),
                    },
                ],
                multiSelect: false,
            }],
            nsfw: true,
            // Client-side cache hint, in seconds.
            cacheDuration: Some(1800),
        }
    }
fn sort_code_for_get(sort: &str) -> &'static str {
match sort {
"being-watched" => "bw",
"most-recent" => "mr",
"most-commented" => "md",
"top-rated" => "tr",
"top-favorites" => "tf",
"longest" => "lg",
_ => "mv",
}
}
fn sort_code_for_query(sort: &str) -> &'static str {
match sort {
"being-watched" => "bw",
"most-viewed" => "mv",
"most-commented" => "md",
"top-rated" => "tr",
"top-favorites" => "tf",
"longest" => "lg",
_ => "mr",
}
}
fn build_get_url(&self, page: u32, sort: &str) -> String {
format!(
"{}/videos?t=a&o={}&page={page}",
self.url,
Self::sort_code_for_get(sort)
)
}
    /// Search-page URL; form_urlencoded escapes the free-text query.
    fn build_query_url(&self, query: &str, page: u32, sort: &str) -> String {
        let mut serializer = Serializer::new(String::new());
        serializer.append_pair("search_query", query);
        serializer.append_pair("search_type", "videos");
        serializer.append_pair("o", Self::sort_code_for_query(sort));
        serializer.append_pair("page", &page.to_string());
        format!("{}/search?{}", self.url, serializer.finish())
    }
    /// Fetch one listing page. A cache entry younger than five minutes is
    /// returned directly; on any request/parse failure the stale cached
    /// items (if any) are returned instead — errors are reported via
    /// `report_provider_error`, never propagated.
    async fn get(
        &self,
        cache: VideoCache,
        page: u32,
        sort: &str,
        options: ServerOptions,
    ) -> Result<Vec<VideoItem>> {
        let video_url = self.build_get_url(page, sort);
        // Keep the previous items: they double as the fallback result.
        let old_items = match cache.get(&video_url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                    return Ok(items.clone());
                }
                items.clone()
            }
            None => vec![],
        };
        let mut requester =
            requester_or_default(&options, "tokyomotion", "tokyomotion.get.missing_requester");
        let text = match requester.get(&video_url, None).await {
            Ok(text) => text,
            Err(e) => {
                report_provider_error(
                    "tokyomotion",
                    "get.request",
                    &format!("url={video_url}; error={e}"),
                )
                .await;
                return Ok(old_items);
            }
        };
        if text.trim().is_empty() {
            report_provider_error(
                "tokyomotion",
                "get.empty_response",
                &format!("url={video_url}"),
            )
            .await;
            return Ok(old_items);
        }
        let video_items = self.get_video_items_from_html(text);
        // Only replace the cache when the parse produced something; an
        // empty parse falls back to the stale items.
        if !video_items.is_empty() {
            cache.remove(&video_url);
            cache.insert(video_url.clone(), video_items.clone());
            return Ok(video_items);
        }
        Ok(old_items)
    }
    /// Search variant of [`get`]; identical cache/fallback behaviour, only
    /// the URL and the error-report tags differ.
    async fn query(
        &self,
        cache: VideoCache,
        page: u32,
        query: &str,
        sort: &str,
        options: ServerOptions,
    ) -> Result<Vec<VideoItem>> {
        let video_url = self.build_query_url(query, page, sort);
        let old_items = match cache.get(&video_url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                    return Ok(items.clone());
                }
                items.clone()
            }
            None => vec![],
        };
        let mut requester = requester_or_default(
            &options,
            "tokyomotion",
            "tokyomotion.query.missing_requester",
        );
        let text = match requester.get(&video_url, None).await {
            Ok(text) => text,
            Err(e) => {
                report_provider_error(
                    "tokyomotion",
                    "query.request",
                    &format!("url={video_url}; error={e}"),
                )
                .await;
                return Ok(old_items);
            }
        };
        if text.trim().is_empty() {
            report_provider_error(
                "tokyomotion",
                "query.empty_response",
                &format!("url={video_url}"),
            )
            .await;
            return Ok(old_items);
        }
        let video_items = self.get_video_items_from_html(text);
        if !video_items.is_empty() {
            cache.remove(&video_url);
            cache.insert(video_url.clone(), video_items.clone());
            return Ok(video_items);
        }
        Ok(old_items)
    }
fn extract_between<'a>(text: &'a str, start: &str, end: &str) -> Option<&'a str> {
text.split(start).nth(1)?.split(end).next()
}
fn normalize_url(&self, url: &str) -> String {
if url.starts_with("http://") || url.starts_with("https://") {
return url.to_string();
}
if url.starts_with("//") {
return format!("https:{url}");
}
if url.starts_with('/') {
return format!("{}{}", self.url, url);
}
format!("{}/{}", self.url, url.trim_start_matches("./"))
}
fn parse_views(raw: &str) -> Option<u32> {
let cleaned = raw
.replace("views", "")
.replace("view", "")
.replace(',', "")
.trim()
.to_string();
parse_abbreviated_number(&cleaned)
}
fn parse_rating(raw: &str) -> Option<f32> {
let cleaned = raw.replace('%', "").trim().to_string();
if cleaned == "-" || cleaned.is_empty() {
return None;
}
cleaned.parse::<f32>().ok()
}
fn extract_id_from_url(url: &str) -> String {
url.trim_end_matches('/')
.split('/')
.find_map(|part| {
if part.chars().all(|c| c.is_ascii_digit()) {
Some(part.to_string())
} else {
None
}
})
.unwrap_or_default()
}
    /// Parse a listing/search page into VideoItems with one regex per card:
    /// the anchor (href + id + inner body), then the adjacent added/views/
    /// rating divs. Cards missing an id or a title are skipped; a failed
    /// regex compile yields an empty result rather than a panic.
    fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
        if html.trim().is_empty() {
            return vec![];
        }
        let Ok(card_re) = Regex::new(
            r#"(?is)<a href="(?P<href>/video/(?P<id>\d+)/[^"]+)"\s+class="thumb-popu">(?P<body>.*?)</a>\s*<div class="video-added">.*?</div>\s*<div class="video-views pull-left">\s*(?P<views>.*?)\s*</div>\s*<div class="video-rating pull-right[^"]*">\s*.*?<b>(?P<rating>[^<]+)</b>"#,
        ) else {
            return vec![];
        };
        let mut items = Vec::new();
        for captures in card_re.captures_iter(&html) {
            let href = captures
                .name("href")
                .map(|m| m.as_str())
                .unwrap_or_default();
            let video_url = self.normalize_url(href);
            // Prefer the regex-captured id; fall back to scanning the URL.
            let id = captures
                .name("id")
                .map(|m| m.as_str().to_string())
                .unwrap_or_else(|| Self::extract_id_from_url(&video_url));
            if id.is_empty() {
                continue;
            }
            let body = captures
                .name("body")
                .map(|m| m.as_str())
                .unwrap_or_default();
            // Title: the card's title span, with the title="" attribute as
            // fallback; HTML entities are decoded afterwards.
            let title_raw = Self::extract_between(
                body,
                "<span class=\"video-title title-truncate m-t-5\">",
                "<",
            )
            .or_else(|| Self::extract_between(body, "title=\"", "\""))
            .unwrap_or_default()
            .trim()
            .to_string();
            let title = decode(title_raw.as_bytes())
                .to_string()
                .unwrap_or(title_raw);
            if title.trim().is_empty() {
                continue;
            }
            let thumb = Self::extract_between(body, "<img src=\"", "\"")
                .map(|thumb| self.normalize_url(thumb))
                .unwrap_or_default();
            // Duration like "01:55:27" or "10:33"; unparseable -> 0.
            let duration_raw = Self::extract_between(body, "<div class=\"duration\">", "<")
                .unwrap_or_default()
                .trim()
                .to_string();
            let duration = parse_time_to_seconds(&duration_raw).unwrap_or(0) as u32;
            let views_raw = captures
                .name("views")
                .map(|m| m.as_str())
                .unwrap_or_default()
                .trim()
                .to_string();
            let views = Self::parse_views(&views_raw);
            let rating_raw = captures
                .name("rating")
                .map(|m| m.as_str())
                .unwrap_or_default()
                .trim()
                .to_string();
            let rating = Self::parse_rating(&rating_raw);
            let mut item = VideoItem::new(
                id,
                title,
                video_url,
                "tokyomotion".to_string(),
                thumb,
                duration,
            );
            // views/rating are optional; only attach them when parsed.
            if let Some(views) = views {
                item = item.views(views);
            }
            if let Some(rating) = rating {
                item = item.rating(rating);
            }
            items.push(item);
        }
        items
    }
}
#[async_trait]
impl Provider for TokyomotionProvider {
    /// Framework entry point: routes to search when a non-blank query is
    /// present, otherwise to the listing. Errors are reported and collapsed
    /// into an empty result — this method never fails the caller.
    async fn get_videos(
        &self,
        cache: VideoCache,
        pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        // This provider uses neither the DB pool nor per_page.
        let _ = pool;
        let _ = per_page;
        let page = page.parse::<u32>().unwrap_or(1);
        let videos = match query {
            Some(query) if !query.trim().is_empty() => {
                self.query(cache, page, &query, &sort, options).await
            }
            _ => self.get(cache, page, &sort, options).await,
        };
        match videos {
            Ok(videos) => videos,
            Err(e) => {
                report_provider_error(
                    "tokyomotion",
                    "get_videos",
                    &format!("page={page}; error={e}"),
                )
                .await;
                vec![]
            }
        }
    }
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}
#[cfg(test)]
mod tests {
    use super::TokyomotionProvider;
    // URL builders: each channel sort id maps to the expected `o=` code.
    #[test]
    fn builds_get_url_with_requested_sort() {
        let provider = TokyomotionProvider::new();
        assert_eq!(
            provider.build_get_url(2, "most-viewed"),
            "https://www.tokyomotion.net/videos?t=a&o=mv&page=2"
        );
        assert_eq!(
            provider.build_get_url(2, "top-rated"),
            "https://www.tokyomotion.net/videos?t=a&o=tr&page=2"
        );
    }
    // Search URL builder must also percent/plus-encode the query text.
    #[test]
    fn builds_query_url_with_requested_sort() {
        let provider = TokyomotionProvider::new();
        assert_eq!(
            provider.build_query_url("cute girl", 2, "most-recent"),
            "https://www.tokyomotion.net/search?search_query=cute+girl&search_type=videos&o=mr&page=2"
        );
        assert_eq!(
            provider.build_query_url("cute girl", 2, "top-favorites"),
            "https://www.tokyomotion.net/search?search_query=cute+girl&search_type=videos&o=tf&page=2"
        );
    }
    // Card parsing against a real listing snippet: one rated card and one
    // unrated ("-") card with zero views.
    #[test]
    fn parses_tokyomotion_cards() {
        let provider = TokyomotionProvider::new();
        let html = r##"
        <div class="row">
        <div class="col-sm-4 col-md-3 col-lg-3">
        <div class="well well-sm">
        <a href="/video/6225200/いのりちゃん 着エロ iv-日本美女-cute-japanese-girl" class="thumb-popu">
        <div class="thumb-overlay">
        <img src="https://cdn.tokyo-motion.net/media/videos/tmb194/6225200/16.jpg" title="いのりちゃん 着エロ IV 日本美女 Cute Japanese Girl" alt="いのりちゃん 着エロ IV 日本美女 Cute Japanese Girl" class="img-responsive "/>
        <div class="hd-text-icon">HD</div>
        <div class="duration">
        01:55:27
        </div>
        </div>
        <span class="video-title title-truncate m-t-5">いのりちゃん 着エロ IV 日本美女 Cute Japanese Girl</span>
        </a>
        <div class="video-added">4 days ago</div>
        <div class="video-views pull-left">
        4000 views
        </div>
        <div class="video-rating pull-right ">
        <i class="fa fa-heart video-rating-heart "></i> <b>57%</b>
        </div>
        <div class="clearfix"></div>
        </div>
        </div>
        <div class="col-sm-4 col-md-3 col-lg-3">
        <div class="well well-sm">
        <a href="/video/6222401/tattooed-trans-tease-jerking-on-cam" class="thumb-popu">
        <div class="thumb-overlay">
        <img src="https://cdn.tokyo-motion.net/media/videos/tmb194/6222401/1.jpg" title="Tattooed Trans Tease Jerking On Cam" alt="Tattooed Trans Tease Jerking On Cam" class="img-responsive "/>
        <div class="hd-text-icon">HD</div>
        <div class="duration">
        10:33
        </div>
        </div>
        <span class="video-title title-truncate m-t-5">Tattooed Trans Tease Jerking On Cam</span>
        </a>
        <div class="video-added">4 days ago</div>
        <div class="video-views pull-left">
        0 views
        </div>
        <div class="video-rating pull-right no-rating">
        <i class="fa fa-heart video-rating-heart no-rating"></i> <b>-</b>
        </div>
        <div class="clearfix"></div>
        </div>
        </div>
        </div>
        "##;
        let items = provider.get_video_items_from_html(html.to_string());
        assert_eq!(items.len(), 2);
        assert_eq!(items[0].id, "6225200");
        assert_eq!(
            items[0].url,
            "https://www.tokyomotion.net/video/6225200/いのりちゃん 着エロ iv-日本美女-cute-japanese-girl"
        );
        assert_eq!(
            items[0].thumb,
            "https://cdn.tokyo-motion.net/media/videos/tmb194/6225200/16.jpg"
        );
        // "01:55:27" -> 6927 seconds; "10:33" -> 633 seconds.
        assert_eq!(items[0].duration, 6927);
        assert_eq!(items[0].views, Some(4000));
        assert_eq!(items[0].rating, Some(57.0));
        assert_eq!(items[1].id, "6222401");
        assert_eq!(items[1].duration, 633);
        assert_eq!(items[1].views, Some(0));
        assert_eq!(items[1].rating, None);
    }
}

File diff suppressed because it is too large Load Diff

1249
src/providers/vrporn.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -12,6 +12,12 @@ use std::sync::{Arc, RwLock};
use std::vec; use std::vec;
use wreq::Version; use wreq::Version;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "tiktok",
tags: &["tube", "mixed", "search"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
@@ -62,31 +68,28 @@ impl XfreeProvider {
); );
vec![] vec![]
}), }),
options: vec![ options: vec![ChannelOption {
ChannelOption { id: "sexuality".to_string(),
id: "sexuality".to_string(), title: "Sexuality".to_string(),
title: "Sexuality".to_string(), description: "Sexuality of the Videos".to_string(),
description: "Sexuality of the Videos".to_string(), systemImage: "heart".to_string(),
systemImage: "heart".to_string(), colorName: "red".to_string(),
colorName: "red".to_string(), multiSelect: false,
multiSelect: false, options: vec![
options: vec![ FilterOption {
FilterOption { id: "1".to_string(),
id: "1".to_string(), title: "Straight".to_string(),
title: "Straight".to_string(), },
}, FilterOption {
FilterOption { id: "2".to_string(),
id: "2".to_string(), title: "Gay".to_string(),
title: "Gay".to_string(), },
}, FilterOption {
FilterOption { id: "3".to_string(),
id: "3".to_string(), title: "Trans".to_string(),
title: "Trans".to_string(), },
}, ],
], }],
},
],
nsfw: true, nsfw: true,
cacheDuration: None, cacheDuration: None,
} }
@@ -138,13 +141,21 @@ impl XfreeProvider {
let mut requester = let mut requester =
crate::providers::requester_or_default(&options, module_path!(), "missing_requester"); crate::providers::requester_or_default(&options, module_path!(), "missing_requester");
// let _ = requester.get("https://www.xfree.com/", Some(Version::HTTP_2)).await; // let _ = requester.get("https://www.xfree.com/", Some(Version::HTTP_2)).await;
let text = match requester.get_with_headers(&video_url, vec![ let text = match requester
("Apiversion".to_string(), "1.0".to_string()), .get_with_headers(
("Accept".to_string(), "application/json text/plain */*".to_string()), &video_url,
("Referer".to_string(), "https://www.xfree.com/".to_string()), vec![
], ("Apiversion".to_string(), "1.0".to_string()),
Some(Version::HTTP_2), (
).await { "Accept".to_string(),
"application/json text/plain */*".to_string(),
),
("Referer".to_string(), "https://www.xfree.com/".to_string()),
],
Some(Version::HTTP_2),
)
.await
{
Ok(text) => text, Ok(text) => text,
Err(e) => { Err(e) => {
crate::providers::report_provider_error( crate::providers::report_provider_error(
@@ -190,7 +201,9 @@ impl XfreeProvider {
} }
}; };
for post in json.get("body").and_then(|v| v.get("posts")) for post in json
.get("body")
.and_then(|v| v.get("posts"))
.and_then(|p| p.as_array()) .and_then(|p| p.as_array())
.unwrap_or(&vec![]) .unwrap_or(&vec![])
{ {
@@ -206,10 +219,10 @@ impl XfreeProvider {
.to_string(); .to_string();
let video_url = format!( let video_url = format!(
"https://cdn.xfree.com/xfree-prod/{}/{}/{}/{}/full.mp4", "https://cdn.xfree.com/xfree-prod/{}/{}/{}/{}/full.mp4",
id.clone().chars().nth(0).unwrap_or('0'), id.chars().nth(0).unwrap_or('0'),
id.clone().chars().nth(1).unwrap_or('0'), id.chars().nth(1).unwrap_or('0'),
id.clone().chars().nth(2).unwrap_or('0'), id.chars().nth(2).unwrap_or('0'),
id.clone() id
); );
let listsuffix = post let listsuffix = post
@@ -224,10 +237,10 @@ impl XfreeProvider {
let views = post.get("viewCount").and_then(|v| v.as_u64()).unwrap_or(0) as u32; let views = post.get("viewCount").and_then(|v| v.as_u64()).unwrap_or(0) as u32;
let preview = format!( let preview = format!(
"https://cdn.xfree.com/xfree-prod/{}/{}/{}/{}/listing7.mp4", "https://cdn.xfree.com/xfree-prod/{}/{}/{}/{}/listing7.mp4",
id.clone().chars().nth(0).unwrap_or('0'), id.chars().nth(0).unwrap_or('0'),
id.clone().chars().nth(1).unwrap_or('0'), id.chars().nth(1).unwrap_or('0'),
id.clone().chars().nth(2).unwrap_or('0'), id.chars().nth(2).unwrap_or('0'),
id.clone() id
); );
let duration = post let duration = post
.get("media") .get("media")
@@ -239,14 +252,16 @@ impl XfreeProvider {
.and_then(|v| v.as_array()) .and_then(|v| v.as_array())
.unwrap_or(&vec![]) .unwrap_or(&vec![])
.iter() .iter()
.filter_map(|t| .filter_map(|t| t.get("tag").and_then(|n| n.as_str()).map(|s| s.to_string()))
t.get("tag").and_then(|n| n.as_str()).map(|s| s.to_string()))
.collect::<Vec<String>>(); .collect::<Vec<String>>();
for tag in tags.iter() { for tag in tags.iter() {
Self::push_unique(&self.categories, FilterOption { Self::push_unique(
id: tag.clone(), &self.categories,
title: tag.clone(), FilterOption {
}); id: tag.clone(),
title: tag.clone(),
},
);
} }
let uploader = post let uploader = post
.get("user") .get("user")
@@ -262,6 +277,14 @@ impl XfreeProvider {
let uploaded_at = chrono::DateTime::parse_from_rfc3339(&upload_date) let uploaded_at = chrono::DateTime::parse_from_rfc3339(&upload_date)
.map(|dt| dt.timestamp() as u64) .map(|dt| dt.timestamp() as u64)
.unwrap_or(0); .unwrap_or(0);
let aspect_ration = post
.get("media")
.and_then(|v| v.get("aspectRatio"))
.and_then(|v| v.as_str())
.unwrap_or_default()
.to_string()
.parse::<f32>()
.unwrap_or(0.5625);
let video_item = VideoItem::new( let video_item = VideoItem::new(
id.to_string(), id.to_string(),
title, title,
@@ -274,7 +297,8 @@ impl XfreeProvider {
.preview(preview) .preview(preview)
.tags(tags) .tags(tags)
.uploader(uploader) .uploader(uploader)
.uploaded_at(uploaded_at); .uploaded_at(uploaded_at)
.aspect_ratio(aspect_ration);
items.push(video_item); items.push(video_item);
} }
return items; return items;
@@ -295,7 +319,16 @@ impl Provider for XfreeProvider {
) -> Vec<VideoItem> { ) -> Vec<VideoItem> {
let page = page.parse::<u8>().unwrap_or(1); let page = page.parse::<u8>().unwrap_or(1);
let res = self.to_owned().query(cache, page, &query.unwrap_or("null".to_string()), options, pool).await; let res = self
.to_owned()
.query(
cache,
page,
&query.unwrap_or("null".to_string()),
options,
pool,
)
.await;
res.unwrap_or_else(|e| { res.unwrap_or_else(|e| {
eprintln!("xfree error: {e}"); eprintln!("xfree error: {e}");

View File

@@ -12,6 +12,12 @@ use regex::Regex;
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "onlyfans",
tags: &["database", "clips", "mixed"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

View File

@@ -11,6 +11,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "onlyfans",
tags: &["onlyfans", "leaks", "creator"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

1513
src/providers/yesporn.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -11,6 +11,12 @@ use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec; use std::vec;
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
crate::providers::ProviderChannelMetadata {
group_id: "mainstream-tube",
tags: &["mainstream", "mixed", "search"],
};
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);

295
src/proxies/doodstream.rs Normal file
View File

@@ -0,0 +1,295 @@
use ntex::web;
use regex::{Captures, Regex};
use url::Url;
use crate::util::requester::Requester;
/// Resolves doodstream-style embed/detail pages (turboplayers, trailerhg,
/// streamhg hosts) to a direct media URL. Stateless — all logic lives in
/// associated functions.
#[derive(Debug, Clone)]
pub struct DoodstreamProxy {}
impl DoodstreamProxy {
    // Referer sent with every detail request, regardless of target host.
    const ROOT_REFERER: &'static str = "https://turboplayers.xyz/";
    /// Trivial constructor for the stateless proxy.
    pub fn new() -> Self {
        Self {}
    }
fn normalize_detail_url(endpoint: &str) -> Option<String> {
let normalized = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
endpoint.trim().to_string()
} else {
format!("https://{}", endpoint.trim_start_matches('/'))
};
Self::is_allowed_detail_url(&normalized).then_some(normalized)
}
fn is_allowed_host(host: &str) -> bool {
matches!(
host,
"turboplayers.xyz"
| "www.turboplayers.xyz"
| "trailerhg.xyz"
| "www.trailerhg.xyz"
| "streamhg.com"
| "www.streamhg.com"
)
}
    /// A detail URL is accepted only when it parses, uses https, sits on an
    /// allow-listed host, and targets one of the known player paths
    /// (/t/, /e/ or /d/).
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(url) = Url::parse(url).ok() else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        if !Self::is_allowed_host(host) {
            return false;
        }
        url.path().starts_with("/t/")
            || url.path().starts_with("/e/")
            || url.path().starts_with("/d/")
    }
fn request_headers(detail_url: &str) -> Vec<(String, String)> {
vec![
("Referer".to_string(), Self::ROOT_REFERER.to_string()),
("Origin".to_string(), "https://turboplayers.xyz".to_string()),
(
"Accept".to_string(),
"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8".to_string(),
),
("Accept-Language".to_string(), "en-US,en;q=0.9".to_string()),
(
"Sec-Fetch-Site".to_string(),
if detail_url.contains("trailerhg.xyz") {
"cross-site".to_string()
} else {
"same-origin".to_string()
},
),
]
}
    /// Compile a regex, turning an invalid pattern into None so callers
    /// degrade to "no match" instead of panicking.
    fn regex(pattern: &str) -> Option<Regex> {
        Regex::new(pattern).ok()
    }
fn decode_base36(token: &str) -> Option<usize> {
usize::from_str_radix(token, 36).ok()
}
fn sanitize_media_url(url: &str) -> String {
url.trim()
.trim_end_matches('\\')
.trim_end_matches('"')
.trim_end_matches('\'')
.to_string()
}
    /// Scan for a media URL embedded verbatim in the page: the player's
    /// `urlPlay` variable, a `data-hash` attribute, or any quoted
    /// .m3u8/.mp4 URL. Patterns are tried in that priority order.
    fn extract_literal_url(text: &str) -> Option<String> {
        let direct_patterns = [
            r#"urlPlay\s*=\s*'(?P<url>https?://[^']+)'"#,
            r#"data-hash\s*=\s*"(?P<url>https?://[^"]+)""#,
            r#""(?P<url>https?://[^"]+\.(?:m3u8|mp4)(?:\?[^"]*)?)""#,
            r#"'(?P<url>https?://[^']+\.(?:m3u8|mp4)(?:\?[^']*)?)'"#,
        ];
        for pattern in direct_patterns {
            let Some(regex) = Self::regex(pattern) else {
                continue;
            };
            if let Some(url) = regex
                .captures(text)
                .and_then(|captures| captures.name("url"))
                .map(|value| Self::sanitize_media_url(value.as_str()))
            {
                return Some(url);
            }
        }
        None
    }
    /// Pull the arguments out of a Dean-Edwards-style
    /// `eval(function(p,a,c,k,e,d){...}('payload',radix,count,'sym|bols'...)`
    /// blob: (payload, radix, count, symbol table). The payload/symbol
    /// captures tolerate escaped quotes and backslashes, which are undone
    /// by `decode_js_single_quoted`.
    fn extract_packed_eval_args(text: &str) -> Option<(String, usize, usize, Vec<String>)> {
        let regex = Self::regex(
            r#"eval\(function\(p,a,c,k,e,d\)\{.*?\}\('(?P<payload>(?:\\'|\\\\|[^'])*)',(?P<radix>\d+),(?P<count>\d+),'(?P<symbols>(?:\\'|\\\\|[^'])*)'\.split\('\|'\)"#,
        )?;
        let captures = regex.captures(text)?;
        let payload = Self::decode_js_single_quoted(captures.name("payload")?.as_str());
        let radix = captures.name("radix")?.as_str().parse::<usize>().ok()?;
        let count = captures.name("count")?.as_str().parse::<usize>().ok()?;
        let symbols = Self::decode_js_single_quoted(captures.name("symbols")?.as_str());
        let parts = symbols.split('|').map(|value| value.to_string()).collect();
        Some((payload, radix, count, parts))
    }
fn decode_js_single_quoted(value: &str) -> String {
let mut result = String::with_capacity(value.len());
let mut chars = value.chars();
while let Some(ch) = chars.next() {
if ch != '\\' {
result.push(ch);
continue;
}
match chars.next() {
Some('\\') => result.push('\\'),
Some('\'') => result.push('\''),
Some('"') => result.push('"'),
Some('n') => result.push('\n'),
Some('r') => result.push('\r'),
Some('t') => result.push('\t'),
Some(other) => {
result.push('\\');
result.push(other);
}
None => result.push('\\'),
}
}
result
}
    /// Reverse the p.a.c.k.e.r obfuscation: replace every base-36 token in
    /// the payload with its entry from the symbol table. Only radix 36 is
    /// supported (the only variant seen from these hosts); tokens that are
    /// out of range or map to an empty symbol are left untouched.
    fn unpack_packer(text: &str) -> Option<String> {
        let (mut payload, radix, count, symbols) = Self::extract_packed_eval_args(text)?;
        if radix != 36 {
            return None;
        }
        let token_regex = Self::regex(r"\b[0-9a-z]+\b")?;
        payload = token_regex
            .replace_all(&payload, |captures: &Captures| {
                let token = captures
                    .get(0)
                    .map(|value| value.as_str())
                    .unwrap_or_default();
                let Some(index) = Self::decode_base36(token) else {
                    return token.to_string();
                };
                if index >= count {
                    return token.to_string();
                }
                let replacement = symbols.get(index).map(|value| value.as_str()).unwrap_or("");
                if replacement.is_empty() {
                    token.to_string()
                } else {
                    replacement.to_string()
                }
            })
            .to_string();
        Some(payload)
    }
    /// Collect every https media URL (.m3u8/.mp4/.txt) from a text blob and
    /// order them by preference: m3u8 first, then mp4, then txt.
    fn collect_media_candidates(text: &str) -> Vec<String> {
        let Some(regex) = Self::regex(r#"https?://[^\s"'<>]+?\.(?:m3u8|mp4|txt)(?:\?[^\s"'<>]*)?"#)
        else {
            return vec![];
        };
        let mut urls = regex
            .find_iter(text)
            .map(|value| Self::sanitize_media_url(value.as_str()))
            .filter(|url| url.starts_with("https://"))
            .collect::<Vec<_>>();
        // Stable sort by extension class only, so find order is preserved
        // within each class.
        urls.sort_by_key(|url| {
            if url.contains(".m3u8") {
                0
            } else if url.contains(".mp4") {
                1
            } else {
                2
            }
        });
        // NOTE(review): Vec::dedup only drops ADJACENT duplicates; equal
        // URLs separated by another URL of the same class survive.
        urls.dedup();
        urls
    }
fn extract_stream_url(text: &str) -> Option<String> {
if let Some(url) = Self::extract_literal_url(text) {
return Some(url);
}
let unpacked = Self::unpack_packer(text)?;
Self::collect_media_candidates(&unpacked)
.into_iter()
.next()
.or_else(|| Self::extract_literal_url(&unpacked))
}
}
impl crate::proxies::Proxy for DoodstreamProxy {
    /// Resolve an incoming endpoint to a direct media URL. Disallowed or
    /// unparseable endpoints and failed fetches/extractions all yield an
    /// empty string rather than an error.
    async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
        let Some(detail_url) = Self::normalize_detail_url(&url) else {
            return String::new();
        };
        // Clone the shared requester so this handler owns a mutable copy.
        let mut requester = requester.get_ref().clone();
        let html = match requester
            .get_with_headers(&detail_url, Self::request_headers(&detail_url), None)
            .await
        {
            Ok(text) => text,
            Err(_) => return String::new(),
        };
        Self::extract_stream_url(&html).unwrap_or_default()
    }
}
#[cfg(test)]
mod tests {
    use super::DoodstreamProxy;
    // Allow-list: https on a known host with a player path; http and
    // unknown hosts are rejected.
    #[test]
    fn allows_only_known_doodstream_hosts() {
        assert!(DoodstreamProxy::is_allowed_detail_url(
            "https://turboplayers.xyz/t/69bdfb21cc640"
        ));
        assert!(DoodstreamProxy::is_allowed_detail_url(
            "https://trailerhg.xyz/e/ttdc7a6qpskt"
        ));
        assert!(!DoodstreamProxy::is_allowed_detail_url(
            "http://turboplayers.xyz/t/69bdfb21cc640"
        ));
        assert!(!DoodstreamProxy::is_allowed_detail_url(
            "https://example.com/t/69bdfb21cc640"
        ));
    }
    // Literal extraction path: the URL sits in plain sight in the HTML.
    #[test]
    fn extracts_clear_hls_url_from_turboplayers_layout() {
        let html = r#"
        <div id="video_player" data-hash="https://cdn4.turboviplay.com/data1/69bdfa8ce1f4d/69bdfa8ce1f4d.m3u8"></div>
        <script>
        var urlPlay = 'https://cdn4.turboviplay.com/data1/69bdfa8ce1f4d/69bdfa8ce1f4d.m3u8';
        </script>
        "#;
        assert_eq!(
            DoodstreamProxy::extract_stream_url(html).as_deref(),
            Some("https://cdn4.turboviplay.com/data1/69bdfa8ce1f4d/69bdfa8ce1f4d.m3u8")
        );
    }
    // Packer path: the URL only appears after unpacking, and the m3u8
    // candidate must win over the txt one.
    #[test]
    fn unpacks_streamhg_style_player_config() {
        let html = r#"
        <script type='text/javascript'>
        eval(function(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}('0 1={\"2\":\"https://cdn.example/master.m3u8?t=1\",\"3\":\"https://cdn.example/master.txt\"};4(\"5\").6({7:[{8:1.2,9:\"a\"}]});',36,11,'var|links|hls2|hls3|jwplayer|vplayer|setup|sources|file|type|hls'.split('|')))
        </script>
        "#;
        assert_eq!(
            DoodstreamProxy::extract_stream_url(html).as_deref(),
            Some("https://cdn.example/master.m3u8?t=1")
        );
    }
}

View File

@@ -6,12 +6,23 @@ use ntex::{
use crate::util::requester::Requester; use crate::util::requester::Requester;
/// Builds an absolute image URL for the hanime CDN proxy endpoint.
///
/// Accepts three input shapes:
/// - a full http(s) URL, returned unchanged;
/// - a scheme-less `hanime-cdn.com/...` host-and-path, re-prefixed with
///   `https://` (avoids duplicating the host);
/// - a bare path, anchored to the `hanime-cdn.com` host.
fn normalize_image_url(endpoint: &str) -> String {
    let endpoint = endpoint.trim_start_matches('/');
    if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
        endpoint.to_string()
    } else if endpoint.starts_with("hanime-cdn.com/") || endpoint == "hanime-cdn.com" {
        format!("https://{endpoint}")
    } else {
        // Bug fix: bare paths previously got only a scheme prepended
        // ("https://images/..."), which never resolves; anchor them to the
        // CDN host as the sibling unit test expects.
        format!("https://hanime-cdn.com/{endpoint}")
    }
}
pub async fn get_image( pub async fn get_image(
req: HttpRequest, req: HttpRequest,
requester: web::types::State<Requester>, requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> { ) -> Result<impl web::Responder, web::Error> {
let endpoint = req.match_info().query("endpoint").to_string(); let endpoint = req.match_info().query("endpoint").to_string();
let image_url = format!("https://hanime-cdn.com/{}", endpoint); let image_url = normalize_image_url(&endpoint);
let upstream = match requester let upstream = match requester
.get_ref() .get_ref()
@@ -52,3 +63,24 @@ pub async fn get_image(
// ...or simple & compatible: // ...or simple & compatible:
Ok(resp.body(bytes.to_vec())) Ok(resp.body(bytes.to_vec()))
} }
// Unit tests for hanime CDN image-URL normalization.
#[cfg(test)]
mod tests {
    use super::normalize_image_url;
    // A scheme-less host+path must not have the host duplicated.
    #[test]
    fn keeps_full_hanime_cdn_host_path_without_duplication() {
        assert_eq!(
            normalize_image_url("hanime-cdn.com/images/covers/natsu-zuma-2-cv1.png"),
            "https://hanime-cdn.com/images/covers/natsu-zuma-2-cv1.png"
        );
    }
    // A bare path is anchored to the CDN host.
    #[test]
    fn prefixes_relative_paths_with_hanime_cdn_host() {
        assert_eq!(
            normalize_image_url("/images/covers/natsu-zuma-2-cv1.png"),
            "https://hanime-cdn.com/images/covers/natsu-zuma-2-cv1.png"
        );
    }
}

View File

@@ -1,16 +1,35 @@
use crate::proxies::doodstream::DoodstreamProxy;
use crate::proxies::pornhd3x::Pornhd3xProxy;
use ntex::web; use ntex::web;
use crate::proxies::pimpbunny::PimpbunnyProxy;
use crate::proxies::porndish::PorndishProxy;
use crate::proxies::spankbang::SpankbangProxy;
use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester}; use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
pub mod doodstream;
pub mod hanimecdn; pub mod hanimecdn;
pub mod hqpornerthumb; pub mod hqpornerthumb;
pub mod javtiful; pub mod javtiful;
pub mod noodlemagazine;
pub mod pimpbunny;
pub mod pimpbunnythumb;
pub mod porndish;
pub mod porndishthumb;
pub mod pornhd3x;
pub mod shooshtime;
pub mod spankbang;
pub mod sxyprn; pub mod sxyprn;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum AnyProxy { pub enum AnyProxy {
Doodstream(DoodstreamProxy),
Sxyprn(SxyprnProxy), Sxyprn(SxyprnProxy),
Javtiful(javtiful::JavtifulProxy), Javtiful(javtiful::JavtifulProxy),
Pornhd3x(Pornhd3xProxy),
Pimpbunny(PimpbunnyProxy),
Porndish(PorndishProxy),
Spankbang(SpankbangProxy),
} }
pub trait Proxy { pub trait Proxy {
@@ -20,8 +39,13 @@ pub trait Proxy {
impl Proxy for AnyProxy { impl Proxy for AnyProxy {
async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String { async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
match self { match self {
AnyProxy::Doodstream(p) => p.get_video_url(url, requester).await,
AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await, AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await,
AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await, AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await,
AnyProxy::Pornhd3x(p) => p.get_video_url(url, requester).await,
AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await,
AnyProxy::Porndish(p) => p.get_video_url(url, requester).await,
AnyProxy::Spankbang(p) => p.get_video_url(url, requester).await,
} }
} }
} }

View File

@@ -0,0 +1,441 @@
use ntex::http::header::{CONTENT_LENGTH, CONTENT_TYPE};
use ntex::{
http::Response,
web::{self, HttpRequest, error},
};
use serde_json::Value;
use std::net::IpAddr;
use url::Url;
use wreq::Version;
use crate::util::requester::Requester;
// Browser-like request identity used for all upstream fetches.
const FIREFOX_USER_AGENT: &str =
    "Mozilla/5.0 (X11; Linux x86_64; rv:147.0) Gecko/20100101 Firefox/147.0";
// Accept header for HTML page loads (session warm-up requests).
const HTML_ACCEPT: &str =
    "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8";
// Accept header for thumbnail/image fetches.
const IMAGE_ACCEPT: &str = "image/avif,image/webp,image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5";
/// Stateless proxy for noodlemagazine.com video pages and thumbnails.
#[derive(Debug, Clone)]
pub struct NoodlemagazineProxy {}
impl NoodlemagazineProxy {
    /// Creates a new stateless proxy instance.
    pub fn new() -> Self {
        NoodlemagazineProxy {}
    }
fn extract_playlist(text: &str) -> Option<&str> {
text.split("window.playlist = ").nth(1)?.split(';').next()
}
fn source_score(source: &Value) -> (u8, u32) {
let file = source["file"].as_str().unwrap_or_default();
let label = source["label"].as_str().unwrap_or_default();
let is_hls = u8::from(file.contains(".m3u8"));
let quality = label
.chars()
.filter(|c| c.is_ascii_digit())
.collect::<String>()
.parse::<u32>()
.unwrap_or(0);
(is_hls, quality)
}
    /// Picks the best source from the page's JSON playlist using
    /// `source_score` (HLS preferred, then highest quality label).
    /// Returns `None` when the playlist is not valid JSON or contains no
    /// source with a non-empty "file" entry.
    fn select_best_source(playlist: &str) -> Option<String> {
        let json: Value = serde_json::from_str(playlist).ok()?;
        let sources = json["sources"].as_array()?;
        sources
            .iter()
            .filter(|source| {
                source["file"]
                    .as_str()
                    .map(|file| !file.is_empty())
                    .unwrap_or(false)
            })
            .max_by_key(|source| Self::source_score(source))
            .and_then(|source| source["file"].as_str())
            .map(str::to_string)
    }
fn normalize_video_page_url(url: &str) -> String {
if url.starts_with("http://") || url.starts_with("https://") {
url.to_string()
} else {
format!("https://{}", url.trim_start_matches('/'))
}
}
fn normalize_image_url(url: &str) -> String {
if url.starts_with("http://") || url.starts_with("https://") {
url.to_string()
} else {
format!("https://{}", url.trim_start_matches('/'))
}
}
fn root_referer() -> &'static str {
"https://noodlemagazine.com/"
}
fn root_html_headers() -> Vec<(String, String)> {
vec![
("Referer".to_string(), Self::root_referer().to_string()),
("User-Agent".to_string(), FIREFOX_USER_AGENT.to_string()),
("Accept".to_string(), HTML_ACCEPT.to_string()),
("Accept-Language".to_string(), "en-US,en;q=0.9".to_string()),
]
}
    /// Browser-like headers for image fetches; appends the session Cookie
    /// header for the image host when the requester's jar already holds one.
    fn image_headers(requester: &Requester, image_url: &str) -> Vec<(String, String)> {
        let mut headers = vec![
            ("Referer".to_string(), Self::root_referer().to_string()),
            ("User-Agent".to_string(), FIREFOX_USER_AGENT.to_string()),
            ("Accept".to_string(), IMAGE_ACCEPT.to_string()),
            ("Accept-Language".to_string(), "en-US,en;q=0.9".to_string()),
        ];
        if let Some(cookie) = requester.cookie_header_for_url(image_url) {
            headers.push(("Cookie".to_string(), cookie));
        }
        headers
    }
fn has_allowed_image_extension(path: &str) -> bool {
let path = path.to_ascii_lowercase();
[".jpg", ".jpeg", ".png", ".webp", ".avif", ".gif"]
.iter()
.any(|ext| path.ends_with(ext))
}
fn is_disallowed_thumb_host(host: &str) -> bool {
if host.eq_ignore_ascii_case("localhost") {
return true;
}
match host.parse::<IpAddr>() {
Ok(IpAddr::V4(ip)) => {
ip.is_private()
|| ip.is_loopback()
|| ip.is_link_local()
|| ip.is_broadcast()
|| ip.is_documentation()
|| ip.is_unspecified()
}
Ok(IpAddr::V6(ip)) => {
ip.is_loopback()
|| ip.is_unspecified()
|| ip.is_multicast()
|| ip.is_unique_local()
|| ip.is_unicast_link_local()
}
Err(_) => false,
}
}
    /// Thumbnail proxy allow-list: requires https, a host that is not
    /// local/private (`is_disallowed_thumb_host`), and a path with a known
    /// image extension.
    fn is_allowed_thumb_url(url: &str) -> bool {
        let Some(url) = Url::parse(url).ok() else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        !Self::is_disallowed_thumb_host(host) && Self::has_allowed_image_extension(url.path())
    }
fn is_binary_image_content_type(content_type: &str) -> bool {
let media_type = content_type
.split(';')
.next()
.unwrap_or_default()
.trim()
.to_ascii_lowercase();
media_type.starts_with("image/")
}
fn is_hls_url(url: &str) -> bool {
Url::parse(url)
.ok()
.map(|parsed| parsed.path().ends_with(".m3u8"))
.unwrap_or(false)
}
    /// Resolves `value` against `base_url` unless it is already absolute,
    /// a fragment, or a data: URI. On join failure the value is returned
    /// unchanged rather than dropped.
    fn absolutize_uri(base_url: &Url, value: &str) -> String {
        if value.is_empty() {
            return String::new();
        }
        if value.starts_with('#')
            || value.starts_with("data:")
            || value.starts_with("http://")
            || value.starts_with("https://")
        {
            return value.to_string();
        }
        base_url
            .join(value)
            .map(|url| url.to_string())
            .unwrap_or_else(|_| value.to_string())
    }
    /// Rewrites one HLS manifest line: bare URI lines are absolutized, and
    /// tag lines (starting with '#') have any `URI="..."` attribute value
    /// absolutized in place. Blank lines and tags without a URI attribute
    /// pass through untouched.
    fn rewrite_manifest_line(base_url: &Url, line: &str) -> String {
        if line.trim().is_empty() {
            return line.to_string();
        }
        if !line.starts_with('#') {
            return Self::absolutize_uri(base_url, line);
        }
        let Some(uri_start) = line.find("URI=\"") else {
            return line.to_string();
        };
        // Skip past the `URI="` prefix (5 bytes) to the attribute value.
        let value_start = uri_start + 5;
        let Some(relative_end) = line[value_start..].find('"') else {
            return line.to_string();
        };
        let value_end = value_start + relative_end;
        let value = &line[value_start..value_end];
        let rewritten = Self::absolutize_uri(base_url, value);
        format!(
            "{}{}{}",
            &line[..value_start],
            rewritten,
            &line[value_end..]
        )
    }
    /// Rewrites a whole HLS manifest so every segment/key/sub-playlist
    /// reference becomes an absolute URL relative to `manifest_url`,
    /// letting clients play it without proxying each segment.
    fn rewrite_manifest(manifest_url: &str, body: &str) -> Option<String> {
        let base_url = Url::parse(manifest_url).ok()?;
        Some(
            body.lines()
                .map(|line| Self::rewrite_manifest_line(&base_url, line))
                .collect::<Vec<_>>()
                .join("\n"),
        )
    }
    /// Fetches the video page and returns `(page_url, best_source_url)`,
    /// or `None` when the page cannot be fetched, has no playlist, or the
    /// playlist has no usable source.
    async fn resolve_source_url(
        &self,
        url: String,
        requester: web::types::State<Requester>,
    ) -> Option<(String, String)> {
        let mut requester = requester.get_ref().clone();
        let url = Self::normalize_video_page_url(&url);
        let text = requester
            .get(&url, Some(Version::HTTP_2))
            .await
            .unwrap_or_default();
        if text.is_empty() {
            return None;
        }
        let Some(playlist) = Self::extract_playlist(&text) else {
            return None;
        };
        Self::select_best_source(playlist).map(|source_url| (url, source_url))
    }
}
/// HTTP handler: resolves a noodlemagazine page to its best stream.
///
/// Non-HLS sources are answered with a 302 redirect straight to the file;
/// HLS manifests are fetched (with the video page as Referer) and rewritten
/// to absolute URLs before being returned to the client.
pub async fn serve_media(
    req: HttpRequest,
    requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> {
    let endpoint = req.match_info().query("endpoint").to_string();
    let proxy = NoodlemagazineProxy::new();
    let Some((video_page_url, source_url)) =
        proxy.resolve_source_url(endpoint, requester.clone()).await
    else {
        return Ok(web::HttpResponse::BadGateway().finish());
    };
    // Progressive files need no rewriting — hand the client the direct URL.
    if !NoodlemagazineProxy::is_hls_url(&source_url) {
        return Ok(web::HttpResponse::Found()
            .header("Location", source_url)
            .finish());
    }
    let mut upstream_requester = requester.get_ref().clone();
    let upstream = match upstream_requester
        .get_raw_with_headers(&source_url, vec![("Referer".to_string(), video_page_url)])
        .await
    {
        Ok(response) => response,
        Err(_) => return Ok(web::HttpResponse::BadGateway().finish()),
    };
    let manifest_body = upstream.text().await.map_err(error::ErrorBadGateway)?;
    let rewritten_manifest =
        match NoodlemagazineProxy::rewrite_manifest(&source_url, &manifest_body) {
            Some(body) => body,
            None => return Ok(web::HttpResponse::BadGateway().finish()),
        };
    Ok(web::HttpResponse::Ok()
        .header(CONTENT_TYPE, "application/vnd.apple.mpegurl")
        .body(rewritten_manifest))
}
/// HTTP handler: proxies an upstream thumbnail image.
///
/// Validates the target against the SSRF allow-list, warms up a root
/// session for cookies, retries once after a direct warm-up request if the
/// first fetch is rejected, and only forwards `image/*` responses.
pub async fn get_image(
    req: HttpRequest,
    requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> {
    let endpoint = req.match_info().query("endpoint").to_string();
    let image_url = NoodlemagazineProxy::normalize_image_url(&endpoint);
    if !NoodlemagazineProxy::is_allowed_thumb_url(&image_url) {
        return Ok(web::HttpResponse::BadRequest().finish());
    }
    let mut requester = requester.get_ref().clone();
    // Warm-up: load the site root first so session cookies are in the jar.
    let _ = requester
        .get_with_headers(
            NoodlemagazineProxy::root_referer(),
            NoodlemagazineProxy::root_html_headers(),
            Some(Version::HTTP_11),
        )
        .await;
    let mut headers = NoodlemagazineProxy::image_headers(&requester, image_url.as_str());
    let mut upstream = requester
        .get_raw_with_headers(image_url.as_str(), headers.clone())
        .await
        .ok();
    let needs_warmup = upstream
        .as_ref()
        .map(|response| !response.status().is_success())
        .unwrap_or(true);
    if needs_warmup {
        // Retry once: request the image URL as HTML first to pick up any
        // per-resource cookies, then refetch with refreshed headers.
        let _ = requester
            .get_with_headers(image_url.as_str(), headers.clone(), Some(Version::HTTP_11))
            .await;
        headers = NoodlemagazineProxy::image_headers(&requester, image_url.as_str());
        upstream = requester
            .get_raw_with_headers(image_url.as_str(), headers)
            .await
            .ok();
    }
    let Some(upstream) = upstream.filter(|response| response.status().is_success()) else {
        return Ok(web::HttpResponse::NotFound().finish());
    };
    let status = upstream.status();
    let headers = upstream.headers().clone();
    let content_type = headers
        .get(CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .map(str::to_string)
        .unwrap_or_default();
    // Refuse to relay non-image payloads (e.g. HTML challenge pages).
    if !NoodlemagazineProxy::is_binary_image_content_type(&content_type) {
        return Ok(web::HttpResponse::BadGateway().finish());
    }
    let bytes = upstream.bytes().await.map_err(error::ErrorBadGateway)?;
    let mut resp = Response::build(status);
    if !content_type.is_empty() {
        resp.set_header(CONTENT_TYPE, content_type);
    }
    if let Some(cl) = headers.get(CONTENT_LENGTH) {
        if let Ok(cl_str) = cl.to_str() {
            resp.set_header(CONTENT_LENGTH, cl_str);
        }
    }
    Ok(resp.body(bytes.to_vec()))
}
// Unit tests for playlist extraction, source selection, manifest
// rewriting, and the thumbnail allow-list.
#[cfg(test)]
mod tests {
    use super::NoodlemagazineProxy;
    // The JSON assigned to window.playlist is captured up to the ';'.
    #[test]
    fn extracts_playlist_from_page() {
        let html = r#"
            <script>
            window.playlist = {"sources":[{"file":"https://cdn.example/360.mp4","label":"360p"}]};
            </script>
        "#;
        assert_eq!(
            NoodlemagazineProxy::extract_playlist(html),
            Some(r#"{"sources":[{"file":"https://cdn.example/360.mp4","label":"360p"}]}"#)
        );
    }
    // HLS outranks progressive regardless of label quality.
    #[test]
    fn prefers_hls_then_highest_quality() {
        let playlist = r#"{
            "sources": [
                {"file":"https://cdn.example/360.mp4","label":"360p"},
                {"file":"https://cdn.example/720.mp4","label":"720p"},
                {"file":"https://cdn.example/master.m3u8","label":"1080p"}
            ]
        }"#;
        assert_eq!(
            NoodlemagazineProxy::select_best_source(playlist).as_deref(),
            Some("https://cdn.example/master.m3u8")
        );
    }
    // Relative segment lines and URI="..." attributes are absolutized.
    #[test]
    fn rewrites_manifest_to_direct_absolute_urls() {
        let manifest = "#EXTM3U\n#EXT-X-STREAM-INF:BANDWIDTH=1\nlow/index.m3u8\n#EXT-X-KEY:METHOD=AES-128,URI=\"keys/key.bin\"\nsegment0.ts";
        let rewritten =
            NoodlemagazineProxy::rewrite_manifest("https://cdn.example/hls/master.m3u8", manifest)
                .unwrap();
        assert_eq!(
            rewritten,
            "#EXTM3U\n#EXT-X-STREAM-INF:BANDWIDTH=1\nhttps://cdn.example/hls/low/index.m3u8\n#EXT-X-KEY:METHOD=AES-128,URI=\"https://cdn.example/hls/keys/key.bin\"\nhttps://cdn.example/hls/segment0.ts"
        );
    }
    // Allow-list accepts https image URLs, rejects pages and local hosts.
    #[test]
    fn allows_https_image_thumbs_but_rejects_local_or_non_images() {
        assert!(NoodlemagazineProxy::is_allowed_thumb_url(
            "https://noodlemagazine.com/thumbs/example.webp"
        ));
        assert!(NoodlemagazineProxy::is_allowed_thumb_url(
            "https://cdn.example/previews/example.jpg"
        ));
        assert!(!NoodlemagazineProxy::is_allowed_thumb_url(
            "https://noodlemagazine.com/watch/-123_456"
        ));
        assert!(!NoodlemagazineProxy::is_allowed_thumb_url(
            "https://localhost/thumb.jpg"
        ));
    }
    // Media-type check ignores parameters and non-image types.
    #[test]
    fn recognizes_binary_image_content_types() {
        assert!(NoodlemagazineProxy::is_binary_image_content_type(
            "image/webp"
        ));
        assert!(NoodlemagazineProxy::is_binary_image_content_type(
            "image/jpeg; charset=binary"
        ));
        assert!(!NoodlemagazineProxy::is_binary_image_content_type(
            "text/html; charset=utf-8"
        ));
        assert!(!NoodlemagazineProxy::is_binary_image_content_type(
            "application/json"
        ));
    }
}

252
src/proxies/pimpbunny.rs Normal file
View File

@@ -0,0 +1,252 @@
use ntex::web;
use regex::Regex;
use serde_json::Value;
use url::Url;
use wreq::Version;
use crate::util::requester::Requester;
/// Stateless proxy that resolves pimpbunny.com video pages to stream URLs.
#[derive(Debug, Clone)]
pub struct PimpbunnyProxy {}
impl PimpbunnyProxy {
    // Browser-like request identity for upstream fetches.
    const FIREFOX_USER_AGENT: &'static str =
        "Mozilla/5.0 (X11; Linux x86_64; rv:147.0) Gecko/20100101 Firefox/147.0";
    // Accept header used for HTML page loads.
    const HTML_ACCEPT: &'static str =
        "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8";
    /// Creates a new stateless proxy instance.
    pub fn new() -> Self {
        PimpbunnyProxy {}
    }
fn normalize_detail_url(url: &str) -> Option<String> {
let normalized = if url.starts_with("http://") || url.starts_with("https://") {
url.to_string()
} else {
format!("https://{}", url.trim_start_matches('/'))
};
Self::is_allowed_detail_url(&normalized).then_some(normalized)
}
    /// Accepts only https pimpbunny.com page URLs; screenshot paths are the
    /// thumb proxy's job and are rejected here.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(url) = Url::parse(url).ok() else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        matches!(host, "pimpbunny.com" | "www.pimpbunny.com")
            && !url.path().starts_with("/contents/videos_screenshots/")
    }
    /// Referer value and session warm-up target: the site root.
    fn root_referer() -> &'static str {
        "https://pimpbunny.com/"
    }
fn html_headers_with_referer(referer: &str) -> Vec<(String, String)> {
vec![
("Referer".to_string(), referer.to_string()),
(
"User-Agent".to_string(),
Self::FIREFOX_USER_AGENT.to_string(),
),
("Accept".to_string(), Self::HTML_ACCEPT.to_string()),
("Accept-Language".to_string(), "en-US,en;q=0.9".to_string()),
]
}
    /// HTML headers plus the session Cookie for `request_url`, when the
    /// requester's cookie jar already holds one.
    fn headers_with_cookies(
        requester: &Requester,
        request_url: &str,
        referer: &str,
    ) -> Vec<(String, String)> {
        let mut headers = Self::html_headers_with_referer(referer);
        if let Some(cookie) = requester.cookie_header_for_url(request_url) {
            headers.push(("Cookie".to_string(), cookie));
        }
        headers
    }
    /// Best-effort fetch of the site root so session cookies land in the
    /// requester's jar before the real page request; errors are ignored.
    async fn warm_root_session(requester: &mut Requester) {
        let _ = requester
            .get_with_headers(
                Self::root_referer(),
                Self::html_headers_with_referer(Self::root_referer()),
                Some(Version::HTTP_11),
            )
            .await;
    }
fn extract_json_ld_video(text: &str) -> Option<Value> {
let script_regex =
Regex::new(r#"(?s)<script[^>]+application/ld\+json[^>]*>(.*?)</script>"#).ok()?;
for captures in script_regex.captures_iter(text) {
let raw = captures.get(1).map(|value| value.as_str().trim())?;
let parsed: Value = serde_json::from_str(raw).ok()?;
if let Some(video) = Self::find_video_object(&parsed) {
return Some(video);
}
}
None
}
    /// Finds a video-like JSON-LD node: a `VideoObject`, or any object
    /// carrying a non-empty "contentUrl". Checks the top-level object,
    /// then entries of "@graph", then recurses one level into plain arrays.
    fn find_video_object(parsed: &Value) -> Option<Value> {
        if parsed
            .get("@type")
            .and_then(Value::as_str)
            .is_some_and(|value| value == "VideoObject")
        {
            return Some(parsed.clone());
        }
        if parsed
            .get("contentUrl")
            .and_then(Value::as_str)
            .is_some_and(|value| !value.trim().is_empty())
        {
            return Some(parsed.clone());
        }
        if let Some(graph) = parsed.get("@graph").and_then(Value::as_array) {
            for item in graph {
                if item
                    .get("@type")
                    .and_then(Value::as_str)
                    .is_some_and(|value| value == "VideoObject")
                {
                    return Some(item.clone());
                }
                if item
                    .get("contentUrl")
                    .and_then(Value::as_str)
                    .is_some_and(|value| !value.trim().is_empty())
                {
                    return Some(item.clone());
                }
            }
        }
        if let Some(array) = parsed.as_array() {
            for item in array {
                if let Some(video) = Self::find_video_object(item) {
                    return Some(video);
                }
            }
        }
        None
    }
    /// Reads the trimmed, non-empty "contentUrl" out of a JSON-LD node.
    fn extract_stream_url(json_ld: &Value) -> Option<String> {
        json_ld
            .get("contentUrl")
            .and_then(Value::as_str)
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(str::to_string)
    }
fn extract_stream_url_from_html(text: &str) -> Option<String> {
Regex::new(r#""contentUrl"\s*:\s*"([^"]+)""#)
.ok()?
.captures(text)
.and_then(|captures| captures.get(1))
.map(|value| value.as_str().trim().to_string())
.filter(|value| !value.is_empty())
}
}
impl crate::proxies::Proxy for PimpbunnyProxy {
    /// Resolves a pimpbunny video-page URL to its direct stream URL.
    ///
    /// Warms up a root session for cookies, fetches the page, then reads
    /// the JSON-LD metadata; falls back to a raw "contentUrl" scrape.
    /// Returns an empty string on any failure.
    async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
        let Some(detail_url) = Self::normalize_detail_url(&url) else {
            return String::new();
        };
        let mut requester = requester.get_ref().clone();
        Self::warm_root_session(&mut requester).await;
        let headers = Self::headers_with_cookies(&requester, &detail_url, Self::root_referer());
        let text = match requester
            .get_with_headers(&detail_url, headers, Some(Version::HTTP_2))
            .await
        {
            Ok(text) => text,
            Err(_) => return String::new(),
        };
        Self::extract_json_ld_video(&text)
            .and_then(|json_ld| Self::extract_stream_url(&json_ld))
            .or_else(|| Self::extract_stream_url_from_html(&text))
            .unwrap_or_default()
    }
}
// Unit tests for the pimpbunny allow-list and JSON-LD stream extraction.
#[cfg(test)]
mod tests {
    use super::PimpbunnyProxy;
    // Only https pimpbunny pages pass; screenshots and foreign hosts fail.
    #[test]
    fn allows_only_pimpbunny_detail_urls() {
        assert!(PimpbunnyProxy::is_allowed_detail_url(
            "https://pimpbunny.com/videos/example-video/"
        ));
        assert!(PimpbunnyProxy::is_allowed_detail_url(
            "https://www.pimpbunny.com/video/example/"
        ));
        assert!(!PimpbunnyProxy::is_allowed_detail_url(
            "http://pimpbunny.com/videos/example-video/"
        ));
        assert!(!PimpbunnyProxy::is_allowed_detail_url(
            "https://pimpbunny.com/contents/videos_screenshots/1/2/3.jpg"
        ));
        assert!(!PimpbunnyProxy::is_allowed_detail_url(
            "https://example.com/videos/example-video/"
        ));
    }
    // A top-level contentUrl object is treated as the video node.
    #[test]
    fn extracts_content_url_from_json_ld() {
        let html = r#"
            <script type="application/ld+json">{"contentUrl":"https://cdn.example/video.mp4"}</script>
        "#;
        let json_ld = PimpbunnyProxy::extract_json_ld_video(html).expect("json-ld should parse");
        assert_eq!(
            PimpbunnyProxy::extract_stream_url(&json_ld).as_deref(),
            Some("https://cdn.example/video.mp4")
        );
    }
    // A VideoObject nested inside an @graph array is found too.
    #[test]
    fn extracts_video_object_from_graph_script() {
        let html = r#"
            <script type="application/ld+json">
            {"@graph":[{"@type":"BreadcrumbList"},{"@type":"VideoObject","contentUrl":"https://cdn.example/graph.mp4"}]}
            </script>
        "#;
        let json_ld =
            PimpbunnyProxy::extract_json_ld_video(html).expect("video object should parse");
        assert_eq!(
            PimpbunnyProxy::extract_stream_url(&json_ld).as_deref(),
            Some("https://cdn.example/graph.mp4")
        );
    }
    // The raw-HTML regex fallback still finds a bare contentUrl.
    #[test]
    fn falls_back_to_raw_content_url_match() {
        let html = r#"{"contentUrl":"https://cdn.example/fallback.mp4"}"#;
        assert_eq!(
            PimpbunnyProxy::extract_stream_url_from_html(html).as_deref(),
            Some("https://cdn.example/fallback.mp4")
        );
    }
}

View File

@@ -0,0 +1,169 @@
use ntex::http::header::{CONTENT_LENGTH, CONTENT_TYPE};
use ntex::{
http::Response,
web::{self, HttpRequest, error},
};
use url::Url;
use wreq::Version;
use crate::util::requester::Requester;
// Browser-like request identity used for all upstream fetches.
const FIREFOX_USER_AGENT: &str =
    "Mozilla/5.0 (X11; Linux x86_64; rv:147.0) Gecko/20100101 Firefox/147.0";
// Accept header for HTML page loads (session warm-up requests).
const HTML_ACCEPT: &str =
    "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8";
// Accept header for thumbnail/image fetches.
const IMAGE_ACCEPT: &str = "image/avif,image/webp,image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5";
/// Referer value and session warm-up target: the pimpbunny site root.
fn root_referer() -> &'static str {
    "https://pimpbunny.com/"
}
/// Browser-like headers for warming up the pimpbunny root page.
fn root_html_headers() -> Vec<(String, String)> {
    [
        ("Referer", root_referer()),
        ("User-Agent", FIREFOX_USER_AGENT),
        ("Accept", HTML_ACCEPT),
        ("Accept-Language", "en-US,en;q=0.9"),
    ]
    .into_iter()
    .map(|(name, value)| (name.to_string(), value.to_string()))
    .collect()
}
/// Browser-like headers for image fetches; appends the session Cookie
/// header for the image host when the requester's jar already holds one.
fn image_headers(requester: &Requester, image_url: &str) -> Vec<(String, String)> {
    let mut headers = vec![
        ("Referer".to_string(), root_referer().to_string()),
        ("User-Agent".to_string(), FIREFOX_USER_AGENT.to_string()),
        ("Accept".to_string(), IMAGE_ACCEPT.to_string()),
        ("Accept-Language".to_string(), "en-US,en;q=0.9".to_string()),
    ];
    if let Some(cookie) = requester.cookie_header_for_url(image_url) {
        headers.push(("Cookie".to_string(), cookie));
    }
    headers
}
/// Only https pimpbunny screenshot paths may be proxied as thumbnails.
fn is_allowed_thumb_url(url: &str) -> bool {
    let Ok(parsed) = Url::parse(url) else {
        return false;
    };
    let host_ok = parsed
        .host_str()
        .is_some_and(|host| host == "pimpbunny.com" || host == "www.pimpbunny.com");
    parsed.scheme() == "https"
        && host_ok
        && parsed.path().starts_with("/contents/videos_screenshots/")
}
/// HTTP handler: proxies a pimpbunny screenshot/thumbnail image.
///
/// Validates the target against the allow-list, warms up a root session
/// for cookies, and retries up to two more times (root warm-up, then an
/// image-URL warm-up) when the upstream rejects the fetch.
pub async fn get_image(
    req: HttpRequest,
    requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> {
    let endpoint = req.match_info().query("endpoint").to_string();
    let image_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
        endpoint
    } else {
        format!("https://{}", endpoint.trim_start_matches('/'))
    };
    if !is_allowed_thumb_url(&image_url) {
        return Ok(web::HttpResponse::BadRequest().finish());
    }
    let mut requester = requester.get_ref().clone();
    // Warm-up 1: load the site root so session cookies are in the jar.
    let _ = requester
        .get_with_headers(root_referer(), root_html_headers(), Some(Version::HTTP_11))
        .await;
    let mut headers = image_headers(&requester, image_url.as_str());
    let mut upstream = requester
        .get_raw_with_headers(image_url.as_str(), headers.clone())
        .await
        .ok();
    let needs_warmup = upstream
        .as_ref()
        .map(|response| !response.status().is_success())
        .unwrap_or(true);
    if needs_warmup {
        // Warm-up 2: repeat the root fetch and retry with fresh cookies.
        let _ = requester
            .get_with_headers(root_referer(), root_html_headers(), Some(Version::HTTP_11))
            .await;
        headers = image_headers(&requester, image_url.as_str());
        upstream = requester
            .get_raw_with_headers(image_url.as_str(), headers.clone())
            .await
            .ok();
    }
    let needs_image_specific_warmup = upstream
        .as_ref()
        .map(|response| !response.status().is_success())
        .unwrap_or(true);
    if needs_image_specific_warmup {
        // Warm-up 3: hit the image URL itself as an HTML-style request to
        // pick up any per-resource cookies, then retry once more.
        let _ = requester
            .get_with_headers(image_url.as_str(), headers.clone(), Some(Version::HTTP_11))
            .await;
        headers = image_headers(&requester, image_url.as_str());
        upstream = requester
            .get_raw_with_headers(image_url.as_str(), headers)
            .await
            .ok();
    }
    let Some(upstream) = upstream.filter(|response| response.status().is_success()) else {
        return Ok(web::HttpResponse::NotFound().finish());
    };
    let status = upstream.status();
    let headers = upstream.headers().clone();
    let bytes = upstream.bytes().await.map_err(error::ErrorBadGateway)?;
    // Mirror upstream status, Content-Type, and Content-Length.
    let mut resp = Response::build(status);
    if let Some(ct) = headers.get(CONTENT_TYPE) {
        if let Ok(ct_str) = ct.to_str() {
            resp.set_header(CONTENT_TYPE, ct_str);
        }
    }
    if let Some(cl) = headers.get(CONTENT_LENGTH) {
        if let Ok(cl_str) = cl.to_str() {
            resp.set_header(CONTENT_LENGTH, cl_str);
        }
    }
    Ok(resp.body(bytes.to_vec()))
}
// Unit tests for the pimpbunny thumbnail allow-list.
#[cfg(test)]
mod tests {
    use super::is_allowed_thumb_url;
    // https screenshot paths on either pimpbunny host are accepted.
    #[test]
    fn allows_expected_pimpbunny_thumb_paths() {
        assert!(is_allowed_thumb_url(
            "https://pimpbunny.com/contents/videos_screenshots/517000/517329/800x450/1.jpg"
        ));
        assert!(is_allowed_thumb_url(
            "https://www.pimpbunny.com/contents/videos_screenshots/1/2/800x450/3.webp"
        ));
    }
    // Plain http, non-screenshot paths, and foreign hosts are rejected.
    #[test]
    fn rejects_non_thumb_or_non_pimpbunny_urls() {
        assert!(!is_allowed_thumb_url(
            "http://pimpbunny.com/contents/videos_screenshots/x.jpg"
        ));
        assert!(!is_allowed_thumb_url(
            "https://pimpbunny.com/videos/example-video/"
        ));
        assert!(!is_allowed_thumb_url(
            "https://example.com/contents/videos_screenshots/x.jpg"
        ));
    }
}

369
src/proxies/porndish.rs Normal file
View File

@@ -0,0 +1,369 @@
use ntex::web;
use regex::Regex;
use std::process::Command;
use url::Url;
use crate::util::requester::Requester;
/// Stateless proxy that resolves porndish.com pages to stream URLs via the
/// site's embedded myvidplay/vidara players.
#[derive(Debug, Clone)]
pub struct PorndishProxy {}
impl PorndishProxy {
    /// Creates a new stateless proxy instance.
    pub fn new() -> Self {
        Self {}
    }
fn normalize_detail_url(endpoint: &str) -> Option<String> {
let endpoint = endpoint.trim();
if endpoint.is_empty() {
return None;
}
if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
Some(endpoint.to_string())
} else {
Some(format!("https://{}", endpoint.trim_start_matches('/')))
}
}
    /// Parses a URL, mapping parse errors to `None`.
    fn parse_url(url: &str) -> Option<Url> {
        Url::parse(url).ok()
    }
fn is_porndish_host(host: &str) -> bool {
matches!(host, "www.porndish.com" | "porndish.com")
}
fn is_myvidplay_host(host: &str) -> bool {
matches!(host, "myvidplay.com" | "www.myvidplay.com")
}
fn is_vidara_host(host: &str) -> bool {
matches!(host, "vidara.so" | "www.vidara.so")
}
    /// Accepts only https porndish video pages under "/porn/".
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(url) = Self::parse_url(url) else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        Self::is_porndish_host(host) && url.path().starts_with("/porn/")
    }
    /// Accepts only https myvidplay embed pages under "/e/".
    fn is_allowed_myvidplay_iframe_url(url: &str) -> bool {
        let Some(url) = Self::parse_url(url) else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        Self::is_myvidplay_host(host) && url.path().starts_with("/e/")
    }
    /// Accepts only https myvidplay token endpoints under "/pass_md5/".
    fn is_allowed_myvidplay_pass_url(url: &str) -> bool {
        let Some(url) = Self::parse_url(url) else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        Self::is_myvidplay_host(host) && url.path().starts_with("/pass_md5/")
    }
    /// Accepts only https vidara embed pages under "/e/".
    fn is_allowed_vidara_iframe_url(url: &str) -> bool {
        let Some(url) = Self::parse_url(url) else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        Self::is_vidara_host(host) && url.path().starts_with("/e/")
    }
    /// Maps a vidara embed URL to its stream API endpoint, keyed by the
    /// last non-empty path segment (the filecode).
    fn vidara_api_url(iframe_url: &str) -> Option<String> {
        let url = Self::parse_url(iframe_url)?;
        if !Self::is_allowed_vidara_iframe_url(iframe_url) {
            return None;
        }
        let filecode = url
            .path_segments()?
            .filter(|segment| !segment.is_empty())
            .next_back()?
            .to_string();
        if filecode.is_empty() {
            return None;
        }
        Some(format!("https://vidara.so/api/stream?filecode={filecode}"))
    }
    /// Compiles a regex pattern, mapping compile errors to `None`.
    fn regex(value: &str) -> Option<Regex> {
        Regex::new(value).ok()
    }
    /// Fetches a URL by shelling out to python3 + curl_cffi so the request
    /// carries a Chrome TLS fingerprint (`impersonate="chrome"`), which the
    /// in-process HTTP client presumably cannot provide for these hosts.
    ///
    /// NOTE(review): this is a runtime dependency on python3 with curl_cffi
    /// installed on the host — it is not expressed in Cargo; confirm the
    /// deployment image installs it. Returns `None` on spawn failure,
    /// non-zero exit, or an HTTP status >= 400 (the script exits 1).
    async fn fetch_with_curl_cffi(url: &str, referer: Option<&str>) -> Option<String> {
        let url = url.to_string();
        let referer = referer.unwrap_or("").to_string();
        // spawn_blocking: Command::output is blocking and must not run on
        // the async executor threads.
        let output = tokio::task::spawn_blocking(move || {
            Command::new("python3")
                .arg("-c")
                .arg(
                    r#"
import sys
from curl_cffi import requests
url = sys.argv[1]
referer = sys.argv[2] if len(sys.argv) > 2 else ""
headers = {}
if referer:
    headers["Referer"] = referer
response = requests.get(
    url,
    impersonate="chrome",
    timeout=30,
    allow_redirects=True,
    headers=headers,
)
if response.status_code >= 400:
    sys.exit(1)
sys.stdout.buffer.write(response.content)
"#,
                )
                .arg(url)
                .arg(referer)
                .output()
        })
        .await
        .ok()?
        .ok()?;
        if !output.status.success() {
            return None;
        }
        Some(String::from_utf8_lossy(&output.stdout).to_string())
    }
    /// Performs a single non-following GET (same curl_cffi mechanism) and
    /// returns the Location header of the first redirect, if any.
    async fn resolve_first_redirect(url: &str) -> Option<String> {
        let url = url.to_string();
        let output = tokio::task::spawn_blocking(move || {
            Command::new("python3")
                .arg("-c")
                .arg(
                    r#"
import sys
from curl_cffi import requests
url = sys.argv[1]
response = requests.get(
    url,
    impersonate="chrome",
    timeout=30,
    allow_redirects=False,
)
location = response.headers.get("location", "")
if location:
    sys.stdout.write(location)
"#,
                )
                .arg(url)
                .output()
        })
        .await
        .ok()?
        .ok()?;
        let location = String::from_utf8_lossy(&output.stdout).trim().to_string();
        if location.is_empty() {
            None
        } else {
            Some(location)
        }
    }
    /// Finds JS string constants named `...Content` in the page, JSON-decodes
    /// their escaped values, and keeps the fragments containing an <iframe>.
    fn extract_iframe_fragments(html: &str) -> Vec<String> {
        let Some(regex) = Self::regex(r#"const\s+[A-Za-z0-9_]+Content\s*=\s*"((?:\\.|[^"\\])*)";"#)
        else {
            return vec![];
        };
        let mut fragments = Vec::new();
        for captures in regex.captures_iter(html) {
            let Some(value) = captures.get(1).map(|value| value.as_str()) else {
                continue;
            };
            // Re-quote the captured value so serde_json undoes the JS string
            // escaping; unparsable values decode to "" and are dropped below.
            let encoded = format!("\"{value}\"");
            let decoded = serde_json::from_str::<String>(&encoded).unwrap_or_default();
            if decoded.contains("<iframe") {
                fragments.push(decoded);
            }
        }
        fragments
    }
    /// Pulls the src attribute out of an iframe HTML fragment.
    fn parse_embed_source(fragment: &str) -> Option<String> {
        let regex = Self::regex(r#"(?is)<iframe[^>]+src="([^"]+)"[^>]*>"#)?;
        regex
            .captures(fragment)
            .and_then(|captures| captures.get(1))
            .map(|value| value.as_str().to_string())
    }
    /// Resolves a myvidplay embed to a direct stream URL.
    ///
    /// Flow: fetch the embed page, find the `$.get('/pass_md5/...')` path,
    /// call it to obtain a base URL, append a pseudo-random suffix plus the
    /// token and timestamp (mimicking the player's own URL construction —
    /// presumably a cache-buster; confirm against the live player script),
    /// then follow the first redirect if one is issued.
    async fn resolve_myvidplay_stream(iframe_url: &str) -> Option<String> {
        if !Self::is_allowed_myvidplay_iframe_url(iframe_url) {
            return None;
        }
        let html =
            Self::fetch_with_curl_cffi(iframe_url, Some("https://www.porndish.com/")).await?;
        let pass_regex = Self::regex(r#"\$\.get\(\s*['"](/pass_md5/[^'"]+)['"]"#)?;
        let path = pass_regex
            .captures(&html)
            .and_then(|captures| captures.get(1))
            .map(|value| value.as_str().to_string())?;
        // The token is the last path segment of the pass_md5 URL.
        let token = path.trim_end_matches('/').rsplit('/').next()?.to_string();
        if token.is_empty() {
            return None;
        }
        let pass_url = if path.starts_with("http://") || path.starts_with("https://") {
            path
        } else {
            let base = Url::parse(iframe_url).ok()?;
            base.join(&path).ok()?.to_string()
        };
        if !Self::is_allowed_myvidplay_pass_url(&pass_url) {
            return None;
        }
        let base = Self::fetch_with_curl_cffi(&pass_url, Some(iframe_url))
            .await?
            .trim()
            .to_string();
        // "RELOAD" is an upstream sentinel for an expired/invalid token.
        if base.is_empty() || base == "RELOAD" || !base.starts_with("http") {
            return None;
        }
        let chars = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .ok()?
            .as_millis();
        // Deterministic-from-clock 10-char alphanumeric suffix.
        let suffix = (0..10)
            .map(|index| {
                let pos = ((now + (index as u128 * 17)) % chars.len() as u128) as usize;
                chars[pos] as char
            })
            .collect::<String>();
        let stream_url = format!("{base}{suffix}?token={token}&expiry={now}");
        Some(
            Self::resolve_first_redirect(&stream_url)
                .await
                .unwrap_or(stream_url),
        )
    }
    /// Resolves a vidara embed: calls the site's stream API and returns the
    /// non-empty "streaming_url" field from its JSON response.
    async fn resolve_vidara_stream(iframe_url: &str) -> Option<String> {
        let api_url = Self::vidara_api_url(iframe_url)?;
        let response = Self::fetch_with_curl_cffi(&api_url, Some(iframe_url)).await?;
        let json: serde_json::Value = serde_json::from_str(&response).ok()?;
        let stream_url = json
            .get("streaming_url")
            .and_then(|value| value.as_str())?
            .trim()
            .to_string();
        if stream_url.is_empty() {
            return None;
        }
        Some(stream_url)
    }
/// Resolves a porndish detail-page locator to a playable stream URL.
///
/// Scans the page's iframes, preferring a vidara embed (resolved
/// immediately) and keeping the first myvidplay embed as a fallback.
/// Returns an empty string on any failure; the `Requester` state is unused
/// because fetching goes through the curl-cffi helper instead.
pub async fn get_video_url(
    &self,
    url: String,
    _requester: web::types::State<Requester>,
) -> String {
    let Some(detail_url) = Self::normalize_detail_url(&url) else {
        return String::new();
    };
    // SSRF guard: only allowed porndish detail pages are fetched.
    if !Self::is_allowed_detail_url(&detail_url) {
        return String::new();
    }
    let Some(html) =
        Self::fetch_with_curl_cffi(&detail_url, Some("https://www.porndish.com/")).await
    else {
        return String::new();
    };
    let mut fallback_iframe: Option<String> = None;
    for fragment in Self::extract_iframe_fragments(&html) {
        let Some(iframe_url) = Self::parse_embed_source(&fragment) else {
            continue;
        };
        // Absolutize protocol-relative URLs; drop anything else non-absolute.
        let iframe_url =
            if iframe_url.starts_with("http://") || iframe_url.starts_with("https://") {
                iframe_url
            } else if iframe_url.starts_with("//") {
                format!("https:{iframe_url}")
            } else {
                continue;
            };
        // Vidara embeds resolve fastest, so try them as soon as one appears.
        if Self::is_allowed_vidara_iframe_url(&iframe_url) {
            if let Some(stream_url) = Self::resolve_vidara_stream(&iframe_url).await {
                return stream_url;
            }
        }
        // Remember only the first myvidplay embed for the fallback path.
        if fallback_iframe.is_none() && Self::is_allowed_myvidplay_iframe_url(&iframe_url) {
            fallback_iframe = Some(iframe_url);
        }
    }
    if let Some(iframe_url) = fallback_iframe {
        if let Some(stream_url) = Self::resolve_myvidplay_stream(&iframe_url).await {
            return stream_url;
        }
    }
    String::new()
}
}
#[cfg(test)]
mod tests {
    use super::PorndishProxy;

    /// Only https porndish.com `/porn/` detail pages may pass the URL guard.
    #[test]
    fn allows_only_porndish_detail_urls() {
        let cases = [
            ("https://www.porndish.com/porn/example/", true),
            ("https://www.porndish.com/search/example/", false),
            ("https://example.com/porn/example/", false),
        ];
        for (url, expected) in cases {
            assert_eq!(PorndishProxy::is_allowed_detail_url(url), expected, "{url}");
        }
    }
}

View File

@@ -0,0 +1,80 @@
use ntex::http::header::CONTENT_TYPE;
use ntex::{
http::Response,
web::{self, HttpRequest, error},
};
use std::process::Command;
use url::Url;
use crate::util::requester::Requester;
/// SSRF guard for the thumbnail proxy: accepts only https porndish.com
/// URLs under `/wp-content/uploads/`.
fn is_allowed_thumb_url(url: &str) -> bool {
    match Url::parse(url) {
        Ok(parsed) => {
            parsed.scheme() == "https"
                && parsed
                    .host_str()
                    .map(|host| host == "www.porndish.com" || host == "porndish.com")
                    .unwrap_or(false)
                && parsed.path().starts_with("/wp-content/uploads/")
        }
        Err(_) => false,
    }
}
/// Proxies a porndish thumbnail through a spawned `python3` + curl_cffi
/// subprocess, presumably to match a browser TLS fingerprint that the native
/// Rust client lacks — TODO confirm that direct fetches are actually blocked.
///
/// The helper script smuggles the upstream content-type over stderr and the
/// image bytes over stdout; the handler reassembles them into the response.
pub async fn get_image(
    req: HttpRequest,
    _requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> {
    let endpoint = req.match_info().query("endpoint").to_string();
    // Accept either a full URL or a scheme-less "host/path" tail.
    let image_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
        endpoint
    } else {
        format!("https://{}", endpoint.trim_start_matches('/'))
    };
    // SSRF guard before handing the URL to the subprocess.
    if !is_allowed_thumb_url(&image_url) {
        return Ok(web::HttpResponse::BadRequest().finish());
    }
    // Command::output() blocks, so run it off the async executor.
    // The URL is passed as argv (not interpolated), so no shell injection.
    let output = tokio::task::spawn_blocking(move || {
        Command::new("python3")
            .arg("-c")
            .arg(
                r#"
import sys
from curl_cffi import requests
url = sys.argv[1]
response = requests.get(
    url,
    impersonate="chrome",
    timeout=30,
    allow_redirects=True,
    headers={"Referer": "https://www.porndish.com/"},
)
if response.status_code >= 400:
    sys.stderr.write(f"status={response.status_code}\n")
    sys.exit(1)
sys.stderr.write(response.headers.get("content-type", "application/octet-stream"))
sys.stdout.buffer.write(response.content)
"#,
            )
            .arg(image_url)
            .output()
    })
    .await
    // First ? covers the join error, second the spawn/IO error.
    .map_err(error::ErrorBadGateway)?
    .map_err(error::ErrorBadGateway)?;
    // Non-zero exit (HTTP >= 400 upstream or script failure) becomes 404.
    if !output.status.success() {
        return Ok(web::HttpResponse::NotFound().finish());
    }
    // stderr carries only the content-type on the success path.
    let content_type = String::from_utf8_lossy(&output.stderr).trim().to_string();
    let mut resp = Response::build(ntex::http::StatusCode::OK);
    if !content_type.is_empty() {
        resp.set_header(CONTENT_TYPE, content_type);
    }
    Ok(resp.body(output.stdout))
}

243
src/proxies/pornhd3x.rs Normal file
View File

@@ -0,0 +1,243 @@
use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering};
use ntex::web;
use regex::Regex;
use serde_json::Value;
use url::Url;
use wreq::Version;
use crate::util::requester::Requester;
// Canonical site origin, used to absolutize relative source URLs.
const BASE_URL: &str = "https://www.pornhd3x.tv";
// Site-embedded secret appended when hashing the get_sources request
// (values mirror the site's player JS; they break if the site rotates them).
const SOURCE_SECRET: &str = "98126avrbi6m49vd7shxkn985";
// Fixed prefix/suffix wrapped around the episode id to form the nonce
// cookie's name — see build_source_cookie_name.
const SOURCE_COOKIE_PREFIX: &str = "826avrbi6m49vd7shxkn985m";
const SOURCE_COOKIE_SUFFIX: &str = "k06twz87wwxtp3dqiicks2df";
/// Stream resolver for pornhd3x.tv detail pages.
#[derive(Debug, Clone)]
pub struct Pornhd3xProxy {
    // Monotonic request counter, shared across clones of this proxy;
    // feeds the per-request nonce sent to the get_sources endpoint.
    source_counter: Arc<AtomicU32>,
}
impl Pornhd3xProxy {
    /// Creates a proxy with a fresh request counter.
    pub fn new() -> Self {
        Self {
            source_counter: Arc::new(AtomicU32::new(0)),
        }
    }

    /// Splits a proxied endpoint into `(detail URL, optional quality label)`.
    ///
    /// Accepts either a full URL or a scheme-less "host/path" tail, with an
    /// optional `/__quality__/<label>` suffix appended by our own routes.
    /// Returns `None` unless the result is an allowed pornhd3x detail URL.
    fn normalize_detail_request(endpoint: &str) -> Option<(String, Option<String>)> {
        let endpoint = endpoint.trim().trim_start_matches('/');
        if endpoint.is_empty() {
            return None;
        }
        // "%20" is the only escape our routes produce inside the label.
        let (detail_part, quality) = match endpoint.split_once("/__quality__/") {
            Some((detail, quality)) => {
                (detail, Some(quality.replace("%20", " ").trim().to_string()))
            }
            None => (endpoint, None),
        };
        let detail_url =
            if detail_part.starts_with("http://") || detail_part.starts_with("https://") {
                detail_part.to_string()
            } else {
                format!("https://{}", detail_part.trim_start_matches('/'))
            };
        Self::is_allowed_detail_url(&detail_url)
            .then_some((detail_url, quality.filter(|value| !value.is_empty())))
    }

    /// SSRF guard: only https pornhd3x.tv `/movies/` pages may be fetched.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Ok(url) = Url::parse(url) else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        (host == "www.pornhd3x.tv" || host == "pornhd3x.tv") && url.path().starts_with("/movies/")
    }

    /// Makes scraped URL fragments absolute and https; empty input stays empty.
    fn normalize_url(raw: &str) -> String {
        let value = raw.trim();
        if value.is_empty() {
            return String::new();
        }
        if value.starts_with("//") {
            return format!("https:{value}");
        }
        if value.starts_with('/') {
            return format!("{BASE_URL}{value}");
        }
        if value.starts_with("http://") {
            return value.replacen("http://", "https://", 1);
        }
        value.to_string()
    }

    /// Extracts the episode id from detail markup: either the `#uuid` input's
    /// value or an `episode-id` attribute, whichever appears first.
    fn extract_episode_id(html: &str) -> Option<String> {
        Regex::new(r#"(?is)(?:id=["']uuid["'][^>]*value=["']|episode-id=["'])([A-Za-z0-9]+)"#)
            .ok()?
            .captures(html)
            .and_then(|captures| captures.get(1))
            .map(|value| value.as_str().to_string())
    }

    /// Cookie name the site's player derives from the episode id.
    fn build_source_cookie_name(episode_id: &str) -> String {
        format!("{SOURCE_COOKIE_PREFIX}{episode_id}{SOURCE_COOKIE_SUFFIX}")
    }

    /// Hex md5 of `episode_id + nonce + secret`, as expected by get_sources.
    fn build_source_hash(episode_id: &str, nonce: &str) -> String {
        format!(
            "{:x}",
            md5::compute(format!("{episode_id}{nonce}{SOURCE_SECRET}"))
        )
    }

    /// Returns the next request count plus a six-hex-digit nonce derived from
    /// it (wrapped so it always fits the site's expected width).
    fn next_source_request(&self) -> (u32, String) {
        let count = self.source_counter.fetch_add(1, Ordering::Relaxed) + 1;
        let nonce = format!("{:06x}", count % 0xFF_FFFF);
        (count, nonce)
    }

    /// Calls the site's get_sources AJAX endpoint and parses the JSON reply.
    ///
    /// The nonce cookie must agree with the hash embedded in the URL or the
    /// site rejects the request; existing jar cookies for the origin are kept.
    async fn fetch_sources(
        &self,
        requester: &mut Requester,
        referer: &str,
        episode_id: &str,
    ) -> Option<Value> {
        let (count, nonce) = self.next_source_request();
        let source_url = format!(
            "{BASE_URL}/ajax/get_sources/{episode_id}/{hash}?count={count}&mobile=true",
            hash = Self::build_source_hash(episode_id, &nonce),
        );
        let existing_cookie = requester.cookie_header_for_url(&source_url);
        let cookie_value = format!("{}={nonce}", Self::build_source_cookie_name(episode_id));
        let combined_cookie = match existing_cookie {
            Some(existing) if !existing.trim().is_empty() => format!("{existing}; {cookie_value}"),
            _ => cookie_value,
        };
        let response = requester
            .get_with_headers(
                &source_url,
                vec![
                    ("Cookie".to_string(), combined_cookie),
                    ("Referer".to_string(), referer.to_string()),
                    ("X-Requested-With".to_string(), "XMLHttpRequest".to_string()),
                    (
                        "Accept".to_string(),
                        "application/json, text/javascript, */*; q=0.01".to_string(),
                    ),
                ],
                Some(Version::HTTP_11),
            )
            .await
            .ok()?;
        serde_json::from_str::<Value>(&response).ok()
    }

    /// Picks a playable URL from the get_sources payload.
    ///
    /// Prefers an exact (case-insensitive, trimmed) quality-label match, then
    /// falls back to the first source carrying a non-empty file URL.
    fn select_source_url(payload: &Value, quality: Option<&str>) -> Option<String> {
        let sources = payload
            .get("playlist")
            .and_then(Value::as_array)
            .into_iter()
            .flatten()
            .flat_map(|playlist| {
                playlist
                    .get("sources")
                    .and_then(Value::as_array)
                    .into_iter()
                    .flatten()
            })
            .collect::<Vec<_>>();
        if let Some(quality) = quality {
            let quality = quality.trim().to_ascii_lowercase();
            for source in &sources {
                let label = source
                    .get("label")
                    .and_then(Value::as_str)
                    .unwrap_or_default()
                    .trim()
                    .to_ascii_lowercase();
                if label == quality {
                    // A matching label with a missing or empty file URL must
                    // not abort the whole lookup (the previous `?` here
                    // returned None for the entire function); fall through to
                    // the generic scan instead.
                    if let Some(file) = source.get("file").and_then(Value::as_str) {
                        let url = Self::normalize_url(file);
                        if !url.is_empty() {
                            return Some(url);
                        }
                    }
                }
            }
        }
        for source in sources {
            let Some(file) = source.get("file").and_then(Value::as_str) else {
                continue;
            };
            let url = Self::normalize_url(file);
            if !url.is_empty() {
                return Some(url);
            }
        }
        None
    }
}
impl crate::proxies::Proxy for Pornhd3xProxy {
    /// Resolves a proxied endpoint to a direct stream URL, or returns an
    /// empty string on any failure along the way.
    async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
        let Some((detail_url, quality)) = Self::normalize_detail_request(&url) else {
            return String::new();
        };
        let mut client = requester.get_ref().clone();
        let Ok(detail_html) = client.get(&detail_url, Some(Version::HTTP_11)).await else {
            return String::new();
        };
        let Some(episode_id) = Self::extract_episode_id(&detail_html) else {
            return String::new();
        };
        match self
            .fetch_sources(&mut client, &detail_url, &episode_id)
            .await
        {
            Some(payload) => {
                Self::select_source_url(&payload, quality.as_deref()).unwrap_or_default()
            }
            None => String::new(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::Pornhd3xProxy;

    /// Scheme-less endpoints with a quality suffix split into URL + label.
    #[test]
    fn normalizes_detail_endpoint_and_quality() {
        let parsed = Pornhd3xProxy::normalize_detail_request(
            "www.pornhd3x.tv/movies/example-video/__quality__/720p",
        );
        let (url, quality) = parsed.expect("proxy target should parse");
        assert_eq!(url, "https://www.pornhd3x.tv/movies/example-video");
        assert_eq!(quality.as_deref(), Some("720p"));
    }

    /// The `#uuid` input wins over later `episode-id` attributes.
    #[test]
    fn extracts_episode_id_from_detail_markup() {
        let html = r#"
<input id="uuid" value="49Q27JL3HCPVNJQN">
<a class="btn-eps" episode-id="OTHER"></a>
"#;
        let episode = Pornhd3xProxy::extract_episode_id(html);
        assert_eq!(episode.as_deref(), Some("49Q27JL3HCPVNJQN"));
    }
}

301
src/proxies/shooshtime.rs Normal file
View File

@@ -0,0 +1,301 @@
use ntex::http::Response;
use ntex::http::header::{CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE};
use ntex::web::{self, HttpRequest, error};
use regex::Regex;
use url::Url;
use crate::util::requester::Requester;
const BASE_URL: &str = "https://shooshtime.com";
/// One downloadable rendition discovered in a page's flashvars block.
#[derive(Debug, Clone)]
struct SourceCandidate {
    // Absolute https media URL (a shooshtime /get_file/ path).
    url: String,
    // Human-readable label such as "480p" / "720p", used for selection.
    quality: String,
}
/// Stateless scraper for shooshtime.com video pages; all logic lives in
/// associated functions, the struct only anchors the impl.
#[derive(Debug, Clone)]
pub struct ShooshtimeProxy {}
impl ShooshtimeProxy {
    pub fn new() -> Self {
        Self {}
    }

    /// Splits a proxied endpoint into `(detail URL, optional quality label)`.
    ///
    /// Accepts a full URL or scheme-less tail, with an optional
    /// `/__quality__/<label>` suffix added by our routes. Video URLs get a
    /// trailing slash (the site 301s without it). Returns `None` unless the
    /// result is an allowed shooshtime detail URL.
    fn normalize_detail_request(endpoint: &str) -> Option<(String, Option<String>)> {
        let endpoint = endpoint.trim().trim_start_matches('/');
        if endpoint.is_empty() {
            return None;
        }
        // "%20" is the only escape our routes produce inside the label.
        let (detail_part, quality) = match endpoint.split_once("/__quality__/") {
            Some((detail, quality)) => {
                (detail, Some(quality.replace("%20", " ").trim().to_string()))
            }
            None => (endpoint, None),
        };
        let mut detail_url =
            if detail_part.starts_with("http://") || detail_part.starts_with("https://") {
                detail_part.to_string()
            } else {
                format!("https://{}", detail_part.trim_start_matches('/'))
            };
        if detail_url.contains("/videos/") && !detail_url.ends_with('/') {
            detail_url.push('/');
        }
        Self::is_allowed_detail_url(&detail_url)
            .then_some((detail_url, quality.filter(|value| !value.is_empty())))
    }

    /// SSRF guard: only https shooshtime.com `/videos/` pages may be fetched.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(url) = Url::parse(url).ok() else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        (host == "shooshtime.com" || host == "www.shooshtime.com")
            && url.path().starts_with("/videos/")
    }

    /// SSRF guard for media downloads: only the site's `/get_file/` paths.
    fn is_allowed_media_url(url: &str) -> bool {
        let Some(url) = Url::parse(url).ok() else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        (host == "shooshtime.com" || host == "www.shooshtime.com")
            && url.path().starts_with("/get_file/")
    }

    /// Makes scraped URL fragments absolute and https, undoing the JS
    /// string escaping (`\/` -> `/`) first; empty input stays empty.
    fn normalize_url(raw: &str) -> String {
        let value = raw.trim().replace("\\/", "/");
        if value.is_empty() {
            return String::new();
        }
        if value.starts_with("//") {
            return format!("https:{value}");
        }
        if value.starts_with('/') {
            return format!("{BASE_URL}{value}");
        }
        if value.starts_with("http://") {
            return value.replacen("http://", "https://", 1);
        }
        value
    }

    /// Compiles a regex, treating invalid patterns as "no match".
    fn regex(value: &str) -> Option<Regex> {
        Regex::new(value).ok()
    }

    /// Returns group 1 of `regex` in `block`, un-escaping JS slashes/quotes.
    fn extract_js_value(block: &str, regex: &Regex) -> Option<String> {
        regex
            .captures(block)
            .and_then(|value| value.get(1))
            .map(|value| value.as_str().replace("\\/", "/").replace("\\'", "'"))
    }

    /// Parses the page's `var flashvars = {...}` block into up to two
    /// candidates: the primary `video_url` and the alternate `video_alt_url`,
    /// each paired with its `_text` quality label (defaulting to 480p/720p
    /// when the label is absent). Disallowed media URLs are dropped.
    fn extract_sources(html: &str) -> Vec<SourceCandidate> {
        let Some(flashvars_regex) = Self::regex(r#"(?s)var\s+flashvars\s*=\s*\{(.*?)\};"#) else {
            return vec![];
        };
        let Some(flashvars) = flashvars_regex
            .captures(html)
            .and_then(|value| value.get(1))
            .map(|value| value.as_str().to_string())
        else {
            return vec![];
        };
        // Each flashvars entry looks like `key: '<single-quoted value>'`.
        let value_regex = |key: &str| Self::regex(&format!(r#"{key}:\s*'([^']*)'"#));
        let primary_url_regex = match value_regex("video_url") {
            Some(value) => value,
            None => return vec![],
        };
        let primary_quality_regex = match value_regex("video_url_text") {
            Some(value) => value,
            None => return vec![],
        };
        let alt_url_regex = match value_regex("video_alt_url") {
            Some(value) => value,
            None => return vec![],
        };
        let alt_quality_regex = match value_regex("video_alt_url_text") {
            Some(value) => value,
            None => return vec![],
        };
        let mut sources = Vec::new();
        if let Some(url) = Self::extract_js_value(&flashvars, &primary_url_regex) {
            let normalized = Self::normalize_url(&url);
            if !normalized.is_empty() && Self::is_allowed_media_url(&normalized) {
                sources.push(SourceCandidate {
                    url: normalized,
                    quality: Self::extract_js_value(&flashvars, &primary_quality_regex)
                        .unwrap_or_else(|| "480p".to_string()),
                });
            }
        }
        if let Some(url) = Self::extract_js_value(&flashvars, &alt_url_regex) {
            let normalized = Self::normalize_url(&url);
            if !normalized.is_empty() && Self::is_allowed_media_url(&normalized) {
                sources.push(SourceCandidate {
                    url: normalized,
                    quality: Self::extract_js_value(&flashvars, &alt_quality_regex)
                        .unwrap_or_else(|| "720p".to_string()),
                });
            }
        }
        sources
    }

    /// Numeric score of a quality label ("720p" -> 720, unparsable -> 0).
    // NOTE(review): digits-only scoring means a label like "4k" would score
    // 4, below "480p" — fine for the Np labels this site emits; confirm if
    // other label formats ever appear.
    fn quality_score(label: &str) -> u32 {
        label
            .chars()
            .filter(|value| value.is_ascii_digit())
            .collect::<String>()
            .parse::<u32>()
            .unwrap_or(0)
    }

    /// Picks a media URL from the page: an exact (case-insensitive) quality
    /// match when requested, otherwise the highest-scoring quality.
    fn select_source_url(html: &str, quality: Option<&str>) -> Option<String> {
        let sources = Self::extract_sources(html);
        if sources.is_empty() {
            return None;
        }
        if let Some(quality) = quality {
            let wanted = quality.trim().to_ascii_lowercase();
            if let Some(source) = sources
                .iter()
                .find(|source| source.quality.trim().to_ascii_lowercase() == wanted)
            {
                return Some(source.url.clone());
            }
        }
        sources
            .iter()
            .max_by_key(|source| Self::quality_score(&source.quality))
            .map(|source| source.url.clone())
    }
}
/// Relays a shooshtime media file through this server.
///
/// The `{endpoint}` tail is a detail-page locator plus an optional
/// `/__quality__/<label>` suffix. The handler scrapes the page, picks a
/// source, and forwards the client's Range header upstream and the
/// upstream's content/range headers back so browser seeking keeps working.
// NOTE(review): the full upstream body is buffered in memory before the
// reply is built — acceptable for range requests/clips, worth confirming
// for large full-file fetches.
pub async fn serve_media(
    req: HttpRequest,
    requester: web::types::State<Requester>,
) -> Result<impl web::Responder, web::Error> {
    let endpoint = req.match_info().query("endpoint").to_string();
    let Some((detail_url, quality)) = ShooshtimeProxy::normalize_detail_request(&endpoint) else {
        return Ok(web::HttpResponse::BadRequest().finish());
    };
    let mut requester = requester.get_ref().clone();
    let html = match requester.get(&detail_url, None).await {
        Ok(html) => html,
        Err(_) => return Ok(web::HttpResponse::BadGateway().finish()),
    };
    let Some(source_url) = ShooshtimeProxy::select_source_url(&html, quality.as_deref()) else {
        return Ok(web::HttpResponse::BadGateway().finish());
    };
    // The site checks the Referer on /get_file/ downloads.
    let mut headers = vec![("Referer".to_string(), detail_url)];
    // Forward the client's Range request so partial fetches/seeking work.
    if let Some(range) = req
        .headers()
        .get("Range")
        .and_then(|value| value.to_str().ok())
    {
        headers.push(("Range".to_string(), range.to_string()));
    }
    let upstream = match requester.get_raw_with_headers(&source_url, headers).await {
        Ok(response) => response,
        Err(_) => return Ok(web::HttpResponse::BadGateway().finish()),
    };
    // Capture status/headers before the body read consumes the response.
    let status = upstream.status();
    let upstream_headers = upstream.headers().clone();
    let bytes = upstream.bytes().await.map_err(error::ErrorBadGateway)?;
    // Mirror the headers the player needs for range playback.
    let mut response = Response::build(status);
    if let Some(value) = upstream_headers
        .get(CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
    {
        response.set_header(CONTENT_TYPE, value);
    }
    if let Some(value) = upstream_headers
        .get(CONTENT_LENGTH)
        .and_then(|value| value.to_str().ok())
    {
        response.set_header(CONTENT_LENGTH, value);
    }
    if let Some(value) = upstream_headers
        .get(CONTENT_RANGE)
        .and_then(|value| value.to_str().ok())
    {
        response.set_header(CONTENT_RANGE, value);
    }
    if let Some(value) = upstream_headers
        .get("Accept-Ranges")
        .and_then(|value| value.to_str().ok())
    {
        response.set_header("Accept-Ranges", value);
    }
    Ok(response.body(bytes.to_vec()))
}
#[cfg(test)]
mod tests {
    use super::ShooshtimeProxy;

    /// Video endpoints gain the https scheme and a trailing slash.
    #[test]
    fn normalizes_detail_endpoint_and_quality() {
        let parsed = ShooshtimeProxy::normalize_detail_request(
            "shooshtime.com/videos/example/123/__quality__/720p",
        );
        let (detail_url, quality) = parsed.expect("proxy target should parse");
        assert_eq!(detail_url, "https://shooshtime.com/videos/example/123/");
        assert_eq!(quality.as_deref(), Some("720p"));
    }

    /// Explicit quality wins; otherwise the highest label is chosen.
    #[test]
    fn selects_requested_or_best_quality() {
        let html = r#"
<script>
var flashvars = {
video_url: 'https://shooshtime.com/get_file/1/token/1/2/3.mp4/?x=1',
video_url_text: '480p',
video_alt_url: 'https://shooshtime.com/get_file/1/token/1/2/3_720p.mp4/?x=2',
video_alt_url_text: '720p'
};
</script>
"#;
        let requested = ShooshtimeProxy::select_source_url(html, Some("480p"));
        assert_eq!(
            requested.as_deref(),
            Some("https://shooshtime.com/get_file/1/token/1/2/3.mp4/?x=1")
        );
        let best = ShooshtimeProxy::select_source_url(html, None);
        assert_eq!(
            best.as_deref(),
            Some("https://shooshtime.com/get_file/1/token/1/2/3_720p.mp4/?x=2")
        );
    }
}

105
src/proxies/spankbang.rs Normal file
View File

@@ -0,0 +1,105 @@
use ntex::web;
use regex::Regex;
use wreq::Version;
use crate::util::requester::Requester;
/// Stream resolver for spankbang.com pages; stateless, the struct only
/// anchors the impl.
#[derive(Debug, Clone)]
pub struct SpankbangProxy {}
impl SpankbangProxy {
    pub fn new() -> Self {
        SpankbangProxy {}
    }

    /// Headers sent with every page fetch; the site checks the Referer.
    fn request_headers() -> Vec<(String, String)> {
        vec![(
            String::from("Referer"),
            String::from("https://spankbang.com/"),
        )]
    }

    /// Returns the `{...}` object literal (including the closing brace) that
    /// follows `var stream_data = ` in the page, or `None` if absent.
    fn extract_stream_data(text: &str) -> Option<&str> {
        const MARKER: &str = "var stream_data = ";
        let tail = &text[text.find(MARKER)? + MARKER.len()..];
        let close = tail.find("};")?;
        Some(&tail[..close + 1])
    }

    /// First URL in the single-quoted array under `key` in the stream_data
    /// object; empty arrays yield `None`.
    fn extract_first_stream_url(stream_data: &str, key: &str) -> Option<String> {
        let pattern = format!(r"'{}'\s*:\s*\[\s*'([^']+)'", regex::escape(key));
        Regex::new(&pattern)
            .ok()?
            .captures(stream_data)?
            .get(1)
            .map(|found| found.as_str().to_string())
    }

    /// Picks the best stream: HLS master first, then qualities high to low,
    /// then whatever the site marked as "main".
    fn select_best_stream_url(stream_data: &str) -> Option<String> {
        ["m3u8", "4k", "1080p", "720p", "480p", "320p", "240p", "main"]
            .iter()
            .find_map(|key| Self::extract_first_stream_url(stream_data, key))
    }

    /// Resolves a spankbang endpoint tail to a direct stream URL, or ""
    /// on any failure.
    pub async fn get_video_url(
        &self,
        url: String,
        requester: web::types::State<Requester>,
    ) -> String {
        let mut client = requester.get_ref().clone();
        let page_url = format!("https://spankbang.com/{}", url.trim_start_matches('/'));
        let body = client
            .get_with_headers(&page_url, Self::request_headers(), Some(Version::HTTP_2))
            .await
            .unwrap_or_default();
        if body.is_empty() {
            return String::new();
        }
        match Self::extract_stream_data(&body) {
            Some(stream_data) => Self::select_best_stream_url(stream_data).unwrap_or_default(),
            None => String::new(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::SpankbangProxy;

    /// The HLS master playlist outranks every mp4 quality.
    #[test]
    fn prefers_m3u8_when_present() {
        assert_eq!(
            SpankbangProxy::request_headers(),
            vec![("Referer".to_string(), "https://spankbang.com/".to_string())]
        );
        let page = r#"
var stream_data = {'240p': ['https://cdn.example/240.mp4'], '720p': ['https://cdn.example/720.mp4'], 'm3u8': ['https://cdn.example/master.m3u8'], 'main': ['https://cdn.example/720.mp4']};
"#;
        let stream_data = SpankbangProxy::extract_stream_data(page).unwrap();
        assert_eq!(
            SpankbangProxy::select_best_stream_url(stream_data).as_deref(),
            Some("https://cdn.example/master.m3u8")
        );
    }

    /// Empty arrays are skipped until a populated quality is found.
    #[test]
    fn falls_back_to_highest_quality_mp4() {
        let page = r#"
var stream_data = {'240p': ['https://cdn.example/240.mp4'], '480p': ['https://cdn.example/480.mp4'], '720p': ['https://cdn.example/720.mp4'], '1080p': [], '4k': [], 'm3u8': [], 'main': ['https://cdn.example/480.mp4']};
"#;
        let stream_data = SpankbangProxy::extract_stream_data(page).unwrap();
        assert_eq!(
            SpankbangProxy::select_best_stream_url(stream_data).as_deref(),
            Some("https://cdn.example/720.mp4")
        );
    }
}

View File

@@ -1,12 +1,22 @@
use ntex::web::{self, HttpRequest}; use ntex::web::{self, HttpRequest};
use crate::proxies::doodstream::DoodstreamProxy;
use crate::proxies::javtiful::JavtifulProxy; use crate::proxies::javtiful::JavtifulProxy;
use crate::proxies::pimpbunny::PimpbunnyProxy;
use crate::proxies::porndish::PorndishProxy;
use crate::proxies::pornhd3x::Pornhd3xProxy;
use crate::proxies::spankbang::SpankbangProxy;
use crate::proxies::sxyprn::SxyprnProxy; use crate::proxies::sxyprn::SxyprnProxy;
use crate::proxies::*; use crate::proxies::*;
use crate::util::requester::Requester; use crate::util::requester::Requester;
pub fn config(cfg: &mut web::ServiceConfig) { pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service( cfg.service(
web::resource("/doodstream/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/sxyprn/{endpoint}*") web::resource("/sxyprn/{endpoint}*")
.route(web::post().to(proxy2redirect)) .route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)), .route(web::get().to(proxy2redirect)),
@@ -16,6 +26,41 @@ pub fn config(cfg: &mut web::ServiceConfig) {
.route(web::post().to(proxy2redirect)) .route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)), .route(web::get().to(proxy2redirect)),
) )
.service(
web::resource("/spankbang/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/porndish/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/pornhd3x/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/shooshtime/{endpoint}*")
.route(web::post().to(crate::proxies::shooshtime::serve_media))
.route(web::get().to(crate::proxies::shooshtime::serve_media)),
)
.service(
web::resource("/pimpbunny/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/noodlemagazine/{endpoint}*")
.route(web::post().to(crate::proxies::noodlemagazine::serve_media))
.route(web::get().to(crate::proxies::noodlemagazine::serve_media)),
)
.service(
web::resource("/noodlemagazine-thumb/{endpoint}*")
.route(web::post().to(crate::proxies::noodlemagazine::get_image))
.route(web::get().to(crate::proxies::noodlemagazine::get_image)),
)
.service( .service(
web::resource("/hanime-cdn/{endpoint}*") web::resource("/hanime-cdn/{endpoint}*")
.route(web::post().to(crate::proxies::hanimecdn::get_image)) .route(web::post().to(crate::proxies::hanimecdn::get_image))
@@ -25,6 +70,16 @@ pub fn config(cfg: &mut web::ServiceConfig) {
web::resource("/hqporner-thumb/{endpoint}*") web::resource("/hqporner-thumb/{endpoint}*")
.route(web::post().to(crate::proxies::hqpornerthumb::get_image)) .route(web::post().to(crate::proxies::hqpornerthumb::get_image))
.route(web::get().to(crate::proxies::hqpornerthumb::get_image)), .route(web::get().to(crate::proxies::hqpornerthumb::get_image)),
)
.service(
web::resource("/porndish-thumb/{endpoint}*")
.route(web::post().to(crate::proxies::porndishthumb::get_image))
.route(web::get().to(crate::proxies::porndishthumb::get_image)),
);
cfg.service(
web::resource("/pimpbunny-thumb/{endpoint}*")
.route(web::post().to(crate::proxies::pimpbunnythumb::get_image))
.route(web::get().to(crate::proxies::pimpbunnythumb::get_image)),
); );
} }
@@ -45,8 +100,13 @@ async fn proxy2redirect(
fn get_proxy(proxy: &str) -> Option<AnyProxy> { fn get_proxy(proxy: &str) -> Option<AnyProxy> {
match proxy { match proxy {
"doodstream" => Some(AnyProxy::Doodstream(DoodstreamProxy::new())),
"sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())), "sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
"javtiful" => Some(AnyProxy::Javtiful(JavtifulProxy::new())), "javtiful" => Some(AnyProxy::Javtiful(JavtifulProxy::new())),
"pornhd3x" => Some(AnyProxy::Pornhd3x(Pornhd3xProxy::new())),
"pimpbunny" => Some(AnyProxy::Pimpbunny(PimpbunnyProxy::new())),
"porndish" => Some(AnyProxy::Porndish(PorndishProxy::new())),
"spankbang" => Some(AnyProxy::Spankbang(SpankbangProxy::new())),
_ => None, _ => None,
} }
} }

View File

@@ -22,6 +22,15 @@ pub struct Channel {
pub cacheDuration: Option<u32>, //Some(86400) pub cacheDuration: Option<u32>, //Some(86400)
} }
#[derive(serde::Serialize, Debug, Clone)]
pub struct ChannelGroup {
pub id: String,
pub title: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub systemImage: Option<String>,
pub channelIds: Vec<String>,
}
#[derive(serde::Serialize)] #[derive(serde::Serialize)]
pub struct ChannelOption { pub struct ChannelOption {
pub id: String, //"channels", pub id: String, //"channels",
@@ -109,20 +118,49 @@ impl Status {
.to_string(), .to_string(),
} }
} }
#[allow(dead_code)]
pub fn add_notice(&mut self, notice: Notice) {
self.notices.push(notice);
}
#[allow(dead_code)]
pub fn add_channel(&mut self, channel: Channel) { pub fn add_channel(&mut self, channel: Channel) {
self.channels.push(channel); self.channels.push(channel);
} }
#[allow(dead_code)] }
pub fn add_option(&mut self, option: Options) {
self.options.push(option); #[derive(serde::Serialize)]
} pub struct ChannelView {
#[allow(dead_code)] pub id: String,
pub fn add_category(&mut self, category: String) { pub name: String,
self.categories.push(category); pub description: String,
} pub premium: bool,
pub favicon: String,
pub status: String,
pub categories: Vec<String>,
pub options: Vec<ChannelOption>,
pub nsfw: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub groupKey: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub sortOrder: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub tags: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub ytdlpCommand: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cacheDuration: Option<u32>,
}
#[derive(serde::Serialize)]
pub struct StatusResponse {
pub id: String,
pub name: String,
pub subtitle: String,
pub description: String,
pub iconUrl: String,
pub color: String,
pub status: String,
pub notices: Vec<Notice>,
pub channels: Vec<ChannelView>,
pub channelGroups: Vec<ChannelGroup>,
pub subscription: Subscription,
pub nsfw: bool,
pub categories: Vec<String>,
pub options: Vec<Options>,
pub filtersFooter: String,
} }

41
src/util/flow_debug.rs Normal file
View File

@@ -0,0 +1,41 @@
use std::sync::atomic::{AtomicU64, Ordering};
#[cfg(feature = "debug")]
use std::time::{SystemTime, UNIX_EPOCH};
// Process-wide counter backing trace ids; starts at 1 so the first id is
// "<prefix>-000001".
static NEXT_TRACE_ID: AtomicU64 = AtomicU64::new(1);

/// Returns "prefix-NNNNNN" with a monotonically increasing, zero-padded id.
pub fn next_trace_id(prefix: &str) -> String {
    let sequence = NEXT_TRACE_ID.fetch_add(1, Ordering::Relaxed);
    format!("{}-{:06}", prefix, sequence)
}
/// Writes one timestamped debug line to stderr. Compiled only when the
/// `debug` feature is enabled; `module`/`line` come from the flow_debug!
/// macro's call site.
#[cfg(feature = "debug")]
pub fn emit(module: &str, line: u32, message: String) {
    let millis = match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_millis(),
        Err(_) => u128::default(),
    };
    eprintln!("[debug][{millis}][{module}:{line}] {message}");
}
/// Truncates `value` to at most `limit` bytes for log output, backing up to
/// the nearest char boundary and appending "..." when anything was cut.
pub fn preview(value: &str, limit: usize) -> String {
    if value.len() > limit {
        let mut cut = limit;
        // Never slice inside a multi-byte character.
        while !value.is_char_boundary(cut) {
            cut -= 1;
        }
        format!("{}...", &value[..cut])
    } else {
        value.to_owned()
    }
}
/// Formats and emits a debug trace line via `flow_debug::emit`.
///
/// Without the `debug` feature the inner block is compiled out entirely, so
/// the format arguments are not evaluated in release builds.
#[macro_export]
macro_rules! flow_debug {
    ($($arg:tt)*) => {{
        #[cfg(feature = "debug")]
        {
            $crate::util::flow_debug::emit(module_path!(), line!(), format!($($arg)*));
        }
    }};
}

84
src/util/hoster_proxy.rs Normal file
View File

@@ -0,0 +1,84 @@
use url::Url;
use crate::providers::{build_proxy_url, strip_url_scheme};
use crate::videos::ServerOptions;
// Hosts belonging to the doodstream player family; URLs on these hosts are
// rewritten to go through the local /proxy/doodstream/ route. Compared
// lowercased, so list entries must be lowercase.
const DOODSTREAM_HOSTS: &[&str] = &[
    "turboplayers.xyz",
    "www.turboplayers.xyz",
    "trailerhg.xyz",
    "www.trailerhg.xyz",
    "streamhg.com",
    "www.streamhg.com",
];
/// Maps a hoster URL to the local proxy route name that should front it,
/// or `None` when the host is unknown (or the URL does not parse).
pub fn proxy_name_for_url(url: &str) -> Option<&'static str> {
    let host = Url::parse(url).ok()?.host_str()?.to_ascii_lowercase();
    DOODSTREAM_HOSTS
        .iter()
        .any(|known| *known == host.as_str())
        .then_some("doodstream")
}
/// Rewrites URLs on known hoster domains to go through the local proxy;
/// any other URL passes through unchanged.
pub fn rewrite_hoster_url(options: &ServerOptions, url: &str) -> String {
    if let Some(proxy_name) = proxy_name_for_url(url) {
        build_proxy_url(options, proxy_name, &strip_url_scheme(url))
    } else {
        url.to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::{proxy_name_for_url, rewrite_hoster_url};
    use crate::videos::ServerOptions;

    // Minimal options fixture: only public_url_base matters for proxy URLs.
    fn options() -> ServerOptions {
        ServerOptions {
            featured: None,
            category: None,
            sites: None,
            filter: None,
            language: None,
            public_url_base: Some("https://example.com".to_string()),
            requester: None,
            network: None,
            stars: None,
            categories: None,
            duration: None,
            sort: None,
            sexuality: None,
        }
    }

    /// All doodstream-family hosts map to "doodstream"; others to None.
    #[test]
    fn matches_doodstream_family_hosts() {
        assert_eq!(
            proxy_name_for_url("https://turboplayers.xyz/t/69bdfb21cc640"),
            Some("doodstream")
        );
        assert_eq!(
            proxy_name_for_url("https://trailerhg.xyz/e/ttdc7a6qpskt"),
            Some("doodstream")
        );
        assert_eq!(
            proxy_name_for_url("https://streamhg.com/about"),
            Some("doodstream")
        );
        assert_eq!(proxy_name_for_url("https://example.com/video"), None);
    }

    /// Known hosts are proxied; unknown URLs pass through untouched.
    #[test]
    fn rewrites_known_hoster_urls_to_proxy_urls() {
        assert_eq!(
            rewrite_hoster_url(&options(), "https://turboplayers.xyz/t/69bdfb21cc640"),
            "https://example.com/proxy/doodstream/turboplayers.xyz/t/69bdfb21cc640"
        );
        assert_eq!(
            rewrite_hoster_url(&options(), "https://example.com/video"),
            "https://example.com/video"
        );
    }
}

View File

@@ -1,6 +1,8 @@
pub mod cache; pub mod cache;
pub mod discord; pub mod discord;
pub mod flaresolverr; pub mod flaresolverr;
pub mod flow_debug;
pub mod hoster_proxy;
pub mod proxy; pub mod proxy;
pub mod requester; pub mod requester;
pub mod time; pub mod time;
@@ -26,6 +28,7 @@ pub fn parse_abbreviated_number(s: &str) -> Option<u32> {
.map(|n| (n * multiplier) as u32) .map(|n| (n * multiplier) as u32)
} }
#[cfg(not(hottub_single_provider))]
pub fn interleave<T: Clone>(lists: &[Vec<T>]) -> Vec<T> { pub fn interleave<T: Clone>(lists: &[Vec<T>]) -> Vec<T> {
let mut result = Vec::new(); let mut result = Vec::new();

View File

@@ -1,10 +1,14 @@
use serde::Serialize; use serde::Serialize;
use std::env; use std::env;
use std::fmt;
use std::sync::{Arc, OnceLock};
use wreq::Client; use wreq::Client;
use wreq::Proxy; use wreq::Proxy;
use wreq::Response; use wreq::Response;
use wreq::Uri;
use wreq::Version; use wreq::Version;
use wreq::header::HeaderValue; use wreq::cookie::{CookieStore, Cookies, Jar};
use wreq::header::{HeaderMap, HeaderValue, SET_COOKIE, USER_AGENT};
use wreq::multipart::Form; use wreq::multipart::Form;
use wreq::redirect::Policy; use wreq::redirect::Policy;
use wreq_util::Emulation; use wreq_util::Emulation;
@@ -16,28 +20,168 @@ use crate::util::proxy;
// A Send + Sync error type for all async paths // A Send + Sync error type for all async paths
type AnyErr = Box<dyn std::error::Error + Send + Sync + 'static>; type AnyErr = Box<dyn std::error::Error + Send + Sync + 'static>;
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[derive(serde::Serialize, serde::Deserialize, Clone)]
pub struct Requester { pub struct Requester {
#[serde(skip)] #[serde(skip)]
client: Client, client: Client,
#[serde(skip)]
cookie_jar: Arc<Jar>,
#[serde(skip)]
debug_trace_id: Option<String>,
proxy: bool, proxy: bool,
flaresolverr_session: Option<String>, flaresolverr_session: Option<String>,
user_agent: Option<String>,
}
impl fmt::Debug for Requester {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Requester")
.field("proxy", &self.proxy)
.field("debug_trace_id", &self.debug_trace_id)
.field("flaresolverr_session", &self.flaresolverr_session)
.field("user_agent", &self.user_agent)
.finish()
}
} }
impl Requester { impl Requester {
pub fn new() -> Self { fn shared_cookie_jar() -> Arc<Jar> {
let client = Client::builder() static SHARED_COOKIE_JAR: OnceLock<Arc<Jar>> = OnceLock::new();
SHARED_COOKIE_JAR
.get_or_init(|| Arc::new(Jar::default()))
.clone()
}
/// Reduces `url` to its "scheme://host/" origin, the scope used when
/// storing cookies in the shared jar. `None` if the URL has no host.
fn origin_url_for_cookie_scope(url: &str) -> Option<url::Url> {
    let parsed = url::Url::parse(url).ok()?;
    let origin = format!("{}://{}/", parsed.scheme(), parsed.host_str()?);
    url::Url::parse(&origin).ok()
}
fn store_response_cookies(&self, url: &str, response: &Response) {
let Some(origin) = Self::origin_url_for_cookie_scope(url) else {
return;
};
for value in response.headers().get_all(SET_COOKIE).iter() {
if let Ok(cookie) = value.to_str() {
self.cookie_jar.add_cookie_str(cookie, &origin.to_string());
}
}
}
fn store_flaresolverr_cookies(
&mut self,
request_url: &str,
cookies: &[crate::util::flaresolverr::FlaresolverrCookie],
) {
let fallback_origin = Self::origin_url_for_cookie_scope(request_url);
for cookie in cookies {
let origin = if !cookie.domain.is_empty() {
let scheme = fallback_origin
.as_ref()
.map(|url| url.scheme())
.unwrap_or("https");
let host = cookie.domain.trim_start_matches('.');
url::Url::parse(&format!("{scheme}://{host}/"))
.ok()
.or_else(|| fallback_origin.clone())
} else {
fallback_origin.clone()
};
let Some(origin) = origin else {
continue;
};
let mut cookie_string =
format!("{}={}; Path={}", cookie.name, cookie.value, cookie.path);
if !cookie.domain.is_empty() {
cookie_string.push_str(&format!("; Domain={}", cookie.domain));
}
if cookie.secure {
cookie_string.push_str("; Secure");
}
if cookie.httpOnly {
cookie_string.push_str("; HttpOnly");
}
if let Some(same_site) = cookie.sameSite.as_deref() {
if !same_site.is_empty() {
cookie_string.push_str(&format!("; SameSite={same_site}"));
}
}
self.cookie_jar
.add_cookie_str(&cookie_string, &origin.to_string());
}
}
fn debug_cookie_preview_from_owned_headers(
&self,
url: &str,
headers: &[(String, String)],
) -> String {
if let Some((_, value)) = headers
.iter()
.find(|(key, _)| key.eq_ignore_ascii_case("cookie"))
{
return crate::util::flow_debug::preview(value, 160);
}
self.cookie_header_for_url(url)
.map(|cookie| crate::util::flow_debug::preview(&cookie, 160))
.unwrap_or_else(|| "none".to_string())
}
#[cfg(any(not(hottub_single_provider), hottub_provider = "hypnotube"))]
fn debug_cookie_preview_from_borrowed_headers(
&self,
url: &str,
headers: &[(&str, &str)],
) -> String {
if let Some((_, value)) = headers
.iter()
.find(|(key, _)| key.eq_ignore_ascii_case("cookie"))
{
return crate::util::flow_debug::preview(value, 160);
}
self.cookie_header_for_url(url)
.map(|cookie| crate::util::flow_debug::preview(&cookie, 160))
.unwrap_or_else(|| "none".to_string())
}
fn build_client(cookie_jar: Arc<Jar>, user_agent: Option<&str>) -> Client {
let mut builder = Client::builder()
.cert_verification(false) .cert_verification(false)
.emulation(Emulation::Firefox136) .emulation(Emulation::Firefox146)
.cookie_store(true) .cookie_provider(cookie_jar)
.redirect(Policy::default()) .redirect(Policy::default());
.build()
.expect("Failed to create HTTP client"); if let Some(user_agent) = user_agent {
let mut headers = HeaderMap::new();
if let Ok(value) = HeaderValue::from_str(user_agent) {
headers.insert(USER_AGENT, value);
builder = builder.default_headers(headers);
}
}
builder.build().expect("Failed to create HTTP client")
}
pub fn new() -> Self {
let cookie_jar = Self::shared_cookie_jar();
let client = Self::build_client(cookie_jar.clone(), None);
let requester = Requester { let requester = Requester {
client, client,
cookie_jar,
debug_trace_id: None,
proxy: false, proxy: false,
flaresolverr_session: None, flaresolverr_session: None,
user_agent: None,
}; };
proxy::init_all_proxies_background(requester.clone()); proxy::init_all_proxies_background(requester.clone());
@@ -52,13 +196,47 @@ impl Requester {
self.proxy = proxy; self.proxy = proxy;
} }
pub fn set_debug_trace_id(&mut self, debug_trace_id: Option<String>) {
self.debug_trace_id = debug_trace_id;
}
#[cfg(feature = "debug")]
pub fn debug_trace_id(&self) -> Option<&str> {
self.debug_trace_id.as_deref()
}
pub fn cookie_header_for_url(&self, url: &str) -> Option<String> {
let parsed = url.parse::<Uri>().ok()?;
match self.cookie_jar.cookies(&parsed) {
Cookies::Compressed(value) => value.to_str().ok().map(ToOwned::to_owned),
Cookies::Uncompressed(values) => {
let joined = values
.into_iter()
.filter_map(|value| value.to_str().ok().map(ToOwned::to_owned))
.collect::<Vec<_>>()
.join("; ");
(!joined.is_empty()).then_some(joined)
}
Cookies::Empty => None,
_ => None,
}
}
pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> { pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> {
let client = Client::builder() let cookie_preview = self
.cert_verification(false) .cookie_header_for_url(url)
.emulation(Emulation::Firefox136) .map(|cookie| crate::util::flow_debug::preview(&cookie, 160))
.cookie_store(true) .unwrap_or_else(|| "none".to_string());
.build() #[cfg(not(feature = "debug"))]
.expect("Failed to create HTTP client"); let _ = &cookie_preview;
crate::flow_debug!(
"trace={} requester get_raw url={} cookies={} proxy={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
cookie_preview,
self.proxy
);
let client = Self::build_client(self.cookie_jar.clone(), self.user_agent.as_deref());
let mut request = client.get(url).version(Version::HTTP_11); let mut request = client.get(url).version(Version::HTTP_11);
@@ -69,7 +247,9 @@ impl Requester {
} }
} }
request.send().await let response = request.send().await?;
self.store_response_cookies(url, &response);
Ok(response)
} }
pub async fn get_raw_with_headers( pub async fn get_raw_with_headers(
@@ -77,12 +257,18 @@ impl Requester {
url: &str, url: &str,
headers: Vec<(String, String)>, headers: Vec<(String, String)>,
) -> Result<Response, wreq::Error> { ) -> Result<Response, wreq::Error> {
let client = Client::builder() let cookie_preview = self.debug_cookie_preview_from_owned_headers(url, &headers);
.cert_verification(false) #[cfg(not(feature = "debug"))]
.emulation(Emulation::Firefox136) let _ = &cookie_preview;
.cookie_store(true) crate::flow_debug!(
.build() "trace={} requester get_raw_with_headers url={} headers={} cookies={} proxy={}",
.expect("Failed to create HTTP client"); self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
headers.len(),
cookie_preview,
self.proxy
);
let client = Self::build_client(self.cookie_jar.clone(), self.user_agent.as_deref());
let mut request = client.get(url).version(Version::HTTP_11); let mut request = client.get(url).version(Version::HTTP_11);
@@ -96,7 +282,9 @@ impl Requester {
for (key, value) in headers.iter() { for (key, value) in headers.iter() {
request = request.header(key, value); request = request.header(key, value);
} }
request.send().await let response = request.send().await?;
self.store_response_cookies(url, &response);
Ok(response)
} }
pub async fn post_json<S>( pub async fn post_json<S>(
@@ -108,6 +296,17 @@ impl Requester {
where where
S: Serialize + ?Sized, S: Serialize + ?Sized,
{ {
let cookie_preview = self.debug_cookie_preview_from_owned_headers(url, &headers);
#[cfg(not(feature = "debug"))]
let _ = &cookie_preview;
crate::flow_debug!(
"trace={} requester post_json url={} headers={} cookies={} proxy={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
headers.len(),
cookie_preview,
self.proxy
);
let mut request = self.client.post(url).version(Version::HTTP_11).json(data); let mut request = self.client.post(url).version(Version::HTTP_11).json(data);
// Set custom headers // Set custom headers
@@ -122,15 +321,30 @@ impl Requester {
} }
} }
request.send().await let response = request.send().await?;
self.store_response_cookies(url, &response);
Ok(response)
} }
#[cfg(any(not(hottub_single_provider), hottub_provider = "hypnotube"))]
pub async fn post( pub async fn post(
&mut self, &mut self,
url: &str, url: &str,
data: &str, data: &str,
headers: Vec<(&str, &str)>, headers: Vec<(&str, &str)>,
) -> Result<Response, wreq::Error> { ) -> Result<Response, wreq::Error> {
let cookie_preview = self.debug_cookie_preview_from_borrowed_headers(url, &headers);
#[cfg(not(feature = "debug"))]
let _ = &cookie_preview;
crate::flow_debug!(
"trace={} requester post url={} headers={} cookies={} body_len={} proxy={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
headers.len(),
cookie_preview,
data.len(),
self.proxy
);
let mut request = self let mut request = self
.client .client
.post(url) .post(url)
@@ -149,7 +363,9 @@ impl Requester {
} }
} }
request.send().await let response = request.send().await?;
self.store_response_cookies(url, &response);
Ok(response)
} }
pub async fn post_multipart( pub async fn post_multipart(
@@ -159,6 +375,17 @@ impl Requester {
headers: Vec<(String, String)>, headers: Vec<(String, String)>,
_http_version: Option<Version>, _http_version: Option<Version>,
) -> Result<Response, wreq::Error> { ) -> Result<Response, wreq::Error> {
let cookie_preview = self.debug_cookie_preview_from_owned_headers(url, &headers);
#[cfg(not(feature = "debug"))]
let _ = &cookie_preview;
crate::flow_debug!(
"trace={} requester post_multipart url={} headers={} cookies={} proxy={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
headers.len(),
cookie_preview,
self.proxy
);
let http_version = match _http_version { let http_version = match _http_version {
Some(v) => v, Some(v) => v,
None => Version::HTTP_11, None => Version::HTTP_11,
@@ -178,7 +405,9 @@ impl Requester {
} }
} }
request.send().await let response = request.send().await?;
self.store_response_cookies(url, &response);
Ok(response)
} }
pub async fn get( pub async fn get(
@@ -195,6 +424,18 @@ impl Requester {
headers: Vec<(String, String)>, headers: Vec<(String, String)>,
_http_version: Option<Version>, _http_version: Option<Version>,
) -> Result<String, AnyErr> { ) -> Result<String, AnyErr> {
let cookie_preview = self.debug_cookie_preview_from_owned_headers(url, &headers);
#[cfg(not(feature = "debug"))]
let _ = &cookie_preview;
crate::flow_debug!(
"trace={} requester get_with_headers start url={} headers={} cookies={} http_version={:?} proxy={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
headers.len(),
cookie_preview,
_http_version,
self.proxy
);
let http_version = match _http_version { let http_version = match _http_version {
Some(v) => v, Some(v) => v,
None => Version::HTTP_11, None => Version::HTTP_11,
@@ -211,10 +452,22 @@ impl Requester {
} }
} }
let response = request.send().await?; let response = request.send().await?;
self.store_response_cookies(url, &response);
crate::flow_debug!(
"trace={} requester direct response url={} status={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
response.status()
);
if response.status().is_success() || response.status().as_u16() == 404 { if response.status().is_success() || response.status().as_u16() == 404 {
return Ok(response.text().await?); return Ok(response.text().await?);
} }
if response.status().as_u16() == 429 { if response.status().as_u16() == 429 {
crate::flow_debug!(
"trace={} requester direct retry url={} status=429",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120)
);
tokio::time::sleep(std::time::Duration::from_secs(1)).await; tokio::time::sleep(std::time::Duration::from_secs(1)).await;
continue; continue;
} else { } else {
@@ -237,6 +490,12 @@ impl Requester {
if self.proxy && env::var("BURP_URL").is_ok() { if self.proxy && env::var("BURP_URL").is_ok() {
flare.set_proxy(true); flare.set_proxy(true);
} }
crate::flow_debug!(
"trace={} requester flaresolverr url={} proxy={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
self.proxy
);
let res = flare let res = flare
.solve(FlareSolverrRequest { .solve(FlareSolverrRequest {
@@ -248,32 +507,17 @@ impl Requester {
.map_err(|e| -> AnyErr { format!("Failed to solve FlareSolverr: {e}").into() })?; .map_err(|e| -> AnyErr { format!("Failed to solve FlareSolverr: {e}").into() })?;
// Rebuild client and apply UA/cookies from FlareSolverr // Rebuild client and apply UA/cookies from FlareSolverr
let cookie_origin = url.split('/').take(3).collect::<Vec<&str>>().join("/");
self.client = Client::builder()
.cert_verification(false)
.emulation(Emulation::Firefox136)
.cookie_store(true)
.redirect(Policy::default())
.build()
.expect("Failed to create HTTP client");
let useragent = res.solution.userAgent; let useragent = res.solution.userAgent;
self.client self.user_agent = Some(useragent);
.update() self.store_flaresolverr_cookies(url, &res.solution.cookies);
.headers(|headers| {
headers.insert("User-Agent", HeaderValue::from_str(&useragent).unwrap());
})
.apply()
.unwrap();
if let Ok(origin) = url::Url::parse(&cookie_origin) { self.client = Self::build_client(self.cookie_jar.clone(), self.user_agent.as_deref());
for cookie in res.solution.cookies { crate::flow_debug!(
let header = "trace={} requester flaresolverr solved url={} user_agent={}",
HeaderValue::from_str(&format!("{}={}", cookie.name, cookie.value)).unwrap(); self.debug_trace_id().unwrap_or("none"),
self.client.set_cookie(&origin, header); crate::util::flow_debug::preview(url, 120),
} crate::util::flow_debug::preview(self.user_agent.as_deref().unwrap_or("unknown"), 96)
} );
// Retry the original URL with the updated client & (optional) proxy // Retry the original URL with the updated client & (optional) proxy
let mut request = self.client.get(url).version(Version::HTTP_11); let mut request = self.client.get(url).version(Version::HTTP_11);
@@ -288,11 +532,44 @@ impl Requester {
} }
let response = request.send().await?; let response = request.send().await?;
self.store_response_cookies(url, &response);
crate::flow_debug!(
"trace={} requester retry response url={} status={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120),
response.status()
);
if response.status().is_success() { if response.status().is_success() {
return Ok(response.text().await?); return Ok(response.text().await?);
} }
// Fall back to FlareSolverr-provided body // Fall back to FlareSolverr-provided body
crate::flow_debug!(
"trace={} requester fallback body url={}",
self.debug_trace_id().unwrap_or("none"),
crate::util::flow_debug::preview(url, 120)
);
Ok(res.solution.response) Ok(res.solution.response)
} }
} }
#[cfg(test)]
mod tests {
use super::Requester;
#[test]
fn new_requesters_share_cookie_jar() {
let a = Requester::new();
let b = Requester::new();
let origin = "https://shared-cookie-requester-test.invalid/";
a.cookie_jar
.add_cookie_str("shared_cookie=1; Path=/; SameSite=Lax", origin);
let cookie_header = b
.cookie_header_for_url("https://shared-cookie-requester-test.invalid/path")
.unwrap_or_default();
assert!(cookie_header.contains("shared_cookie=1"));
}
}

View File

@@ -50,7 +50,7 @@ pub struct VideosRequest {
pub networks: Option<String>, // pub networks: Option<String>, //
pub stars: Option<String>, // pub stars: Option<String>, //
pub categories: Option<String>, pub categories: Option<String>,
pub duration: Option<String> pub duration: Option<String>,
} }
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
@@ -60,13 +60,14 @@ pub struct ServerOptions {
pub sites: Option<String>, // pub sites: Option<String>, //
pub filter: Option<String>, pub filter: Option<String>,
pub language: Option<String>, // "en" pub language: Option<String>, // "en"
pub public_url_base: Option<String>,
pub requester: Option<Requester>, pub requester: Option<Requester>,
pub network: Option<String>, // pub network: Option<String>, //
pub stars: Option<String>, // pub stars: Option<String>, //
pub categories: Option<String>, // pub categories: Option<String>, //
pub duration: Option<String>, // pub duration: Option<String>, //
pub sort: Option<String>, // pub sort: Option<String>, //
pub sexuality: Option<String>, // pub sexuality: Option<String>, //
} }
#[derive(serde::Serialize, Debug)] #[derive(serde::Serialize, Debug)]
@@ -83,13 +84,15 @@ pub struct VideoEmbed {
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
pub struct VideoItem { pub struct VideoItem {
pub duration: u32, // 110, pub duration: u32, // 110,
#[serde(default)]
pub isLive: bool,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub views: Option<u32>, // 14622653, pub views: Option<u32>, // 14622653,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub rating: Option<f32>, // 0.0, pub rating: Option<f32>, // 0.0,
pub id: String, // "c85017ca87477168d648727753c4ded8a35f173e22ef93743e707b296becb299", pub id: String, // "c85017ca87477168d648727753c4ded8a35f173e22ef93743e707b296becb299",
pub title: String, // "20 Minutes of Adorable Kittens BEST Compilation", pub title: String, // "20 Minutes of Adorable Kittens BEST Compilation",
pub url: String, // "https://www.youtube.com/watch?v=y0sF5xhGreA", pub url: String, // "https://www.youtube.com/watch?v=y0sF5xhGreA",
pub channel: String, // "youtube", pub channel: String, // "youtube",
pub thumb: String, // "https://i.ytimg.com/vi/y0sF5xhGreA/hqdefault.jpg", pub thumb: String, // "https://i.ytimg.com/vi/y0sF5xhGreA/hqdefault.jpg",
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
@@ -111,7 +114,6 @@ pub struct VideoItem {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub aspectRatio: Option<f32>, pub aspectRatio: Option<f32>,
} }
#[allow(dead_code)]
impl VideoItem { impl VideoItem {
pub fn new( pub fn new(
id: String, id: String,
@@ -123,8 +125,9 @@ impl VideoItem {
) -> Self { ) -> Self {
VideoItem { VideoItem {
duration: duration, // Placeholder, adjust as needed duration: duration, // Placeholder, adjust as needed
views: None, // Placeholder, adjust as needed isLive: false,
rating: None, // Placeholder, adjust as needed views: None, // Placeholder, adjust as needed
rating: None, // Placeholder, adjust as needed
id, id,
title, title,
url, url,
@@ -141,9 +144,11 @@ impl VideoItem {
aspectRatio: None, aspectRatio: None,
} }
} }
#[cfg(any(not(hottub_single_provider), hottub_provider = "hentaihaven"))]
pub fn from(s: String) -> Result<Self, serde_json::Error> { pub fn from(s: String) -> Result<Self, serde_json::Error> {
serde_json::from_str::<VideoItem>(&s) serde_json::from_str::<VideoItem>(&s)
} }
#[cfg(any(not(hottub_single_provider), hottub_provider = "hanime"))]
pub fn tags(mut self, tags: Vec<String>) -> Self { pub fn tags(mut self, tags: Vec<String>) -> Self {
if tags.is_empty() { if tags.is_empty() {
return self; return self;
@@ -151,30 +156,113 @@ impl VideoItem {
self.tags = Some(tags); self.tags = Some(tags);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "porndish",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "chaturbate",
hottub_provider = "porn4fans",
hottub_provider = "xfree",
hottub_provider = "pornhub",
))]
pub fn uploader(mut self, uploader: String) -> Self { pub fn uploader(mut self, uploader: String) -> Self {
self.uploader = Some(uploader); self.uploader = Some(uploader);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "heavyfetish",
hottub_provider = "porndish",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "chaturbate",
))]
pub fn uploader_url(mut self, uploader_url: String) -> Self { pub fn uploader_url(mut self, uploader_url: String) -> Self {
self.uploaderUrl = Some(uploader_url); self.uploaderUrl = Some(uploader_url);
self self
} }
pub fn verified(mut self, verified: bool) -> Self { #[cfg(any(
self.verified = Some(verified); not(hottub_single_provider),
self hottub_provider = "beeg",
} hottub_provider = "chaturbate",
hottub_provider = "freepornvideosxxx",
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "hentaihaven",
hottub_provider = "hypnotube",
hottub_provider = "javtiful",
hottub_provider = "noodlemagazine",
hottub_provider = "okxxx",
hottub_provider = "omgxxx",
hottub_provider = "perfectgirls",
hottub_provider = "pimpbunny",
hottub_provider = "pmvhaven",
hottub_provider = "porn00",
hottub_provider = "porn4fans",
hottub_provider = "porndish",
hottub_provider = "pornhat",
hottub_provider = "pornhub",
hottub_provider = "redtube",
hottub_provider = "rule34gen",
hottub_provider = "rule34video",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "sxyprn",
hottub_provider = "tnaflix",
hottub_provider = "tokyomotion",
hottub_provider = "viralxxxporn",
hottub_provider = "xfree",
hottub_provider = "xxthots",
hottub_provider = "yesporn",
hottub_provider = "youjizz",
))]
pub fn views(mut self, views: u32) -> Self { pub fn views(mut self, views: u32) -> Self {
self.views = Some(views); self.views = Some(views);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "beeg",
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "hsex",
hottub_provider = "porn4fans",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "tokyomotion",
hottub_provider = "vrporn",
hottub_provider = "yesporn",
))]
pub fn rating(mut self, rating: f32) -> Self { pub fn rating(mut self, rating: f32) -> Self {
self.rating = Some(rating); self.rating = Some(rating);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "porndish",
hottub_provider = "shooshtime",
hottub_provider = "heavyfetish",
hottub_provider = "xfree",
))]
pub fn uploaded_at(mut self, uploaded_at: u64) -> Self { pub fn uploaded_at(mut self, uploaded_at: u64) -> Self {
self.uploadedAt = Some(uploaded_at); self.uploadedAt = Some(uploaded_at);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "hentaihaven",
hottub_provider = "hqporner",
hottub_provider = "javtiful",
hottub_provider = "noodlemagazine",
hottub_provider = "pimpbunny",
hottub_provider = "pmvhaven",
hottub_provider = "shooshtime",
))]
pub fn formats(mut self, formats: Vec<VideoFormat>) -> Self { pub fn formats(mut self, formats: Vec<VideoFormat>) -> Self {
if formats.is_empty() { if formats.is_empty() {
return self; return self;
@@ -182,26 +270,52 @@ impl VideoItem {
self.formats = Some(formats); self.formats = Some(formats);
self self
} }
pub fn add_format(mut self, format: VideoFormat) { #[cfg(any(
if let Some(formats) = self.formats.as_mut() { not(hottub_single_provider),
formats.push(format); hottub_provider = "freepornvideosxxx",
} else { hottub_provider = "heavyfetish",
self.formats = Some(vec![format]); hottub_provider = "homoxxx",
} hottub_provider = "javtiful",
} hottub_provider = "missav",
pub fn embed(mut self, embed: VideoEmbed) -> Self { hottub_provider = "okxxx",
self.embed = Some(embed); hottub_provider = "omgxxx",
self hottub_provider = "perfectgirls",
} hottub_provider = "pimpbunny",
hottub_provider = "pmvhaven",
hottub_provider = "pornhat",
hottub_provider = "redtube",
hottub_provider = "rule34gen",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "sxyprn",
hottub_provider = "tnaflix",
hottub_provider = "xfree",
hottub_provider = "xxdbx",
hottub_provider = "yesporn",
))]
pub fn preview(mut self, preview: String) -> Self { pub fn preview(mut self, preview: String) -> Self {
self.preview = Some(preview); self.preview = Some(preview);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hentaihaven",
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "paradisehill",
hottub_provider = "xfree",
))]
pub fn aspect_ratio(mut self, aspect_ratio: f32) -> Self { pub fn aspect_ratio(mut self, aspect_ratio: f32) -> Self {
self.aspectRatio = Some(aspect_ratio); self.aspectRatio = Some(aspect_ratio);
self self
} }
#[cfg(any(not(hottub_single_provider), hottub_provider = "chaturbate"))]
pub fn is_live(mut self, is_live: bool) -> Self {
self.isLive = is_live;
self
}
} }
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
@@ -285,6 +399,13 @@ impl VideoFormat {
http_headers: None, http_headers: None,
} }
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "vrporn",
hottub_provider = "perverzija",
hottub_provider = "porndish",
hottub_provider = "spankbang",
))]
pub fn add_http_header(&mut self, key: String, value: String) { pub fn add_http_header(&mut self, key: String, value: String) {
if self.http_headers.is_none() { if self.http_headers.is_none() {
self.http_headers = Some(HashMap::new()); self.http_headers = Some(HashMap::new());
@@ -293,6 +414,14 @@ impl VideoFormat {
headers.insert(key, value); headers.insert(key, value);
} }
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hentaihaven",
hottub_provider = "noodlemagazine",
hottub_provider = "shooshtime",
hottub_provider = "heavyfetish",
hottub_provider = "hsex",
))]
pub fn http_header(&mut self, key: String, value: String) -> Self { pub fn http_header(&mut self, key: String, value: String) -> Self {
if self.http_headers.is_none() { if self.http_headers.is_none() {
self.http_headers = Some(HashMap::new()); self.http_headers = Some(HashMap::new());