Compare commits


42 Commits

Author SHA1 Message Date
Simon  52081698e9  fixed hanime search  2025-06-05 04:22:48 +00:00
Simon  d837028faf  sort burp view  2025-06-04 18:52:35 +00:00
Simon  cb03417f5f  removed the all channel  2025-06-04 18:47:27 +00:00
Simon  d7fc427696  implemented hanime  2025-06-04 18:33:49 +00:00
Simon  3150e57411  caching  2025-06-04 07:35:55 +00:00
Simon  8d5da3a4dc  hotfix  2025-06-03 19:26:26 +00:00
Simon  2ddc5e86e2  hotfix  2025-06-03 18:10:08 +00:00
Simon  2e8b8bea0c  implemented tags for videos  2025-06-03 15:34:02 +00:00
Simon  082b3b5c1d  fixed query  2025-06-03 13:46:54 +00:00
Simon  a7610e1bb3  cleanup and fixed query  2025-06-03 12:59:31 +00:00
Simon  261c81e391  cleanup and fixing  2025-06-03 12:29:41 +00:00
Simon  1324d58f50  docker-compose  2025-06-03 11:55:10 +00:00
Simon  9399949c36  renamed var  2025-06-03 11:52:27 +00:00
Simon  03e4554131  increased wait time and activated burpsuite for supervisord  2025-06-03 10:44:34 +00:00
Simon  c218828d40  hotfix  2025-06-03 10:40:15 +00:00
Simon  15c5216309  simplified and unsecure ;)  2025-06-03 10:39:28 +00:00
Simon  58cff87274  hotfix  2025-06-03 10:38:16 +00:00
Simon  e51de99853  clear tmp for burp  2025-06-03 10:37:21 +00:00
Simon  6b1746180f  hotfix path  2025-06-03 10:33:58 +00:00
Simon  08d7b09e05  update start script  2025-06-03 10:31:19 +00:00
Simon  d74b7b97e6  added jar path  2025-06-03 10:28:03 +00:00
Simon  d1b23dd293  added missing import  2025-06-03 10:26:22 +00:00
Simon  0f9c23168c  burp start script  2025-06-03 10:24:22 +00:00
Simon  4cd9661d4b  fixed path  2025-06-03 10:09:27 +00:00
Simon  91afe6e48f  gnome screenshot for autoburp  2025-06-03 09:58:18 +00:00
Simon  ae312a83fb  added start_burp.sh  2025-06-03 09:54:25 +00:00
Simon  4cf29ce201  typo  2025-06-03 09:47:56 +00:00
Simon  8da7b30c07  Dockerfile hotfix  2025-06-03 08:37:01 +00:00
Simon  cae15e7636  auto burp part 1  2025-06-03 08:28:34 +00:00
Simon  d2254128d7  java update  2025-06-03 07:43:31 +00:00
Simon  be83e12bc3  hotfix hottub path  2025-06-03 07:28:23 +00:00
Simon  babaf90762  hotfix hottub supervisord  2025-06-03 07:26:33 +00:00
Simon  860eadcbd4  supervisor and other update  2025-06-03 07:08:35 +00:00
Simon  ae8fd8e922  MOCK API for tests  2025-06-01 18:09:20 +00:00
Simon  918ed1a125  flaresolverr for loading behind cloudflare  2025-06-01 11:16:26 +00:00
Simon  edc7879324  removed proxy  2025-05-31 13:58:46 +00:00
Simon  580751af03  implemented query and flaresolverr  2025-05-31 13:54:27 +00:00
Simon  3fe699b62d  removed openssl from ntex cargo.toml  2025-05-31 09:55:34 +00:00
Simon  0cb3531ae4  removed default env logger  2025-05-31 09:53:14 +00:00
Simon  5b9a1b351c  more cleanup  2025-05-31 09:47:30 +00:00
20bf6b745b  Merge pull request 'some cleanup' (#2) from master into main (Reviewed-on: #2)  2025-05-31 11:45:23 +02:00
7fa6bdeb3c  Merge pull request 'init' (#1) from master into main (Reviewed-on: #1)  2025-05-31 11:32:29 +02:00
27 changed files with 1111 additions and 231 deletions

1
.gitignore vendored

@@ -3,6 +3,7 @@
 # will have compiled files and executables
 debug/
 target/
+.testing/
 # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
 # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html

Cargo.toml

@@ -10,7 +10,7 @@ env_logger = "0.11.8"
 error-chain = "0.12.4"
 futures = "0.3.31"
 htmlentity = "1.3.2"
-ntex = { version = "2.0", features = ["tokio", "openssl"] }
+ntex = { version = "2.0", features = ["tokio"] }
 ntex-files = "2.0.0"
 once_cell = "1.21.3"
 reqwest = { version = "0.12.18", features = ["blocking", "json", "rustls-tls"] }

47
Dockerfile Normal file

@@ -0,0 +1,47 @@
#FROM debian
FROM consol/debian-xfce-vnc:latest
ENV REFRESHED_AT 2025_06_03
# Switch to root user to install additional software
USER 0
RUN apt update
RUN apt install -yq libssl-dev \
wget curl unzip \
openssl \
ca-certificates \
fontconfig \
fonts-dejavu \
libxext6 \
libxrender1 \
libxtst6 \
gnupg \
supervisor \
python3 python3-pip python3-venv\
scrot python3-tk python3-dev \
libx11-6 libx11-dev libxext-dev libxtst6 \
libpng-dev libjpeg-dev libtiff-dev libfreetype6-dev \
x11-xserver-utils \
xserver-xorg \
fluxbox \
xvfb \
gnome-screenshot \
&& apt-get clean
RUN mkdir -p /usr/share/man/man1 && \
curl -fsSL https://packages.adoptium.net/artifactory/api/gpg/key/public | gpg --dearmor -o /etc/apt/trusted.gpg.d/adoptium.gpg && \
echo "deb https://packages.adoptium.net/artifactory/deb bullseye main" > /etc/apt/sources.list.d/adoptium.list && \
apt-get update && \
apt-get install -y temurin-21-jdk
RUN java -version
RUN curl https://portswigger.net/burp/releases/download \
-o burpsuite_community.jar
USER 1000
RUN python3 -m venv ~/.venv && bash -c "source ~/.venv/bin/activate && pip3 install pyautogui pillow opencv-python"
RUN echo "source ~/.venv/bin/activate" >> ~/.bashrc

BIN  burp/accept.png Normal file (698 B, binary content not shown)

BIN  burp/close.png Normal file (2.0 KiB, binary content not shown)

BIN  burp/http_history.png Normal file (780 B, binary content not shown)

BIN  burp/next_button.png Normal file (526 B, binary content not shown)

20
burp/project_options.json Normal file

@@ -0,0 +1,20 @@
{
    "proxy":{
        "request_listeners":[
            {
                "certificate_mode":"per_host",
                "custom_tls_protocols":[
                    "SSLv3",
                    "TLSv1",
                    "TLSv1.1",
                    "TLSv1.2",
                    "TLSv1.3"
                ],
                "listen_mode":"all_interfaces",
                "listener_port":8080,
                "running":true,
                "use_custom_tls_protocols":false
            }
        ]
    }
}

BIN  burp/proxy.png Normal file (780 B, binary content not shown)

BIN  burp/sort.png Normal file (379 B, binary content not shown)

BIN  burp/start_burp.png Normal file (818 B, binary content not shown)

80
burp/start_burp.py Normal file

@@ -0,0 +1,80 @@
import pyautogui
import time
import os
import subprocess
import glob

BURP_JAR = "/headless/burpsuite_community.jar"
CONFIG_FILE = "/app/burp/project_options.json"

def start_burp():
    os.system("rm -rf /tmp/burp*")
    burp_process = subprocess.Popen([
        "java", "-jar", BURP_JAR,
        f"--config-file={CONFIG_FILE}"
    ])
    return burp_process

time.sleep(5)
print("Starting Burp Suite...")
burp_process = start_burp()
button = None
while True:
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/next_button.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Next' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/start_burp.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Start Burp' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/accept.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Accept' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/proxy.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Proxy' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/http_history.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'HTTP History' button...")
        pyautogui.click(button)
        time.sleep(2)
        try:
            button = pyautogui.locateCenterOnScreen("/app/burp/sort.png", confidence=0.8)
        except:
            pass
        if button:
            print("Clicking on the 'Sorting' button...")
            pyautogui.click(button)
        time.sleep(60*60*24)
        burp_process.terminate()
        print("Starting Burp Suite...")
        burp_process = start_burp()

23
docker-compose.yml Normal file

@@ -0,0 +1,23 @@
services:
  hottub:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: hottub
    entrypoint: supervisord
    command: ["-c", "/app/supervisord/supervisord.conf"]
    volumes:
      - /path/to/hottub:/app
    environment:
      - RUST_LOG=info
      - BURP_URL=http://127.0.0.1:8081
    restart: unless-stopped
    working_dir: /app
    ports:
      - 6901:6901
      - 8080:18080
networks:
  traefik_default:
    external: true

src/api.rs

@@ -1,18 +1,13 @@
-use htmlentity::entity::decode;
-use htmlentity::entity::ICodedDataTrait;
+use futures::channel;
 use ntex::http::header;
-use ntex::util::Buf;
 use ntex::web;
 use ntex::web::HttpRequest;
-use ntex::web::HttpResponse;
-use serde_json::json;
-use serde_json::Value;
-use std::collections::HashMap;
+use crate::providers::hanime::HanimeProvider;
 use crate::providers::perverzija::PerverzijaProvider;
+use crate::util::cache::VideoCache;
 use crate::{providers::*, status::*, videos::*};
-// this function could be located in a different module
 pub fn config(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("/status")
@@ -37,86 +32,86 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
// You can now use `method`, `host`, and `port` as needed // You can now use `method`, `host`, and `port` as needed
status.add_channel(Channel { // status.add_channel(Channel {
id: "all".to_string(), // id: "all".to_string(),
name: "SpaceMoehre's Hottub".to_string(), // name: "SpaceMoehre's Hottub".to_string(),
favicon: format!("http://{}/static/favicon.ico", host).to_string(), // favicon: format!("http://{}/static/favicon.ico", host).to_string(),
premium: false, // premium: false,
description: "Work in Progress".to_string(), // description: "Work in Progress".to_string(),
status: "active".to_string(), // status: "active".to_string(),
categories: vec![], // categories: vec![],
options: vec![ // options: vec![
Channel_Option { // ChannelOption {
id: "channels".to_string(), // id: "channels".to_string(),
title: "Sites".to_string(), // title: "Sites".to_string(),
description: "Websites included in search results.".to_string(), // description: "Websites included in search results.".to_string(),
systemImage: "network".to_string(), // systemImage: "network".to_string(),
colorName: "purple".to_string(), // colorName: "purple".to_string(),
options: vec![ // options: vec![
Filter_Option { // FilterOption {
id: "perverzija".to_string(), // id: "perverzija".to_string(),
title: "Perverzija".to_string(), // title: "Perverzija".to_string(),
}, // },
], // ],
multiSelect: true, // multiSelect: true,
}, // },
Channel_Option { // ChannelOption {
id: "sort".to_string(), // id: "sort".to_string(),
title: "Sort".to_string(), // title: "Sort".to_string(),
description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(), // description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
systemImage: "list.number".to_string(), // systemImage: "list.number".to_string(),
colorName: "blue".to_string(), // colorName: "blue".to_string(),
options: vec![ // options: vec![
Filter_Option { // FilterOption {
id: "date".to_string(), // id: "date".to_string(),
title: "Date".to_string(), // title: "Date".to_string(),
}, // },
Filter_Option { // FilterOption {
id: "name".to_string(), // id: "name".to_string(),
title: "Name".to_string(), // title: "Name".to_string(),
}, // },
], // ],
multiSelect: false, // multiSelect: false,
}, // },
Channel_Option { // ChannelOption {
id: "duration".to_string(), // id: "duration".to_string(),
title: "Duration".to_string(), // title: "Duration".to_string(),
description: "Filter the videos by duration.".to_string(), // description: "Filter the videos by duration.".to_string(),
systemImage: "timer".to_string(), // systemImage: "timer".to_string(),
colorName: "green".to_string(), // colorName: "green".to_string(),
options: vec![ // options: vec![
Filter_Option { // FilterOption {
id: "short".to_string(), // id: "short".to_string(),
title: "< 1h".to_string(), // title: "< 1h".to_string(),
}, // },
Filter_Option { // FilterOption {
id: "long".to_string(), // id: "long".to_string(),
title: "> 1h".to_string(), // title: "> 1h".to_string(),
}, // },
], // ],
multiSelect: true, // multiSelect: true,
}, // },
Channel_Option { // ChannelOption {
id: "featured".to_string(), // id: "featured".to_string(),
title: "Featured".to_string(), // title: "Featured".to_string(),
description: "Filter Featured Videos.".to_string(), // description: "Filter Featured Videos.".to_string(),
systemImage: "star".to_string(), // systemImage: "star".to_string(),
colorName: "red".to_string(), // colorName: "red".to_string(),
options: vec![ // options: vec![
Filter_Option { // FilterOption {
id: "all".to_string(), // id: "all".to_string(),
title: "No".to_string(), // title: "No".to_string(),
}, // },
Filter_Option { // FilterOption {
id: "featured".to_string(), // id: "featured".to_string(),
title: "Yes".to_string(), // title: "Yes".to_string(),
}, // },
], // ],
multiSelect: false, // multiSelect: false,
}, // },
], // ],
nsfw: true, // nsfw: true,
}); // });
status.add_channel(Channel { status.add_channel(Channel {
id: "perverzija".to_string(), id: "perverzija".to_string(),
name: "Perverzija".to_string(), name: "Perverzija".to_string(),
@@ -126,36 +121,54 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
status: "active".to_string(), status: "active".to_string(),
categories: vec![], categories: vec![],
options: vec![ options: vec![
Channel_Option { ChannelOption {
id: "sort".to_string(), id: "sort".to_string(),
title: "Sort".to_string(), title: "Sort".to_string(),
description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(), description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
systemImage: "list.number".to_string(), systemImage: "list.number".to_string(),
colorName: "blue".to_string(), colorName: "blue".to_string(),
options: vec![ options: vec![
Filter_Option { FilterOption {
id: "date".to_string(), id: "date".to_string(),
title: "Date".to_string(), title: "Date".to_string(),
}, },
Filter_Option { FilterOption {
id: "name".to_string(), id: "name".to_string(),
title: "Name".to_string(), title: "Name".to_string(),
}, },
], ],
multiSelect: false, multiSelect: false,
}, },
Channel_Option { ChannelOption {
id: "featured".to_string(),
title: "Featured".to_string(),
description: "Filter Featured Videos.".to_string(),
systemImage: "star".to_string(),
colorName: "red".to_string(),
options: vec![
FilterOption {
id: "all".to_string(),
title: "No".to_string(),
},
FilterOption {
id: "featured".to_string(),
title: "Yes".to_string(),
},
],
multiSelect: false,
},
ChannelOption {
id: "duration".to_string(), id: "duration".to_string(),
title: "Duration".to_string(), title: "Duration".to_string(),
description: "Filter the videos by duration.".to_string(), description: "Filter the videos by duration.".to_string(),
systemImage: "timer".to_string(), systemImage: "timer".to_string(),
colorName: "green".to_string(), colorName: "green".to_string(),
options: vec![ options: vec![
Filter_Option { FilterOption {
id: "short".to_string(), id: "short".to_string(),
title: "< 1h".to_string(), title: "< 1h".to_string(),
}, },
Filter_Option { FilterOption {
id: "long".to_string(), id: "long".to_string(),
title: "> 1h".to_string(), title: "> 1h".to_string(),
}, },
@@ -165,24 +178,25 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
         ],
         nsfw: true,
     });
+    status.add_channel(Channel {
+        id: "hanime".to_string(),
+        name: "Hanime".to_string(),
+        description: "Free Hentai from Hanime".to_string(),
+        premium: false,
+        favicon: "https://www.google.com/s2/favicons?sz=64&domain=hanime.tv".to_string(),
+        status: "active".to_string(),
+        categories: vec![],
+        options: vec![],
+        nsfw: true,
+    });
     status.iconUrl = format!("http://{}/favicon.ico", host).to_string();
     Ok(web::HttpResponse::Ok().json(&status))
 }
 async fn videos_post(
     video_request: web::types::Json<Videos_Request>,
+    cache: web::types::State<VideoCache>
 ) -> Result<impl web::Responder, web::Error> {
-    let mut format = Video_Format::new(
-        "https://pervl2.xtremestream.xyz/player/xs1.php?data=794a51bb65913debd98f73111705738a"
-            .to_string(),
-        "1080p".to_string(),
-        "m3u8".to_string(),
-    );
-    format.add_http_header(
-        "Referer".to_string(),
-        "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a"
-            .to_string(),
-    );
     let mut videos = Videos {
         pageInfo: PageInfo {
             hasNextPage: true,
@@ -215,60 +229,19 @@ async fn videos_post(
         .parse()
         .unwrap();
     let featured = video_request.featured.as_deref().unwrap_or("all").to_string();
-    let provider = PerverzijaProvider::new();
+    let provider = get_provider(channel.as_str())
+        .ok_or_else(|| web::error::ErrorBadRequest("Invalid channel".to_string()))?;
     let video_items = provider
-        .get_videos(channel, sort, query, page.to_string(), perPage.to_string(), featured)
+        .get_videos(cache.get_ref().clone(), channel, sort, query, page.to_string(), perPage.to_string(), featured)
         .await;
     videos.items = video_items.clone();
     Ok(web::HttpResponse::Ok().json(&videos))
 }
-// async fn videos_get(_req: HttpRequest) -> Result<impl web::Responder, web::Error> {
-// let mut http_headers: HashMap<String, String> = HashMap::new();
-// // http_headers.insert(
-// // "Referer".to_string(),
-// // "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a"
-// // .to_string(),
-// // );
-// let mut format = Video_Format::new(
-// "https://pervl2.xtremestream.xyz/player/xs1.php?data=794a51bb65913debd98f73111705738a"
-// .to_string(),
-// "1080p".to_string(),
-// "m3u8".to_string(),
-// );
-// format.add_http_header(
-// "Referer".to_string(),
-// "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a"
-// .to_string(),
-// );
-// let videos = Videos {
-// pageInfo: PageInfo {
-// hasNextPage: true,
-// resultsPerPage: 10,
-// },
-// items: vec![
-// Video_Item{
-// duration: 110, // 110,
-// views: Some(14622653), // 14622653,
-// rating: Some(0.0), // 0.0,
-// id: "794a51bb65913debd98f73111705738a".to_string(), // "c85017ca87477168d648727753c4ded8a35f173e22ef93743e707b296becb299",
-// title: "BrazzersExxtra &#8211; Give Me A D! The Best Of Cheerleaders".to_string(), // "20 Minutes of Adorable Kittens BEST Compilation",
-// // url: "https://tube.perverzija.com/brazzersexxtra-give-me-a-d-the-best-of-cheerleaders/".to_string(),
-// // url : "https://pervl2.xtremestream.xyz/player/xs1.php?data=794a51bb65913debd98f73111705738a".to_string(), // "https://www.youtube.com/watch?v=y0sF5xhGreA",
-// url : "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a".to_string(),
-// channel: "perverzija".to_string(), // "youtube",
-// thumb: "https://tube.perverzija.com/wp-content/uploads/2025/05/BrazzersExxtra-Give-Me-A-D-The-Best-Of-Cheerleaders.jpg".to_string(), // "https://i.ytimg.com/vi/y0sF5xhGreA/hqdefault.jpg",
-// uploader: Some("Brazzers".to_string()), // "The Pet Collective",
-// uploaderUrl: Some("https://brazzers.com".to_string()), // "https://www.youtube.com/@petcollective",
-// verified: Some(false), // false,
-// tags: Some(vec![]), // [],
-// uploadedAt: Some(1741142954), // 1741142954
-// formats: Some(vec![format]), // Additional HTTP headers if needed
-// }
-// ],
-// };
-// println!("Video: {:?}", videos);
-// Ok(web::HttpResponse::Ok().json(&videos))
-// }
+pub fn get_provider(channel: &str) -> Option<AnyProvider> {
+    match channel {
+        "perverzija" => Some(AnyProvider::Perverzija(PerverzijaProvider::new())),
+        "hanime" => Some(AnyProvider::Hanime(HanimeProvider::new())),
+        _ => Some(AnyProvider::Perverzija(PerverzijaProvider::new())),
+    }
+}

src/main.rs

@@ -1,26 +1,23 @@
+#![allow(non_snake_case)]
 use ntex_files as fs;
 use ntex::web;
-use ntex::web::HttpResponse;
-use serde::Deserialize;
-use serde_json::{json};
-use std::thread;
-use std::time::Duration;
 mod api;
 mod status;
 mod videos;
 mod providers;
 mod util;
 #[ntex::main]
 async fn main() -> std::io::Result<()> {
-    std::env::set_var("RUST_LOG", "ntex=warn");
     std::env::set_var("RUST_BACKTRACE", "1");
     env_logger::init(); // You need this to actually see logs
+    let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
-    web::HttpServer::new(|| {
+    web::HttpServer::new(move || {
         web::App::new()
+            .state(cache.clone())
             .wrap(web::middleware::Logger::default())
             .service(web::scope("/api").configure(api::config))
             .service(fs::Files::new("/", "static"))

0
src/providers/all.rs Normal file

266
src/providers/hanime.rs Normal file

@@ -0,0 +1,266 @@
use std::time::Duration;
use std::vec;
use std::env;
use error_chain::error_chain;
use htmlentity::entity::{decode, ICodedDataTrait};
use reqwest::{Proxy};
use futures::future::join_all;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
use crate::videos::{self, Video_Embed, Video_Item}; // Make sure Provider trait is imported
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(reqwest::Error);
}
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
struct HanimeSearchRequest{
search_text: String,
tags: Vec<String>,
tags_mode: String,
brands: Vec<String>,
blacklist: Vec<String>,
order_by: String,
ordering: String,
page: u8
}
impl HanimeSearchRequest {
pub fn new() -> Self {
HanimeSearchRequest {
search_text: "".to_string(),
tags: vec![],
tags_mode: "AND".to_string(),
brands: vec![],
blacklist: vec![],
order_by: "created_at_unix".to_string(),
ordering: "desc".to_string(),
page: 0
}
}
pub fn tags(mut self, tags: Vec<String>) -> Self {
self.tags = tags;
self
}
pub fn search_text(mut self, search_text: String) -> Self {
self.search_text = search_text;
self
}
pub fn tags_mode(mut self, tags_mode: String) -> Self {
self.tags_mode = tags_mode;
self
}
pub fn brands(mut self, brands: Vec<String>) -> Self {
self.brands = brands;
self
}
pub fn blacklist(mut self, blacklist: Vec<String>) -> Self {
self.blacklist = blacklist;
self
}
pub fn order_by(mut self, order_by: String) -> Self {
self.order_by = order_by;
self
}
pub fn ordering(mut self, ordering: String) -> Self {
self.ordering = ordering;
self
}
pub fn page(mut self, page: u8) -> Self {
self.page = page;
self
}
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
struct HanimeSearchResponse{
page: u8,
nbPages:u8,
nbHits: u32,
hitsPerPage: u8,
hits: String
}
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
struct HanimeSearchResult{
id: u64,
name: String,
titles: Vec<String>,
slug: String,
description: String,
views: u64,
interests: u64,
poster_url: String,
cover_url: String,
brand: String,
brand_id: u64,
duration_in_ms: u32,
is_censored: bool,
rating: Option<u32>,
likes: u64,
dislikes: u64,
downloads: u64,
monthly_ranked: Option<u64>,
tags: Vec<String>,
created_at: u64,
released_at: u64,
}
pub struct HanimeProvider {
url: String,
}
impl HanimeProvider {
pub fn new() -> Self {
HanimeProvider {
url: "https://hanime.tv/".to_string(),
}
}
async fn get_video_item(&self, hit: HanimeSearchResult) -> Result<(u64,Video_Item)> {
let id = hit.id.to_string();
let title = hit.name;
let thumb = hit.poster_url;
let duration = (hit.duration_in_ms / 1000) as u32; // Convert ms to seconds
let channel = "hanime".to_string(); // Placeholder, adjust as needed
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let url = format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug);
let response = client.get(url).send().await?;
let text = match response.status().is_success() {
true => {
response.text().await?},
false => {
print!("Failed to fetch video item: {}\n\n", response.status());
return Err(format!("Failed to fetch video item: {}", response.status()).into());
} };
let urls = text.split("\"servers\"").collect::<Vec<&str>>()[1];
let mut url_vec = vec![];
for el in urls.split("\"url\":\"").collect::<Vec<&str>>(){
let url = el.split("\"").collect::<Vec<&str>>()[0];
if !url.is_empty() && url.contains("m3u8") {
url_vec.push(url.to_string());
}
}
Ok((hit.created_at, Video_Item::new(id, title, url_vec[0].clone(), channel, thumb, duration)
.tags(hit.tags)
.uploader(hit.brand)
.views(hit.views as u32)
.rating((hit.likes as f32 / (hit.likes + hit.dislikes)as f32) * 100 as f32)
.formats(vec![videos::Video_Format::new(url_vec[0].clone(), "1080".to_string(), "m3u8".to_string())])))
}
async fn get(&self, cache: VideoCache, page: u8, query: String) -> Result<Vec<Video_Item>> {
let index = format!("{}:{}", query, page);
let old_items = match cache.get(&index) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 * 12 {
println!("Cache hit for URL: {}", index);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let search = HanimeSearchRequest::new().page(page-1).search_text(query.clone());
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.post("https://search.htv-services.com/search")
.json(&search)
.send().await?;
let hits = match response.json::<HanimeSearchResponse>().await {
Ok(resp) => resp.hits,
Err(e) => {
println!("Failed to parse HanimeSearchResponse: {}", e);
return Ok(old_items);
}
};
let hits_json: Vec<HanimeSearchResult> = serde_json::from_str(hits.as_str())
.map_err(|e| format!("Failed to parse hits JSON: {}", e))?;
// let timeout_duration = Duration::from_secs(120);
let futures = hits_json.into_iter().map(|el| self.get_video_item(el.clone()));
let results: Vec<Result<(u64,Video_Item)>> = join_all(futures).await;
let mut items: Vec<(u64, Video_Item)> = results
.into_iter()
.filter_map(Result::ok)
.collect();
let video_items: Vec<Video_Item> = items.into_iter().map(|(_, item)| item).collect();
if !video_items.is_empty() {
cache.remove(&index);
cache.insert(index.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
impl Provider for HanimeProvider {
async fn get_videos(
&self,
cache: VideoCache,
_channel: String,
sort: String,
query: Option<String>,
page: String,
per_page: String,
featured: String,
) -> Vec<Video_Item> {
let _ = featured;
let _ = per_page;
let _ = sort;
let videos: std::result::Result<Vec<Video_Item>, Error> = match query {
Some(q) => self.get(cache, page.parse::<u8>().unwrap_or(1), q).await,
None => self.get(cache, page.parse::<u8>().unwrap_or(1), "".to_string()).await,
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
}
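As an illustration (not one of the changed files): a minimal sketch of calling the new provider from inside this crate, assuming a VideoCache is available as in main.rs. The HanimeSearchRequest builder above is private to the module, so the sketch goes through the public Provider::get_videos entry point; the query string and paging values are made up.

// Hypothetical usage inside the crate; mirrors how api.rs drives a provider.
use crate::providers::{hanime::HanimeProvider, Provider};
use crate::util::cache::VideoCache;
use crate::videos::Video_Item;

async fn demo_hanime_search() -> Vec<Video_Item> {
    let cache = VideoCache::new();        // results are cached under "query:page" for 12 hours
    let provider = HanimeProvider::new();
    provider
        .get_videos(
            cache,
            "hanime".to_string(),         // channel
            "date".to_string(),           // sort (currently ignored by this provider)
            Some("vanilla".to_string()),  // search query (placeholder)
            "1".to_string(),              // page
            "24".to_string(),             // per_page (currently ignored)
            "all".to_string(),            // featured filter (currently ignored)
        )
        .await
}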

src/providers/mod.rs

@@ -1,6 +1,21 @@
-use crate::videos::{Video_Item, Videos};
+use crate::{providers::{hanime::HanimeProvider, perverzija::PerverzijaProvider}, util::cache::VideoCache, videos::Video_Item};
 pub mod perverzija;
+pub mod hanime;
 pub trait Provider{
-    async fn get_videos(&self, channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item>;
+    async fn get_videos(&self, cache: VideoCache ,channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item>;
+}
+pub enum AnyProvider {
+    Perverzija(PerverzijaProvider),
+    Hanime(HanimeProvider),
+}
+impl Provider for AnyProvider {
+    async fn get_videos(&self, cache: VideoCache ,channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item> {
+        match self {
+            AnyProvider::Perverzija(p) => p.get_videos(cache ,channel, sort, query, page, per_page, featured).await,
+            AnyProvider::Hanime(p) => p.get_videos(cache ,channel, sort, query, page, per_page, featured).await,
+        }
+    }
 }
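As an illustration (not part of the diff): the AnyProvider enum above lets the request handler stay generic over providers without trait objects. A hedged sketch of the dispatch, assuming the get_provider helper added in src/api.rs; the channel, sort, and paging values are placeholders.

// Hypothetical call site; get_provider comes from src/api.rs in this PR.
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::videos::Video_Item;

async fn fetch_for_channel(channel: &str, cache: VideoCache) -> Vec<Video_Item> {
    // Unknown channels currently fall back to Perverzija, so this never panics.
    let provider = crate::api::get_provider(channel)
        .expect("get_provider always returns Some in the current match");
    provider
        .get_videos(cache, channel.to_string(), "date".to_string(), None,
                    "1".to_string(), "20".to_string(), "all".to_string())
        .await
}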

src/providers/perverzija.rs

@@ -1,12 +1,14 @@
 use std::vec;
+use std::env;
 use error_chain::error_chain;
-use htmlentity::entity::{decode, encode, CharacterSet, EncodeType, ICodedDataTrait};
-use htmlentity::types::{AnyhowResult, Byte};
+use htmlentity::entity::{decode, ICodedDataTrait};
+use reqwest::{Proxy};
 use crate::providers::Provider;
+use crate::util::cache::VideoCache;
+use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
 use crate::util::time::parse_time_to_seconds;
-use crate::videos::{self, PageInfo, Video_Embed, Video_Item, Videos}; // Make sure Provider trait is imported
+use crate::videos::{self, Video_Embed, Video_Item}; // Make sure Provider trait is imported
 error_chain! {
     foreign_links {
@@ -24,69 +26,196 @@ impl PerverzijaProvider {
url: "https://tube.perverzija.com/".to_string(), url: "https://tube.perverzija.com/".to_string(),
} }
} }
async fn get(&self, page: &u8, featured: String) -> Result<Vec<Video_Item>> { async fn get(&self, cache:VideoCache ,page: u8, featured: String) -> Result<Vec<Video_Item>> {
println!("get");
//TODO
// let mut url = Url::parse("https://example.net")?;
// url.query_pairs_mut().append_pair("foo", "bar");
// url.query_pairs_mut().append_pair("key", "dkhdsihdsaiufds");
// url.query_pairs_mut().append_pair("hello", "world");
// println!("{}", url.as_str());
let mut prefix_uri = "".to_string(); let mut prefix_uri = "".to_string();
if featured == "featured"{ if featured == "featured" {
prefix_uri = "featured-scenes/".to_string(); prefix_uri = "featured-scenes/".to_string();
} }
let mut url = format!("{}{}page/{}/", self.url, prefix_uri, page); let mut url = format!("{}{}page/{}/", self.url, prefix_uri, page);
if page == &1 { if page == 1 {
url = format!("{}{}", self.url, prefix_uri); url = format!("{}{}", self.url, prefix_uri);
} }
let client = reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
// .proxy(Proxy::https("http://192.168.0.101:8080").unwrap())
// .danger_accept_invalid_certs(true)
.build()?;
let response = client.get(url).send().await?;
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.get(url.clone()).send().await?;
// print!("Response: {:?}\n", response);
if response.status().is_success() { if response.status().is_success() {
let text = response.text().await?; let text = response.text().await?;
let video_items = self.get_video_items_from_html(text.clone()); let video_items: Vec<Video_Item> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items) Ok(video_items)
} else { } else {
Err("Failed to fetch data".into()) let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
println!("FlareSolverr result: {:?}", result);
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response)
} }
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
} }
fn query(&self, query: &str) -> Result<Vec<Video_Item>> { };
println!("Searching for query: {}", query); if !video_items.is_empty() {
let url = format!("{}?s={}", self.url, query); cache.remove(&url);
let client = reqwest::blocking::Client::new(); cache.insert(url.clone(), video_items.clone());
let response = client.get(&url).send()?;
if response.status().is_success() {
let text = response.text().unwrap_or_default();
println!("{}", &text);
Ok(vec![])
} else { } else {
Err("Failed to fetch data".into()) return Ok(old_items);
}
Ok(video_items)
}
}
async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result<Vec<Video_Item>> {
println!("query: {}", query);
let search_string = query.replace(" ", "+");
let mut url = format!(
"{}advanced-search/?_sf_s={}&sf_paged={}",
self.url, search_string, page
);
if page == 1 {
url = format!("{}advanced-search/?_sf_s={}", self.url, search_string);
}
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.get(url.clone()).send().await?;
// print!("Response: {:?}\n", response);
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<Video_Item> = self.get_video_items_from_html_query(text.clone());
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
println!("FlareSolverr result: {:?}", result);
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html_query(res.solution.response)
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} }
} }
fn get_video_items_from_html(&self, html: String) -> Vec<Video_Item> { fn get_video_items_from_html(&self, html: String) -> Vec<Video_Item> {
// println!("HTML: {}", html);
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let mut items: Vec<Video_Item> = Vec::new(); let mut items: Vec<Video_Item> = Vec::new();
let video_listing_content = html.split("video-listing-content").collect::<Vec<&str>>()[1];
let raw_html = html.split("video-listing-content").collect::<Vec<&str>>();
let video_listing_content = raw_html[1];
let raw_videos = video_listing_content let raw_videos = video_listing_content
.split("video-item post") .split("video-item post")
.collect::<Vec<&str>>()[1..] .collect::<Vec<&str>>()[1..]
.to_vec(); .to_vec();
for video_segment in &raw_videos { for video_segment in &raw_videos {
let vid = video_segment.split("\n").collect::<Vec<&str>>(); let vid = video_segment.split("\n").collect::<Vec<&str>>();
let mut index = 0; if vid.len() > 20 {
if vid.len() > 10 { println!("Skipping video segment with unexpected length: {}", vid.len());
continue; continue;
} }
for line in vid.clone(){
println!("{}: {}\n\n", index, line);
index += 1;
}
let mut title = vid[1].split(">").collect::<Vec<&str>>()[1] let mut title = vid[1].split(">").collect::<Vec<&str>>()[1]
.split("<") .split("<")
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
@@ -96,12 +225,12 @@ impl PerverzijaProvider {
let url = vid[1].split("iframe src=&quot;").collect::<Vec<&str>>()[1] let url = vid[1].split("iframe src=&quot;").collect::<Vec<&str>>()[1]
.split("&quot;") .split("&quot;")
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
.to_string().replace("index.php", "xs1.php");; .to_string().replace("index.php", "xs1.php");
let id = url.split("data=").collect::<Vec<&str>>()[1] let id = url.split("data=").collect::<Vec<&str>>()[1]
.split("&") .split("&")
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
.to_string(); .to_string();
let raw_duration = match vid.len(){ let raw_duration = match vid.len() {
10 => vid[6].split("time_dur\">").collect::<Vec<&str>>()[1] 10 => vid[6].split("time_dur\">").collect::<Vec<&str>>()[1]
.split("<") .split("<")
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
@@ -125,18 +254,132 @@ impl PerverzijaProvider {
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
.to_string(), .to_string(),
}; };
let mut embed_html = vid[1].split("data-embed='").collect::<Vec<&str>>()[1].split("'").collect::<Vec<&str>>()[0] let embed_html = vid[1].split("data-embed='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string(); .to_string();
embed_html = embed_html.replace("index.php", "xs1.php"); // let referer_url = vid[1].split("data-url='").collect::<Vec<&str>>()[1]
// .split("'")
println!("Embed HTML: {}\n\n", embed_html); // .collect::<Vec<&str>>()[0]
println!("Url: {}\n\n", url.clone()); // .to_string();
let referer_url = "https://xtremestream.xyz/".to_string();
let embed = Video_Embed::new(embed_html, url.clone()); let embed = Video_Embed::new(embed_html, url.clone());
let mut video_item =
Video_Item::new(id, title, url.clone(), "perverzija".to_string(), thumb, duration); let mut tags: Vec<String> = Vec::new(); // Placeholder for tags, adjust as needed
video_item.embed = Some(embed); for tag in vid[0].split(" ").collect::<Vec<&str>>(){
let mut format = videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string()); if tag.starts_with("tag-") {
format.add_http_header("Referer".to_string(), url.clone().replace("xs1.php", "index.php")); let tag_name = tag.split("tag-").collect::<Vec<&str>>()[1]
.to_string();
if !tag_name.is_empty() {
tags.push(tag_name.replace("-", " ").to_string());
}
}
}
let mut video_item = Video_Item::new(
id,
title,
embed.source.clone(),
"perverzija".to_string(),
thumb,
duration,
).tags(tags);
// .embed(embed.clone());
let mut format =
videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
format.add_http_header("Referer".to_string(), referer_url.clone());
if let Some(formats) = video_item.formats.as_mut() {
formats.push(format);
} else {
video_item.formats = Some(vec![format]);
}
items.push(video_item);
}
return items;
}
fn get_video_items_from_html_query(&self, html: String) -> Vec<Video_Item> {
let mut items: Vec<Video_Item> = Vec::new();
let video_listing_content = html.split("search-filter-results-").collect::<Vec<&str>>()[1];
let raw_videos = video_listing_content
.split("video-item post")
.collect::<Vec<&str>>()[1..]
.to_vec();
for video_segment in &raw_videos {
let vid = video_segment.split("\n").collect::<Vec<&str>>();
if vid.len() > 20 {
continue;
}
let mut title = vid[3].split("title='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let url = vid[4].split("iframe src=&quot;").collect::<Vec<&str>>()[1]
.split("&quot;")
.collect::<Vec<&str>>()[0]
.to_string().replace("index.php","xs1.php");
let id = url.split("data=").collect::<Vec<&str>>()[1]
.split("&")
.collect::<Vec<&str>>()[0]
.to_string();
let raw_duration = match vid.len() {
18 => vid[16].split("time_dur\">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string(),
_ => "00:00".to_string(),
};
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let thumb_index = match vid.len() {
18 => 14,
13 => 8,
_ => {
continue;
}
};
let thumb = match vid[thumb_index].contains("srcset=\"") {
true => vid[thumb_index].split(" ").collect::<Vec<&str>>()[0]
.to_string(),
false => vid[thumb_index].split("src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string(),
};
let embed_html = vid[4].split("data-embed='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string();
// let referer_url = vid[4].split("data-url='").collect::<Vec<&str>>()[1]
// .split("'")
// .collect::<Vec<&str>>()[0]
// .to_string();
let referer_url = "https://xtremestream.xyz/".to_string();
let embed = Video_Embed::new(embed_html, url.clone());
let mut tags: Vec<String> = Vec::new(); // Placeholder for tags, adjust as needed
for tag in vid[0].split(" ").collect::<Vec<&str>>(){
if tag.starts_with("tag-") {
let tag_name = tag.split("tag-").collect::<Vec<&str>>()[1]
.to_string();
if !tag_name.is_empty() {
tags.push(tag_name.replace("-", " ").to_string());
}
}
}
let mut video_item = Video_Item::new(
id,
title,
url.clone(),
"perverzija".to_string(),
thumb,
duration,
)
.tags(tags);
// .embed(embed.clone());
let mut format =
videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
format.add_http_header("Referer".to_string(), referer_url.clone());
if let Some(formats) = video_item.formats.as_mut() { if let Some(formats) = video_item.formats.as_mut() {
formats.push(format); formats.push(format);
} else { } else {
@@ -148,9 +391,11 @@ impl PerverzijaProvider {
         return items;
     }
 }
 impl Provider for PerverzijaProvider {
     async fn get_videos(
         &self,
+        cache: VideoCache,
         _channel: String,
         sort: String,
         query: Option<String>,
@@ -158,10 +403,11 @@ impl Provider for PerverzijaProvider {
         per_page: String,
         featured: String,
     ) -> Vec<Video_Item> {
+        let _ = per_page;
         let _ = sort;
         let videos: std::result::Result<Vec<Video_Item>, Error> = match query {
-            Some(q) => self.query(&q),
-            None => self.get(&page.parse::<u8>().unwrap_or(1), featured).await,
+            Some(q) => self.query(cache, page.parse::<u8>().unwrap_or(1), &q).await,
+            None => self.get(cache, page.parse::<u8>().unwrap_or(1), featured).await,
         };
         match videos {
             Ok(v) => v,

src/status.rs

@@ -16,23 +16,23 @@ pub struct Channel {
     pub favicon: String, //"https:\/\/www.google.com/s2/favicons?sz=64&domain=https:\/\/hottubapp.io",
     pub status: String, //"active",
     pub categories: Vec<String>, //[],
-    pub options: Vec<Channel_Option>,
+    pub options: Vec<ChannelOption>,
     pub nsfw: bool, //true
 }
 #[derive(serde::Serialize)]
-pub struct Channel_Option {
+pub struct ChannelOption {
     pub id: String, //"channels",
     pub title: String, //"Sites",
     pub description: String, //"Websites included in search results.",
     pub systemImage: String, //"network",
     pub colorName: String, //"purple",
-    pub options: Vec<Filter_Option>, //[],
+    pub options: Vec<FilterOption>, //[],
     pub multiSelect: bool, //true
 }
 #[derive(serde::Serialize)]
-pub struct Filter_Option{
+pub struct FilterOption{
     pub id: String, //"sort",
     pub title: String, //"Sort",
 }

34
src/util/cache.rs Normal file

@@ -0,0 +1,34 @@
use std::time::SystemTime;
use std::sync::{Arc, Mutex};
use crate::videos::Video_Item;

#[derive(Clone)]
pub struct VideoCache{
    cache: Arc<Mutex<std::collections::HashMap<String, (SystemTime, Vec<Video_Item>)>>>, // url -> time+Items
}

impl VideoCache {
    pub fn new() -> Self {
        VideoCache {
            cache: Arc::new(Mutex::new(std::collections::HashMap::new())),
        }
    }
    pub fn get(&self, key: &str) -> Option<(SystemTime, Vec<Video_Item>)> {
        let cache = self.cache.lock().ok()?;
        cache.get(key).cloned()
    }
    pub fn insert(&self, key: String, value: Vec<Video_Item>) {
        if let Ok(mut cache) = self.cache.lock() {
            cache.insert(key.clone(), (SystemTime::now(), value.clone()));
        }
    }
    pub fn remove(&self, key: &str) {
        if let Ok(mut cache) = self.cache.lock() {
            cache.remove(key);
        }
    }
}
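As an illustration (not one of the changed files): the lookup pattern the providers use with this cache. The key is the request URL (Perverzija) or a "query:page" string (Hanime), and an entry counts as stale after a provider-specific TTL; the helper below is a hedged sketch, with the one-hour TTL taken from perverzija.rs.

// Hypothetical helper; mirrors the cache checks in the providers.
use crate::util::cache::VideoCache;
use crate::videos::Video_Item;

/// Return the cached items for `key` if they are younger than `ttl_secs`.
fn fresh_items(cache: &VideoCache, key: &str, ttl_secs: u64) -> Option<Vec<Video_Item>> {
    let (stored_at, items) = cache.get(key)?;
    if stored_at.elapsed().unwrap_or_default().as_secs() < ttl_secs {
        Some(items)
    } else {
        None // stale: the caller refetches, then cache.remove + cache.insert
    }
}

// Typical call site, as in PerverzijaProvider::get:
// if let Some(items) = fresh_items(&cache, &url, 60 * 60) { return Ok(items); }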

99
src/util/flaresolverr.rs Normal file

@@ -0,0 +1,99 @@
use std::collections::HashMap;
use reqwest::{Client, Proxy};
use serde_json::json;
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrRequest {
pub cmd: String,
pub url: String,
pub maxTimeout: u32,
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlaresolverrCookie {
name: String, //"cf_clearance",
value: String, //"lnKoXclrIp_mDrWJFfktPGm8GDyxjSpzy9dx0qDTiRg-1748689259-1.2.1.1-AIFERAPCdCSvvdu1mposNdUpKV9wHZXBpSI2L9k9TaKkPcqmomON_XEb6ZtRBtrmQu_DC8AzKllRg2vNzVKOUsvv9ndjQ.vv8Z7cNkgzpIbGFy96kXyAYH2mUk3Q7enZovDlEbK5kpV3Sbmd2M3_bUCBE1WjAMMdXlyNElH1LOpUm149O9hrluXjAffo4SwHI4HO0UckBPWBlBqhznKPgXxU0g8VHLDeYnQKViY8rP2ud4tyzKnJUxuYXzr4aWBNMp6TESp49vesRiel_Y5m.rlTY4zSb517S9iPbEQiYHRI.uH5mMHVI3jvJl0Mx94tPrpFnkhDdmzL3DRSllJe9k786Lf21I9WBoH2cCR3yHw",
domain: String, //".discord.com",
path: String, //"/",
expires: f64, //1780225259.237105,
size: u64, //438,
httpOnly: bool, //true,
secure: bool, //true,
session: bool, //false,
sameSite: Option<String>, //"None",
priority: String, //"Medium",
sameParty: bool, //false,
sourceScheme: String, //"Secure",
sourcePort: u32, //443,
partitionKey: Option<String>, //"https://perverzija.com"
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrSolution {
url: String,
status: u32,
pub response: String,
headers: HashMap<String, String>,
cookies: Vec<FlaresolverrCookie>,
userAgent: String,
}
// impl FlareSolverrSolution {
// fn to_client(&self,){
// let mut headers = header::HeaderMap::new();
// for (h, v) in &self.headers {
// println!("{}: {}", h, v);
// headers.insert(
// header::HeaderName::from_bytes(h.as_bytes()).unwrap(),
// header::HeaderValue::from_str(v).unwrap(),
// );
// }
// // let client = reqwest::Client::builder()
// // .danger_accept_invalid_certs(true)
// // .
// // .build().unwrap();
// }
// }
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrResponse {
status: String,
message: String,
pub solution: FlareSolverrSolution,
startTimestamp: u64,
endTimestamp: u64,
version: String,
}
pub struct Flaresolverr {
url: String
}
impl Flaresolverr {
pub fn new(url: String) -> Self {
Flaresolverr {
url: url
}
}
pub async fn solve(
&self,
request: FlareSolverrRequest,
) -> Result<FlareSolverrResponse, Box<dyn std::error::Error>> {
let client = Client::builder()
.proxy(Proxy::https("http://192.168.0.101:8080").unwrap())
.proxy(Proxy::http("http://192.168.0.101:8080").unwrap())
.danger_accept_invalid_certs(true)
.build()?;
let response = client
.post(&self.url)
.header("Content-Type", "application/json")
.json(&json!({
"cmd": request.cmd,
"url": request.url,
"maxTimeout": request.maxTimeout,
}))
.send().await?;
let body: FlareSolverrResponse = response.json::<FlareSolverrResponse>().await?;
Ok(body)
}
}
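As an illustration (not part of the diff): a minimal sketch of driving the client above, assuming a FlareSolverr instance is reachable at the URL in the FLARE_URL environment variable (the variable perverzija.rs reads); error handling is collapsed into `?`.

// Hypothetical usage; FlareSolverrRequest and solve are the items defined above.
use std::env;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};

async fn fetch_via_flaresolverr(target: &str) -> Result<String, Box<dyn std::error::Error>> {
    let flare = Flaresolverr::new(env::var("FLARE_URL")?);
    let response = flare
        .solve(FlareSolverrRequest {
            cmd: "request.get".to_string(), // FlareSolverr command
            url: target.to_string(),        // Cloudflare-protected page to fetch
            maxTimeout: 60000,              // milliseconds, same value the providers pass
        })
        .await?;
    // solution.response holds the rendered HTML of the target page.
    Ok(response.solution.response)
}

Note that solve() currently routes through the hard-coded proxy above (http://192.168.0.101:8080), so this sketch only works where that proxy is reachable.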

src/util/mod.rs

@@ -1 +1,3 @@
 pub mod time;
+pub mod flaresolverr;
+pub mod cache;

src/videos.rs

@@ -1,7 +1,23 @@
 use std::collections::HashMap;
 #[derive(serde::Serialize, serde::Deserialize, Debug)]
 pub struct Videos_Request {
+    //"versionInstallDate":"2025-06-03T18:20:20Z","languageCode":"en","appInstallDate":"2025-06-03T18:20:20Z","server":"spacemoehre","sexu
+    pub clientHash: Option<String>, // "a07b23c9b07813c65050e2a4041ca777",
+    pub blockedKeywords: Option<String>, // "kittens",
+    pub countryCode: Option<String>, // "DE",
+    pub clientVersion: Option<String>, // "2.1.4-22b",
+    pub timestamp: Option<String>, // "1748976686",
+    pub blockedUploaders: Option<String>, // "",
+    pub anonId: Option<String>, // "1AB8A060-A47D-47EF-B9CB-63980ED84C8A",
+    pub debugTools: Option<bool>, // false,
+    pub versionInstallDate: Option<String>, // "2025-06-03T18:20:20Z",
+    pub languageCode: Option<String>, // "en",
+    pub appInstallDate: Option<String>, // "2025-06-03T18:20:20Z",
+    pub server: Option<String>, // "spacemoehre",
+    pub sexuality: Option<String>, // "straight",
     pub channel: Option<String>, //"youtube",
     pub sort: Option<String>, //"new",
     pub query: Option<String>, //"kittens",
@@ -20,8 +36,8 @@ pub struct PageInfo {
 #[derive(serde::Serialize, Debug, Clone)]
 pub struct Video_Embed{
-    html: String,
-    source: String,
+    pub html: String,
+    pub source: String,
 }
 impl Video_Embed {
     pub fn new(html: String, source: String) -> Self {
@@ -76,6 +92,42 @@ impl Video_Item {
             embed: None, // Placeholder for embed information
         }
     }
+    pub fn tags(mut self, tags: Vec<String>) -> Self {
+        self.tags = Some(tags);
+        self
+    }
+    pub fn uploader(mut self, uploader: String) -> Self {
+        self.uploader = Some(uploader);
+        self
+    }
+    pub fn uploader_url(mut self, uploader_url: String) -> Self {
+        self.uploaderUrl = Some(uploader_url);
+        self
+    }
+    pub fn verified(mut self, verified: bool) -> Self {
+        self.verified = Some(verified);
+        self
+    }
+    pub fn views(mut self, views: u32) -> Self {
+        self.views = Some(views);
+        self
+    }
+    pub fn rating(mut self, rating: f32) -> Self {
+        self.rating = Some(rating);
+        self
+    }
+    pub fn uploaded_at(mut self, uploaded_at: u64) -> Self {
+        self.uploadedAt = Some(uploaded_at);
+        self
+    }
+    pub fn formats(mut self, formats: Vec<Video_Format>) -> Self {
+        self.formats = Some(formats);
+        self
+    }
+    pub fn embed(mut self, embed: Video_Embed) -> Self {
+        self.embed = Some(embed);
+        self
+    }
 }
 #[derive(serde::Serialize, Debug, Clone)]

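As an illustration (not one of the changed files): a short sketch of how the new builder methods on Video_Item chain, mirroring the call in hanime.rs; every literal below is a placeholder.

// Hypothetical example; Video_Item and Video_Format are the types from src/videos.rs.
use crate::videos::{Video_Format, Video_Item};

fn example_item() -> Video_Item {
    let format = Video_Format::new(
        "https://example.invalid/stream.m3u8".to_string(), // stream URL (placeholder)
        "1080".to_string(),                                // quality label
        "m3u8".to_string(),                                // stream type
    );
    Video_Item::new(
        "42".to_string(),                                  // id
        "Example title".to_string(),                       // title
        "https://example.invalid/watch/42".to_string(),    // url
        "perverzija".to_string(),                          // channel
        "https://example.invalid/thumb.jpg".to_string(),   // thumbnail
        3600,                                              // duration in seconds
    )
    .tags(vec!["example".to_string()])
    .uploader("Example Studio".to_string())
    .views(1234)
    .rating(97.5)
    .formats(vec![format])
}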
2
supervisord/burpsuite.sh Normal file

@@ -0,0 +1,2 @@
#!/bin/bash
/headless/.venv/bin/python3 /app/burp/start_burp.py

1
supervisord/hottub.sh Normal file

@@ -0,0 +1 @@
/app/target/release/hottub

supervisord/supervisord.conf Normal file

@@ -0,0 +1,22 @@
[supervisord]
nodaemon=true
[program:hottub]
command=bash /app/supervisord/hottub.sh
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stderr_logfile=/dev/stderr
directory=/app
[program:vnc]
command=/dockerstartup/vnc_startup.sh --wait
autostart=true
autorestart=true
[program:burpsuite]
command=bash /app/supervisord/burpsuite.sh
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stderr_logfile=/dev/stderr