Compare commits

...

44 Commits

Author SHA1 Message Date
Simon  175c9b748f  database support  2025-06-05 18:50:28 +00:00
Simon  6d08362937  database support  2025-06-05 18:50:26 +00:00
Simon  52081698e9  fixed hanime search  2025-06-05 04:22:48 +00:00
Simon  d837028faf  sort burp view  2025-06-04 18:52:35 +00:00
Simon  cb03417f5f  removed the all channel  2025-06-04 18:47:27 +00:00
Simon  d7fc427696  implemented hanime  2025-06-04 18:33:49 +00:00
Simon  3150e57411  caching  2025-06-04 07:35:55 +00:00
Simon  8d5da3a4dc  hotfix  2025-06-03 19:26:26 +00:00
Simon  2ddc5e86e2  hotfix  2025-06-03 18:10:08 +00:00
Simon  2e8b8bea0c  implemented tags for videos  2025-06-03 15:34:02 +00:00
Simon  082b3b5c1d  fixed query  2025-06-03 13:46:54 +00:00
Simon  a7610e1bb3  cleanup and fixed query  2025-06-03 12:59:31 +00:00
Simon  261c81e391  cleanup and fixing  2025-06-03 12:29:41 +00:00
Simon  1324d58f50  docker-compose  2025-06-03 11:55:10 +00:00
Simon  9399949c36  renamed var  2025-06-03 11:52:27 +00:00
Simon  03e4554131  increased wait time and activated burpsuite for supervisord  2025-06-03 10:44:34 +00:00
Simon  c218828d40  hotfix  2025-06-03 10:40:15 +00:00
Simon  15c5216309  simplified and unsecure ;)  2025-06-03 10:39:28 +00:00
Simon  58cff87274  hotfix  2025-06-03 10:38:16 +00:00
Simon  e51de99853  clear tmp for burp  2025-06-03 10:37:21 +00:00
Simon  6b1746180f  hotfix path  2025-06-03 10:33:58 +00:00
Simon  08d7b09e05  update start script  2025-06-03 10:31:19 +00:00
Simon  d74b7b97e6  added jar path  2025-06-03 10:28:03 +00:00
Simon  d1b23dd293  added missing import  2025-06-03 10:26:22 +00:00
Simon  0f9c23168c  burp start script  2025-06-03 10:24:22 +00:00
Simon  4cd9661d4b  fixed path  2025-06-03 10:09:27 +00:00
Simon  91afe6e48f  gnome screenshot for autoburp  2025-06-03 09:58:18 +00:00
Simon  ae312a83fb  added start_burp.sh  2025-06-03 09:54:25 +00:00
Simon  4cf29ce201  typo  2025-06-03 09:47:56 +00:00
Simon  8da7b30c07  Dockerfile hotfix  2025-06-03 08:37:01 +00:00
Simon  cae15e7636  auto burp part 1  2025-06-03 08:28:34 +00:00
Simon  d2254128d7  java update  2025-06-03 07:43:31 +00:00
Simon  be83e12bc3  hotfix hottub path  2025-06-03 07:28:23 +00:00
Simon  babaf90762  hotfix hottub supervisord  2025-06-03 07:26:33 +00:00
Simon  860eadcbd4  supervisor and other update  2025-06-03 07:08:35 +00:00
Simon  ae8fd8e922  MOCK API for tests  2025-06-01 18:09:20 +00:00
Simon  918ed1a125  flaresolverr for loading behind cloudflare  2025-06-01 11:16:26 +00:00
Simon  edc7879324  removed proxy  2025-05-31 13:58:46 +00:00
Simon  580751af03  implemented query and flaresolverr  2025-05-31 13:54:27 +00:00
Simon  3fe699b62d  removed openssl from ntex cargo.toml  2025-05-31 09:55:34 +00:00
Simon  0cb3531ae4  removed default env logger  2025-05-31 09:53:14 +00:00
Simon  5b9a1b351c  more cleanup  2025-05-31 09:47:30 +00:00
20bf6b745b  Merge pull request 'some cleanup' (#2) from master into main (Reviewed-on: #2)  2025-05-31 11:45:23 +02:00
7fa6bdeb3c  Merge pull request 'init' (#1) from master into main (Reviewed-on: #1)  2025-05-31 11:32:29 +02:00
34 changed files with 1319 additions and 245 deletions

1
.env Normal file

@@ -0,0 +1 @@
DATABASE_URL=hottub.db

3
.gitignore vendored

@@ -3,6 +3,7 @@
# will have compiled files and executables
debug/
target/
.testing/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
@@ -20,3 +21,5 @@ Cargo.lock
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
hottub.db
migrations/.keep

Cargo.toml

@@ -6,11 +6,13 @@ edition = "2021"
[dependencies]
async-trait = "0.1.88"
awc = "3.7.0"
diesel = { version = "2.2.10", features = ["sqlite", "r2d2"] }
dotenvy = "0.15.7"
env_logger = "0.11.8"
error-chain = "0.12.4"
futures = "0.3.31"
htmlentity = "1.3.2"
ntex = { version = "2.0", features = ["tokio", "openssl"] }
ntex = { version = "2.0", features = ["tokio"] }
ntex-files = "2.0.0"
once_cell = "1.21.3"
reqwest = { version = "0.12.18", features = ["blocking", "json", "rustls-tls"] }

48
Dockerfile Normal file

@@ -0,0 +1,48 @@
#FROM debian
FROM consol/debian-xfce-vnc:latest
ENV REFRESHED_AT 2025_06_03
# Switch to root user to install additional software
USER 0
RUN apt update
RUN apt install -yq libssl-dev \
wget curl unzip \
openssl \
ca-certificates \
fontconfig \
fonts-dejavu \
libxext6 \
libxrender1 \
libxtst6 \
gnupg \
supervisor \
python3 python3-pip python3-venv\
scrot python3-tk python3-dev \
libx11-6 libx11-dev libxext-dev libxtst6 \
libpng-dev libjpeg-dev libtiff-dev libfreetype6-dev \
x11-xserver-utils \
xserver-xorg \
fluxbox \
xvfb \
gnome-screenshot \
libsqlite3-dev sqlite3 \
&& apt-get clean
RUN mkdir -p /usr/share/man/man1 && \
curl -fsSL https://packages.adoptium.net/artifactory/api/gpg/key/public | gpg --dearmor -o /etc/apt/trusted.gpg.d/adoptium.gpg && \
echo "deb https://packages.adoptium.net/artifactory/deb bullseye main" > /etc/apt/sources.list.d/adoptium.list && \
apt-get update && \
apt-get install -y temurin-21-jdk
RUN java -version
RUN curl https://portswigger.net/burp/releases/download \
-o burpsuite_community.jar
USER 1000
RUN python3 -m venv ~/.venv && bash -c "source ~/.venv/bin/activate && pip3 install pyautogui pillow opencv-python"
RUN echo "source ~/.venv/bin/activate" >> ~/.bashrc

BIN  burp/accept.png Normal file (698 B, binary file not shown)

BIN  burp/close.png Normal file (2.0 KiB, binary file not shown)

BIN  burp/http_history.png Normal file (780 B, binary file not shown)

BIN  burp/next_button.png Normal file (526 B, binary file not shown)

20
burp/project_options.json Normal file

@@ -0,0 +1,20 @@
{
    "proxy":{
        "request_listeners":[
            {
                "certificate_mode":"per_host",
                "custom_tls_protocols":[
                    "SSLv3",
                    "TLSv1",
                    "TLSv1.1",
                    "TLSv1.2",
                    "TLSv1.3"
                ],
                "listen_mode":"all_interfaces",
                "listener_port":8080,
                "running":true,
                "use_custom_tls_protocols":false
            }
        ]
    }
}

BIN  burp/proxy.png Normal file (780 B, binary file not shown)

BIN  burp/sort.png Normal file (379 B, binary file not shown)

BIN  burp/start_burp.png Normal file (818 B, binary file not shown)

80
burp/start_burp.py Normal file

@@ -0,0 +1,80 @@
import pyautogui
import time
import os
import subprocess
import glob

BURP_JAR = "/headless/burpsuite_community.jar"
CONFIG_FILE = "/app/burp/project_options.json"

def start_burp():
    os.system("rm -rf /tmp/burp*")
    burp_process = subprocess.Popen([
        "java", "-jar", BURP_JAR,
        f"--config-file={CONFIG_FILE}"
    ])
    return burp_process

time.sleep(5)
print("Starting Burp Suite...")
burp_process = start_burp()
button = None
while True:
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/next_button.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Next' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/start_burp.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Start Burp' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/accept.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Accept' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/proxy.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'Proxy' button...")
        pyautogui.click(button)
        button = None
    try:
        button = pyautogui.locateCenterOnScreen("/app/burp/http_history.png", confidence=0.8)
    except:
        pass
    if button:
        print("Clicking on the 'HTTP History' button...")
        pyautogui.click(button)
        time.sleep(2)
        try:
            button = pyautogui.locateCenterOnScreen("/app/burp/sort.png", confidence=0.8)
        except:
            pass
        if button:
            print("Clicking on the 'Sorting' button...")
            pyautogui.click(button)
        time.sleep(60*60*24)
        burp_process.terminate()
        print("Starting Burp Suite...")
        burp_process = start_burp()

9
diesel.toml Normal file

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "migrations"

23
docker-compose.yml Normal file

@@ -0,0 +1,23 @@
services:
  hottub:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: hottub
    entrypoint: supervisord
    command: ["-c", "/app/supervisord/supervisord.conf"]
    volumes:
      - /path/to/hottub:/app
    environment:
      - RUST_LOG=info
      - BURP_URL=http://127.0.0.1:8081
    restart: unless-stopped
    working_dir: /app
    ports:
      - 6901:6901
      - 8080:18080

networks:
  traefik_default:
    external: true

migrations/.../down.sql Normal file

@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
DROP TABLE videos

migrations/.../up.sql Normal file

@@ -0,0 +1,8 @@
-- Your SQL goes here
CREATE TABLE videos (
id TEXT NOT NULL PRIMARY KEY, -- like url parts to uniquely identify a video
url TEXT NOT NULL,
views INTEGER,
rating INTEGER,
uploader TEXT
)

src/api.rs

@@ -1,18 +1,13 @@
use htmlentity::entity::decode;
use htmlentity::entity::ICodedDataTrait;
use futures::channel;
use ntex::http::header;
use ntex::util::Buf;
use ntex::web;
use ntex::web::HttpRequest;
use ntex::web::HttpResponse;
use serde_json::json;
use serde_json::Value;
use std::collections::HashMap;
use crate::providers::hanime::HanimeProvider;
use crate::providers::perverzija::PerverzijaProvider;
use crate::{providers::*, status::*, videos::*};
use crate::util::cache::VideoCache;
use crate::{providers::*, status::*, videos::*, DbPool};
// this function could be located in a different module
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("/status")
@@ -37,86 +32,86 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
// You can now use `method`, `host`, and `port` as needed
status.add_channel(Channel {
id: "all".to_string(),
name: "SpaceMoehre's Hottub".to_string(),
favicon: format!("http://{}/static/favicon.ico", host).to_string(),
premium: false,
description: "Work in Progress".to_string(),
status: "active".to_string(),
categories: vec![],
options: vec![
Channel_Option {
id: "channels".to_string(),
title: "Sites".to_string(),
description: "Websites included in search results.".to_string(),
systemImage: "network".to_string(),
colorName: "purple".to_string(),
options: vec![
Filter_Option {
id: "perverzija".to_string(),
title: "Perverzija".to_string(),
},
],
multiSelect: true,
},
Channel_Option {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
Filter_Option {
id: "date".to_string(),
title: "Date".to_string(),
},
Filter_Option {
id: "name".to_string(),
title: "Name".to_string(),
},
],
multiSelect: false,
},
Channel_Option {
id: "duration".to_string(),
title: "Duration".to_string(),
description: "Filter the videos by duration.".to_string(),
systemImage: "timer".to_string(),
colorName: "green".to_string(),
options: vec![
Filter_Option {
id: "short".to_string(),
title: "< 1h".to_string(),
},
Filter_Option {
id: "long".to_string(),
title: "> 1h".to_string(),
},
],
multiSelect: true,
},
Channel_Option {
id: "featured".to_string(),
title: "Featured".to_string(),
description: "Filter Featured Videos.".to_string(),
systemImage: "star".to_string(),
colorName: "red".to_string(),
options: vec![
Filter_Option {
id: "all".to_string(),
title: "No".to_string(),
},
Filter_Option {
id: "featured".to_string(),
title: "Yes".to_string(),
},
],
multiSelect: false,
},
],
nsfw: true,
});
// status.add_channel(Channel {
// id: "all".to_string(),
// name: "SpaceMoehre's Hottub".to_string(),
// favicon: format!("http://{}/static/favicon.ico", host).to_string(),
// premium: false,
// description: "Work in Progress".to_string(),
// status: "active".to_string(),
// categories: vec![],
// options: vec![
// ChannelOption {
// id: "channels".to_string(),
// title: "Sites".to_string(),
// description: "Websites included in search results.".to_string(),
// systemImage: "network".to_string(),
// colorName: "purple".to_string(),
// options: vec![
// FilterOption {
// id: "perverzija".to_string(),
// title: "Perverzija".to_string(),
// },
// ],
// multiSelect: true,
// },
// ChannelOption {
// id: "sort".to_string(),
// title: "Sort".to_string(),
// description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
// systemImage: "list.number".to_string(),
// colorName: "blue".to_string(),
// options: vec![
// FilterOption {
// id: "date".to_string(),
// title: "Date".to_string(),
// },
// FilterOption {
// id: "name".to_string(),
// title: "Name".to_string(),
// },
// ],
// multiSelect: false,
// },
// ChannelOption {
// id: "duration".to_string(),
// title: "Duration".to_string(),
// description: "Filter the videos by duration.".to_string(),
// systemImage: "timer".to_string(),
// colorName: "green".to_string(),
// options: vec![
// FilterOption {
// id: "short".to_string(),
// title: "< 1h".to_string(),
// },
// FilterOption {
// id: "long".to_string(),
// title: "> 1h".to_string(),
// },
// ],
// multiSelect: true,
// },
// ChannelOption {
// id: "featured".to_string(),
// title: "Featured".to_string(),
// description: "Filter Featured Videos.".to_string(),
// systemImage: "star".to_string(),
// colorName: "red".to_string(),
// options: vec![
// FilterOption {
// id: "all".to_string(),
// title: "No".to_string(),
// },
// FilterOption {
// id: "featured".to_string(),
// title: "Yes".to_string(),
// },
// ],
// multiSelect: false,
// },
// ],
// nsfw: true,
// });
status.add_channel(Channel {
id: "perverzija".to_string(),
name: "Perverzija".to_string(),
@@ -126,36 +121,54 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
status: "active".to_string(),
categories: vec![],
options: vec![
Channel_Option {
ChannelOption {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
Filter_Option {
FilterOption {
id: "date".to_string(),
title: "Date".to_string(),
},
Filter_Option {
FilterOption {
id: "name".to_string(),
title: "Name".to_string(),
},
],
multiSelect: false,
},
Channel_Option {
ChannelOption {
id: "featured".to_string(),
title: "Featured".to_string(),
description: "Filter Featured Videos.".to_string(),
systemImage: "star".to_string(),
colorName: "red".to_string(),
options: vec![
FilterOption {
id: "all".to_string(),
title: "No".to_string(),
},
FilterOption {
id: "featured".to_string(),
title: "Yes".to_string(),
},
],
multiSelect: false,
},
ChannelOption {
id: "duration".to_string(),
title: "Duration".to_string(),
description: "Filter the videos by duration.".to_string(),
systemImage: "timer".to_string(),
colorName: "green".to_string(),
options: vec![
Filter_Option {
FilterOption {
id: "short".to_string(),
title: "< 1h".to_string(),
},
Filter_Option {
FilterOption {
id: "long".to_string(),
title: "> 1h".to_string(),
},
@@ -165,24 +178,26 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
],
nsfw: true,
});
status.add_channel(Channel {
id: "hanime".to_string(),
name: "Hanime".to_string(),
description: "Free Hentai from Hanime".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=hanime.tv".to_string(),
status: "active".to_string(),
categories: vec![],
options: vec![],
nsfw: true,
});
status.iconUrl = format!("http://{}/favicon.ico", host).to_string();
Ok(web::HttpResponse::Ok().json(&status))
}
async fn videos_post(
video_request: web::types::Json<Videos_Request>,
cache: web::types::State<VideoCache>,
pool: web::types::State<DbPool>,
) -> Result<impl web::Responder, web::Error> {
let mut format = Video_Format::new(
"https://pervl2.xtremestream.xyz/player/xs1.php?data=794a51bb65913debd98f73111705738a"
.to_string(),
"1080p".to_string(),
"m3u8".to_string(),
);
format.add_http_header(
"Referer".to_string(),
"https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a"
.to_string(),
);
let mut videos = Videos {
pageInfo: PageInfo {
hasNextPage: true,
@@ -215,60 +230,19 @@ async fn videos_post(
.parse()
.unwrap();
let featured = video_request.featured.as_deref().unwrap_or("all").to_string();
let provider = PerverzijaProvider::new();
let provider = get_provider(channel.as_str())
.ok_or_else(|| web::error::ErrorBadRequest("Invalid channel".to_string()))?;
let video_items = provider
.get_videos(channel, sort, query, page.to_string(), perPage.to_string(), featured)
.get_videos(cache.get_ref().clone(), pool.get_ref().clone(), channel, sort, query, page.to_string(), perPage.to_string(), featured)
.await;
videos.items = video_items.clone();
Ok(web::HttpResponse::Ok().json(&videos))
}
// async fn videos_get(_req: HttpRequest) -> Result<impl web::Responder, web::Error> {
// let mut http_headers: HashMap<String, String> = HashMap::new();
// // http_headers.insert(
// // "Referer".to_string(),
// // "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a"
// // .to_string(),
// // );
// let mut format = Video_Format::new(
// "https://pervl2.xtremestream.xyz/player/xs1.php?data=794a51bb65913debd98f73111705738a"
// .to_string(),
// "1080p".to_string(),
// "m3u8".to_string(),
// );
// format.add_http_header(
// "Referer".to_string(),
// "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a"
// .to_string(),
// );
// let videos = Videos {
// pageInfo: PageInfo {
// hasNextPage: true,
// resultsPerPage: 10,
// },
// items: vec![
// Video_Item{
// duration: 110, // 110,
// views: Some(14622653), // 14622653,
// rating: Some(0.0), // 0.0,
// id: "794a51bb65913debd98f73111705738a".to_string(), // "c85017ca87477168d648727753c4ded8a35f173e22ef93743e707b296becb299",
// title: "BrazzersExxtra &#8211; Give Me A D! The Best Of Cheerleaders".to_string(), // "20 Minutes of Adorable Kittens BEST Compilation",
// // url: "https://tube.perverzija.com/brazzersexxtra-give-me-a-d-the-best-of-cheerleaders/".to_string(),
// // url : "https://pervl2.xtremestream.xyz/player/xs1.php?data=794a51bb65913debd98f73111705738a".to_string(), // "https://www.youtube.com/watch?v=y0sF5xhGreA",
// url : "https://pervl2.xtremestream.xyz/player/index.php?data=794a51bb65913debd98f73111705738a".to_string(),
// channel: "perverzija".to_string(), // "youtube",
// thumb: "https://tube.perverzija.com/wp-content/uploads/2025/05/BrazzersExxtra-Give-Me-A-D-The-Best-Of-Cheerleaders.jpg".to_string(), // "https://i.ytimg.com/vi/y0sF5xhGreA/hqdefault.jpg",
// uploader: Some("Brazzers".to_string()), // "The Pet Collective",
// uploaderUrl: Some("https://brazzers.com".to_string()), // "https://www.youtube.com/@petcollective",
// verified: Some(false), // false,
// tags: Some(vec![]), // [],
// uploadedAt: Some(1741142954), // 1741142954
// formats: Some(vec![format]), // Additional HTTP headers if needed
// }
// ],
// };
// println!("Video: {:?}", videos);
// Ok(web::HttpResponse::Ok().json(&videos))
// }
pub fn get_provider(channel: &str) -> Option<AnyProvider> {
match channel {
"perverzija" => Some(AnyProvider::Perverzija(PerverzijaProvider::new())),
"hanime" => Some(AnyProvider::Hanime(HanimeProvider::new())),
_ => Some(AnyProvider::Perverzija(PerverzijaProvider::new())),
}
}

27
src/db.rs Normal file

@@ -0,0 +1,27 @@
use diesel::prelude::*;
use dotenvy::dotenv;
use std::{env, sync::{Arc, Mutex}};
use crate::models::DBVideo;
pub fn get_video(conn: &mut SqliteConnection, video_id: String) -> Result<Option<String>, diesel::result::Error> {
use crate::schema::videos::dsl::*;
let result = videos
.filter(id.eq(video_id))
.first::<DBVideo>(conn)
.optional()?;
match result{
Some(video) => Ok(Some(video.url)),
None => Ok(None),
}
}
pub fn insert_video(conn: &mut SqliteConnection, new_id: &str, new_url: &str) -> Result<usize, diesel::result::Error> {
use crate::schema::videos::dsl::*;
diesel::insert_into(videos).values(DBVideo{
id: new_id.to_string(),
url: new_url.to_string(),
}).execute(conn)
}
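
A minimal usage sketch (hypothetical, not part of this diff) of how these helpers might be driven from a handler, assuming the DbPool alias from src/main.rs; the cached_url wrapper is made up for illustration:

fn cached_url(pool: &crate::DbPool, video_id: &str, video_url: &str) -> Option<String> {
    // Take a pooled SQLite connection; r2d2 derefs it to &mut SqliteConnection.
    let mut conn = pool.get().ok()?;
    match crate::db::get_video(&mut conn, video_id.to_string()) {
        // Hit: the resolved URL is already stored in the videos table.
        Ok(Some(existing)) => Some(existing),
        // Miss: remember the freshly resolved URL, then hand it back.
        Ok(None) => {
            let _ = crate::db::insert_video(&mut conn, video_id, video_url);
            Some(video_url.to_string())
        }
        Err(_) => None,
    }
}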

src/main.rs

@@ -1,29 +1,53 @@
use ntex_files as fs;
#![allow(non_snake_case)]
#[macro_use]
extern crate diesel;
use diesel::{r2d2::{self, ConnectionManager}, SqliteConnection};
use dotenvy::dotenv;
use ntex_files as fs;
use ntex::web;
use ntex::web::HttpResponse;
use serde::Deserialize;
use serde_json::{json};
use std::thread;
use std::time::Duration;
mod api;
mod status;
mod videos;
mod db;
mod models;
mod providers;
mod schema;
mod status;
mod util;
mod videos;
type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
#[ntex::main]
async fn main() -> std::io::Result<()> {
std::env::set_var("RUST_LOG", "ntex=warn");
std::env::set_var("RUST_BACKTRACE", "1");
env_logger::init(); // You need this to actually see logs
dotenv().ok();
// set up database connection pool
let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
let manager = ConnectionManager::<SqliteConnection>::new(connspec);
let pool = r2d2::Pool::builder()
.build(manager)
.expect("Failed to create pool.");
web::HttpServer::new(|| {
let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
web::HttpServer::new(move || {
web::App::new()
.state(pool.clone())
.state(cache.clone())
.wrap(web::middleware::Logger::default())
.service(web::scope("/api").configure(api::config))
.service(fs::Files::new("/", "static"))
.service(
web::resource("/")
.route(web::get().to(|| async {
web::HttpResponse::Found()
.header("Location", "hottub://source?url=hottub.spacemoehre.de")
.finish()
}))
)
.service(fs::Files::new("/", "static").index_file("index.html"))
})
// .bind_openssl(("0.0.0.0", 18080), builder)?
.bind(("0.0.0.0", 18080))?

10
src/models.rs Normal file

@@ -0,0 +1,10 @@
use diesel::prelude::*;
use serde::{Serialize};
#[derive(Debug, Clone, Serialize, Queryable, Insertable)]
#[diesel(table_name = crate::schema::videos)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct DBVideo {
pub id: String,
pub url: String,
}

0
src/providers/all.rs Normal file

292
src/providers/hanime.rs Normal file

@@ -0,0 +1,292 @@
use std::time::Duration;
use std::vec;
use std::env;
use error_chain::error_chain;
use htmlentity::entity::{decode, ICodedDataTrait};
use reqwest::{Proxy};
use futures::future::join_all;
use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
use crate::videos::{self, Video_Embed, Video_Item};
use crate::DbPool; // Make sure Provider trait is imported
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(reqwest::Error);
}
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
struct HanimeSearchRequest{
search_text: String,
tags: Vec<String>,
tags_mode: String,
brands: Vec<String>,
blacklist: Vec<String>,
order_by: String,
ordering: String,
page: u8
}
impl HanimeSearchRequest {
pub fn new() -> Self {
HanimeSearchRequest {
search_text: "".to_string(),
tags: vec![],
tags_mode: "AND".to_string(),
brands: vec![],
blacklist: vec![],
order_by: "created_at_unix".to_string(),
ordering: "desc".to_string(),
page: 0
}
}
pub fn tags(mut self, tags: Vec<String>) -> Self {
self.tags = tags;
self
}
pub fn search_text(mut self, search_text: String) -> Self {
self.search_text = search_text;
self
}
pub fn tags_mode(mut self, tags_mode: String) -> Self {
self.tags_mode = tags_mode;
self
}
pub fn brands(mut self, brands: Vec<String>) -> Self {
self.brands = brands;
self
}
pub fn blacklist(mut self, blacklist: Vec<String>) -> Self {
self.blacklist = blacklist;
self
}
pub fn order_by(mut self, order_by: String) -> Self {
self.order_by = order_by;
self
}
pub fn ordering(mut self, ordering: String) -> Self {
self.ordering = ordering;
self
}
pub fn page(mut self, page: u8) -> Self {
self.page = page;
self
}
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
struct HanimeSearchResponse{
page: u8,
nbPages:u8,
nbHits: u32,
hitsPerPage: u8,
hits: String
}
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
struct HanimeSearchResult{
id: u64,
name: String,
titles: Vec<String>,
slug: String,
description: String,
views: u64,
interests: u64,
poster_url: String,
cover_url: String,
brand: String,
brand_id: u64,
duration_in_ms: u32,
is_censored: bool,
rating: Option<u32>,
likes: u64,
dislikes: u64,
downloads: u64,
monthly_ranked: Option<u64>,
tags: Vec<String>,
created_at: u64,
released_at: u64,
}
pub struct HanimeProvider {
url: String,
}
impl HanimeProvider {
pub fn new() -> Self {
HanimeProvider {
url: "https://hanime.tv/".to_string(),
}
}
async fn get_video_item(&self, hit: HanimeSearchResult, pool: DbPool) -> Result<Video_Item> {
let mut conn = pool.get().expect("couldn't get db connection from pool");
let db_result = db::get_video(&mut conn,format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug.clone()));
drop(conn);
let id = hit.id.to_string();
let title = hit.name;
let thumb = hit.poster_url;
let duration = (hit.duration_in_ms / 1000) as u32; // Convert ms to seconds
let channel = "hanime".to_string(); // Placeholder, adjust as needed
match db_result {
Ok(Some(video_url)) => {
return Ok(Video_Item::new(id, title, video_url.clone(), channel, thumb, duration)
.tags(hit.tags)
.uploader(hit.brand)
.views(hit.views as u32)
.rating((hit.likes as f32 / (hit.likes + hit.dislikes)as f32) * 100 as f32)
.formats(vec![videos::Video_Format::new(video_url.clone(), "1080".to_string(), "m3u8".to_string())]));
}
Ok(None) => (),
Err(e) => {
println!("Error fetching video from database: {}", e);
// return Err(format!("Error fetching video from database: {}", e).into());
}
}
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let url = format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug);
let response = client.get(url).send().await?;
let text = match response.status().is_success() {
true => {
response.text().await?
},
false => {
print!("Failed to fetch video item: {}\n\n", response.status());
return Err(format!("Failed to fetch video item: {}", response.status()).into());
}
};
let urls = text.split("\"servers\"").collect::<Vec<&str>>()[1];
let mut url_vec = vec![];
for el in urls.split("\"url\":\"").collect::<Vec<&str>>(){
let url = el.split("\"").collect::<Vec<&str>>()[0];
if !url.is_empty() && url.contains("m3u8") {
url_vec.push(url.to_string());
}
}
let mut conn = pool.get().expect("couldn't get db connection from pool");
let _ = db::insert_video(&mut conn, &format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug.clone()), &url_vec[0].clone());
drop(conn);
Ok(Video_Item::new(id, title, url_vec[0].clone(), channel, thumb, duration)
.tags(hit.tags)
.uploader(hit.brand)
.views(hit.views as u32)
.rating((hit.likes as f32 / (hit.likes + hit.dislikes)as f32) * 100 as f32)
.formats(vec![videos::Video_Format::new(url_vec[0].clone(), "1080".to_string(), "m3u8".to_string())]))
}
async fn get(&self, cache: VideoCache, pool: DbPool, page: u8, query: String) -> Result<Vec<Video_Item>> {
let index = format!("{}:{}", query, page);
let old_items = match cache.get(&index) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 * 12 {
println!("Cache hit for URL: {}", index);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let search = HanimeSearchRequest::new().page(page-1).search_text(query.clone());
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.post("https://search.htv-services.com/search")
.json(&search)
.send().await?;
let hits = match response.json::<HanimeSearchResponse>().await {
Ok(resp) => resp.hits,
Err(e) => {
println!("Failed to parse HanimeSearchResponse: {}", e);
return Ok(old_items);
}
};
let hits_json: Vec<HanimeSearchResult> = serde_json::from_str(hits.as_str())
.map_err(|e| format!("Failed to parse hits JSON: {}", e))?;
// let timeout_duration = Duration::from_secs(120);
let futures = hits_json.into_iter().map(|el| self.get_video_item(el.clone(), pool.clone()));
let results: Vec<Result<Video_Item>> = join_all(futures).await;
let video_items: Vec<Video_Item> = results
.into_iter()
.filter_map(Result::ok)
.collect();
if !video_items.is_empty() {
cache.remove(&index);
cache.insert(index.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
impl Provider for HanimeProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
_channel: String,
sort: String,
query: Option<String>,
page: String,
per_page: String,
featured: String,
) -> Vec<Video_Item> {
let _ = featured;
let _ = per_page;
let _ = sort;
let videos: std::result::Result<Vec<Video_Item>, Error> = match query {
Some(q) => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), q).await,
None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), "".to_string()).await,
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
}
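
A small sketch (hypothetical, not part of this diff) of the request builder above, mirroring HanimeProvider::get(): the htv-services search API pages from 0, while the app-facing page number starts at 1, hence the page - 1.

fn build_search(query: &str, page: u8) -> HanimeSearchRequest {
    // Assumes page >= 1, as passed in from get_videos(); everything else keeps
    // the defaults set in HanimeSearchRequest::new().
    HanimeSearchRequest::new()
        .search_text(query.to_string())
        .page(page - 1)
}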

src/providers/mod.rs

@@ -1,6 +1,21 @@
use crate::videos::{Video_Item, Videos};
use crate::{providers::{hanime::HanimeProvider, perverzija::PerverzijaProvider}, util::cache::VideoCache, videos::Video_Item, DbPool};
pub mod perverzija;
pub mod hanime;
pub trait Provider{
async fn get_videos(&self, channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item>;
async fn get_videos(&self, cache: VideoCache, pool: DbPool, channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item>;
}
pub enum AnyProvider {
Perverzija(PerverzijaProvider),
Hanime(HanimeProvider),
}
impl Provider for AnyProvider {
async fn get_videos(&self, cache: VideoCache, pool:DbPool, channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item> {
match self {
AnyProvider::Perverzija(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await,
AnyProvider::Hanime(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await,
}
}
}
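
A hypothetical call-site sketch (the real routing lives in get_provider() in src/api.rs): because Provider::get_videos is an async trait method, dispatch goes through the AnyProvider enum rather than a boxed trait object. Argument values here are illustrative only.

async fn search_channel(
    channel: &str,
    cache: VideoCache,
    pool: DbPool,
    query: Option<String>,
) -> Vec<Video_Item> {
    let provider = match channel {
        "hanime" => AnyProvider::Hanime(HanimeProvider::new()),
        _ => AnyProvider::Perverzija(PerverzijaProvider::new()),
    };
    // Sort, page, per_page and featured are placeholder values.
    provider
        .get_videos(
            cache,
            pool,
            channel.to_string(),
            "date".to_string(),
            query,
            "1".to_string(),
            "10".to_string(),
            "all".to_string(),
        )
        .await
}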

src/providers/perverzija.rs

@@ -1,12 +1,18 @@
use std::vec;
use std::env;
use error_chain::error_chain;
use htmlentity::entity::{decode, encode, CharacterSet, EncodeType, ICodedDataTrait};
use htmlentity::types::{AnyhowResult, Byte};
use htmlentity::entity::{decode, ICodedDataTrait};
use reqwest::{Proxy};
use futures::future::join_all;
use ntex::web;
use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
use crate::videos::{self, PageInfo, Video_Embed, Video_Item, Videos}; // Make sure Provider trait is imported
use crate::videos::{self, Video_Embed, Video_Item};
use crate::DbPool; // Make sure Provider trait is imported
error_chain! {
foreign_links {
@@ -24,69 +30,186 @@ impl PerverzijaProvider {
url: "https://tube.perverzija.com/".to_string(),
}
}
async fn get(&self, page: &u8, featured: String) -> Result<Vec<Video_Item>> {
async fn get(&self, cache:VideoCache, pool:DbPool, page: u8, featured: String) -> Result<Vec<Video_Item>> {
//TODO
// let mut url = Url::parse("https://example.net")?;
// url.query_pairs_mut().append_pair("foo", "bar");
// url.query_pairs_mut().append_pair("key", "dkhdsihdsaiufds");
// url.query_pairs_mut().append_pair("hello", "world");
// println!("{}", url.as_str());
let mut prefix_uri = "".to_string();
if featured == "featured"{
if featured == "featured" {
prefix_uri = "featured-scenes/".to_string();
}
let mut url = format!("{}{}page/{}/", self.url, prefix_uri, page);
if page == &1 {
if page == 1 {
url = format!("{}{}", self.url, prefix_uri);
}
let client = reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
// .proxy(Proxy::https("http://192.168.0.101:8080").unwrap())
// .danger_accept_invalid_certs(true)
.build()?;
let response = client.get(url).send().await?;
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.get(url.clone()).send().await?;
// print!("Response: {:?}\n", response);
if response.status().is_success() {
let text = response.text().await?;
let video_items = self.get_video_items_from_html(text.clone());
let video_items: Vec<Video_Item> = self.get_video_items_from_html(text.clone(), pool);
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
Err("Failed to fetch data".into())
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response, pool)
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
fn query(&self, query: &str) -> Result<Vec<Video_Item>> {
println!("Searching for query: {}", query);
let url = format!("{}?s={}", self.url, query);
let client = reqwest::blocking::Client::new();
let response = client.get(&url).send()?;
async fn query(&self, cache: VideoCache, pool:DbPool, page: u8, query: &str) -> Result<Vec<Video_Item>> {
let search_string = query.replace(" ", "+");
let mut url = format!(
"{}page/{}/?s={}",
self.url, page, search_string
);
if page == 1 {
url = format!("{}?s={}", self.url, search_string);
}
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.get(url.clone()).send().await?;
if response.status().is_success() {
let text = response.text().unwrap_or_default();
println!("{}", &text);
Ok(vec![])
let text = response.text().await?;
let video_items: Vec<Video_Item> = self.get_video_items_from_html_query(text.clone(), pool).await;
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
Err("Failed to fetch data".into())
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
self.get_video_items_from_html_query(res.solution.response, pool).await
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
}
}
fn get_video_items_from_html(&self, html: String) -> Vec<Video_Item> {
fn get_video_items_from_html(&self, html: String, pool: DbPool) -> Vec<Video_Item> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let mut items: Vec<Video_Item> = Vec::new();
let raw_html = html.split("video-listing-content").collect::<Vec<&str>>();
let video_listing_content = raw_html[1];
let video_listing_content = html.split("video-listing-content").collect::<Vec<&str>>()[1];
let raw_videos = video_listing_content
.split("video-item post")
.collect::<Vec<&str>>()[1..]
.to_vec();
for video_segment in &raw_videos {
let vid = video_segment.split("\n").collect::<Vec<&str>>();
let mut index = 0;
if vid.len() > 10 {
if vid.len() > 20 {
continue;
}
for line in vid.clone(){
println!("{}: {}\n\n", index, line);
index += 1;
}
let mut title = vid[1].split(">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
@@ -96,16 +219,16 @@ impl PerverzijaProvider {
let url = vid[1].split("iframe src=&quot;").collect::<Vec<&str>>()[1]
.split("&quot;")
.collect::<Vec<&str>>()[0]
.to_string().replace("index.php", "xs1.php");;
.to_string().replace("index.php", "xs1.php");
let id = url.split("data=").collect::<Vec<&str>>()[1]
.split("&")
.collect::<Vec<&str>>()[0]
.to_string();
let raw_duration = match vid.len(){
let raw_duration = match vid.len() {
10 => vid[6].split("time_dur\">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string(),
.split("<")
.collect::<Vec<&str>>()[0]
.to_string(),
_ => "00:00".to_string(),
};
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
@@ -125,18 +248,43 @@ impl PerverzijaProvider {
.collect::<Vec<&str>>()[0]
.to_string(),
};
let mut embed_html = vid[1].split("data-embed='").collect::<Vec<&str>>()[1].split("'").collect::<Vec<&str>>()[0]
let embed_html = vid[1].split("data-embed='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string();
let id_url = vid[1].split("data-url='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string();
embed_html = embed_html.replace("index.php", "xs1.php");
println!("Embed HTML: {}\n\n", embed_html);
println!("Url: {}\n\n", url.clone());
let mut conn = pool.get().expect("couldn't get db connection from pool");
let _ = db::insert_video(&mut conn, &id_url, &url);
drop(conn);
let referer_url = "https://xtremestream.xyz/".to_string();
let embed = Video_Embed::new(embed_html, url.clone());
let mut video_item =
Video_Item::new(id, title, url.clone(), "perverzija".to_string(), thumb, duration);
video_item.embed = Some(embed);
let mut format = videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
format.add_http_header("Referer".to_string(), url.clone().replace("xs1.php", "index.php"));
let mut tags: Vec<String> = Vec::new(); // Placeholder for tags, adjust as needed
for tag in vid[0].split(" ").collect::<Vec<&str>>(){
if tag.starts_with("tag-") {
let tag_name = tag.split("tag-").collect::<Vec<&str>>()[1]
.to_string();
if !tag_name.is_empty() {
tags.push(tag_name.replace("-", " ").to_string());
}
}
}
let mut video_item = Video_Item::new(
id,
title,
embed.source.clone(),
"perverzija".to_string(),
thumb,
duration,
).tags(tags);
// .embed(embed.clone());
let mut format =
videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
format.add_http_header("Referer".to_string(), referer_url.clone());
if let Some(formats) = video_item.formats.as_mut() {
formats.push(format);
} else {
@@ -147,10 +295,176 @@ impl PerverzijaProvider {
return items;
}
async fn get_video_items_from_html_query(&self, html: String, pool:DbPool) -> Vec<Video_Item> {
let raw_videos = html
.split("video-item post")
.collect::<Vec<&str>>()[1..]
.to_vec();
let futures = raw_videos.into_iter().map(|el| self.get_video_item(el, pool.clone()));
let results: Vec<Result<Video_Item>> = join_all(futures).await;
let items: Vec<Video_Item> = results
.into_iter()
.filter_map(Result::ok)
.collect();
return items;
}
async fn get_video_item(&self, snippet: &str, pool: DbPool) -> Result<Video_Item> {
let vid = snippet.split("\n").collect::<Vec<&str>>();
if vid.len() > 30 {
return Err("Unexpected video snippet length".into());
}
// for (index,line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line.to_string().trim());
// }
let mut title = vid[5].split(" title=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let thumb = match vid[6].split(" src=\"").collect::<Vec<&str>>().len(){
1=>{
for (index,line) in vid.iter().enumerate() {
println!("Line {}: {}", index, line.to_string().trim());
}
return Err("Failed to parse thumbnail URL".into());
}
_ => vid[6].split(" src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string(),
};
let duration = 0;
let lookup_url = vid[5].split(" href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let referer_url = "https://xtremestream.xyz/".to_string();
let mut conn = pool.get().expect("couldn't get db connection from pool");
let db_result = db::get_video(&mut conn,lookup_url.clone());
match db_result {
Ok(Some(url)) => {
let mut id = url.split("data=").collect::<Vec<&str>>()[1]
.to_string();
if id.contains("&"){
id = id.split("&").collect::<Vec<&str>>()[0].to_string()
}
let mut video_item = Video_Item::new(
id,
title,
url.clone(),
"perverzija".to_string(),
thumb,
duration,
);
let mut format =
videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
format.add_http_header("Referer".to_string(), referer_url.clone());
if let Some(formats) = video_item.formats.as_mut() {
formats.push(format);
} else {
video_item.formats = Some(vec![format]);
}
return Ok(video_item);
}
Ok(None) => (),
Err(e) => {
println!("Error fetching video from database: {}", e);
// return Err(format!("Error fetching video from database: {}", e).into());
}
}
drop(conn);
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.proxy(Proxy::https(burp_url).unwrap())
.danger_accept_invalid_certs(true)
.build()?,
Err(_) => reqwest::Client::builder()
.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
.danger_accept_invalid_certs(true)
.build()?,
};
let response = client.get(lookup_url.clone()).send().await?;
let text = match response.status().is_success(){
true => response.text().await?,
false => {
return Err("Failed to fetch video details".into());
}
};
let url = text.split("<iframe src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string().replace("index.php","xs1.php");
if url.starts_with("https://discord.com"){
return Err("Discord link found, skipping video".into());
}
let mut conn = pool.get().expect("couldn't get db connection from pool");
let _ = db::insert_video(&mut conn, &lookup_url, &url);
drop(conn);
if !url.contains("xtremestream.xyz"){
return Err("Video URL does not contain xtremestream.xyz".into());
}
let mut id = url.split("data=").collect::<Vec<&str>>()[1]
.to_string();
if id.contains("&"){
id = id.split("&").collect::<Vec<&str>>()[0].to_string()
}
// if !vid[6].contains(" src=\""){
// for (index,line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line.to_string().trim());
// }
// }
let mut tags: Vec<String> = Vec::new(); // Placeholder for tags, adjust as needed
for tag in vid[0].split(" ").collect::<Vec<&str>>(){
if tag.starts_with("tag-") {
let tag_name = tag.split("tag-").collect::<Vec<&str>>()[1]
.to_string();
if !tag_name.is_empty() {
tags.push(tag_name.replace("-", " ").to_string());
}
}
}
let mut video_item = Video_Item::new(
id,
title,
url.clone(),
"perverzija".to_string(),
thumb,
duration,
)
.tags(tags);
// .embed(embed.clone());
let mut format =
videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
format.add_http_header("Referer".to_string(), referer_url.clone());
if let Some(formats) = video_item.formats.as_mut() {
formats.push(format);
} else {
video_item.formats = Some(vec![format]);
}
return Ok(video_item);
}
}
impl Provider for PerverzijaProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
_channel: String,
sort: String,
query: Option<String>,
@@ -158,10 +472,11 @@ impl Provider for PerverzijaProvider {
per_page: String,
featured: String,
) -> Vec<Video_Item> {
let _ = per_page;
let _ = sort;
let videos: std::result::Result<Vec<Video_Item>, Error> = match query {
Some(q) => self.query(&q),
None => self.get(&page.parse::<u8>().unwrap_or(1), featured).await,
Some(q) => self.query(cache, pool, page.parse::<u8>().unwrap_or(1), &q).await,
None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), featured).await,
};
match videos {
Ok(v) => v,

8
src/schema.rs Normal file

@@ -0,0 +1,8 @@
// @generated automatically by Diesel CLI.
diesel::table! {
videos (id) {
id -> Text,
url -> Text,
}
}

src/status.rs

@@ -16,23 +16,23 @@ pub struct Channel {
pub favicon: String, //"https:\/\/www.google.com/s2/favicons?sz=64&domain=https:\/\/hottubapp.io",
pub status: String, //"active",
pub categories: Vec<String>, //[],
pub options: Vec<Channel_Option>,
pub options: Vec<ChannelOption>,
pub nsfw: bool, //true
}
#[derive(serde::Serialize)]
pub struct Channel_Option {
pub struct ChannelOption {
pub id: String, //"channels",
pub title: String, //"Sites",
pub description: String, //"Websites included in search results.",
pub systemImage: String, //"network",
pub colorName: String, //"purple",
pub options: Vec<Filter_Option>, //[],
pub options: Vec<FilterOption>, //[],
pub multiSelect: bool, //true
}
#[derive(serde::Serialize)]
pub struct Filter_Option{
pub struct FilterOption{
pub id: String, //"sort",
pub title: String, //"Sort",
}

34
src/util/cache.rs Normal file

@@ -0,0 +1,34 @@
use std::time::SystemTime;
use std::sync::{Arc, Mutex};
use crate::videos::Video_Item;
#[derive(Clone)]
pub struct VideoCache{
cache: Arc<Mutex<std::collections::HashMap<String, (SystemTime, Vec<Video_Item>)>>>, // url -> time+Items
}
impl VideoCache {
pub fn new() -> Self {
VideoCache {
cache: Arc::new(Mutex::new(std::collections::HashMap::new())),
}
}
pub fn get(&self, key: &str) -> Option<(SystemTime, Vec<Video_Item>)> {
let cache = self.cache.lock().ok()?;
cache.get(key).cloned()
}
pub fn insert(&self, key: String, value: Vec<Video_Item>) {
if let Ok(mut cache) = self.cache.lock() {
cache.insert(key.clone(), (SystemTime::now(), value.clone()));
}
}
pub fn remove(&self, key: &str) {
if let Ok(mut cache) = self.cache.lock() {
cache.remove(key);
}
}
}
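
A usage sketch (hypothetical, not part of this diff) of the lookup pattern the providers build on top of VideoCache: a hit younger than the TTL counts as fresh, an older entry is kept as a stale fallback in case the refetch fails, and a miss yields an empty list.

fn cached_or_stale(cache: &VideoCache, key: &str, ttl_secs: u64) -> (bool, Vec<Video_Item>) {
    match cache.get(key) {
        // Fresh enough: callers can return this directly.
        Some((stored_at, items))
            if stored_at.elapsed().unwrap_or_default().as_secs() < ttl_secs => (true, items),
        // Expired: keep the old items around as a fallback.
        Some((_, items)) => (false, items),
        None => (false, Vec::new()),
    }
}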

99
src/util/flaresolverr.rs Normal file

@@ -0,0 +1,99 @@
use std::collections::HashMap;
use reqwest::{Client, Proxy};
use serde_json::json;
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrRequest {
pub cmd: String,
pub url: String,
pub maxTimeout: u32,
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlaresolverrCookie {
name: String, //"cf_clearance",
value: String, //"lnKoXclrIp_mDrWJFfktPGm8GDyxjSpzy9dx0qDTiRg-1748689259-1.2.1.1-AIFERAPCdCSvvdu1mposNdUpKV9wHZXBpSI2L9k9TaKkPcqmomON_XEb6ZtRBtrmQu_DC8AzKllRg2vNzVKOUsvv9ndjQ.vv8Z7cNkgzpIbGFy96kXyAYH2mUk3Q7enZovDlEbK5kpV3Sbmd2M3_bUCBE1WjAMMdXlyNElH1LOpUm149O9hrluXjAffo4SwHI4HO0UckBPWBlBqhznKPgXxU0g8VHLDeYnQKViY8rP2ud4tyzKnJUxuYXzr4aWBNMp6TESp49vesRiel_Y5m.rlTY4zSb517S9iPbEQiYHRI.uH5mMHVI3jvJl0Mx94tPrpFnkhDdmzL3DRSllJe9k786Lf21I9WBoH2cCR3yHw",
domain: String, //".discord.com",
path: String, //"/",
expires: f64, //1780225259.237105,
size: u64, //438,
httpOnly: bool, //true,
secure: bool, //true,
session: bool, //false,
sameSite: Option<String>, //"None",
priority: String, //"Medium",
sameParty: bool, //false,
sourceScheme: String, //"Secure",
sourcePort: u32, //443,
partitionKey: Option<String>, //"https://perverzija.com"
}
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrSolution {
url: String,
status: u32,
pub response: String,
headers: HashMap<String, String>,
cookies: Vec<FlaresolverrCookie>,
userAgent: String,
}
// impl FlareSolverrSolution {
// fn to_client(&self,){
// let mut headers = header::HeaderMap::new();
// for (h, v) in &self.headers {
// println!("{}: {}", h, v);
// headers.insert(
// header::HeaderName::from_bytes(h.as_bytes()).unwrap(),
// header::HeaderValue::from_str(v).unwrap(),
// );
// }
// // let client = reqwest::Client::builder()
// // .danger_accept_invalid_certs(true)
// // .
// // .build().unwrap();
// }
// }
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrResponse {
status: String,
message: String,
pub solution: FlareSolverrSolution,
startTimestamp: u64,
endTimestamp: u64,
version: String,
}
pub struct Flaresolverr {
url: String
}
impl Flaresolverr {
pub fn new(url: String) -> Self {
Flaresolverr {
url: url
}
}
pub async fn solve(
&self,
request: FlareSolverrRequest,
) -> Result<FlareSolverrResponse, Box<dyn std::error::Error>> {
let client = Client::builder()
.proxy(Proxy::https("http://192.168.0.101:8080").unwrap())
.proxy(Proxy::http("http://192.168.0.101:8080").unwrap())
.danger_accept_invalid_certs(true)
.build()?;
let response = client
.post(&self.url)
.header("Content-Type", "application/json")
.json(&json!({
"cmd": request.cmd,
"url": request.url,
"maxTimeout": request.maxTimeout,
}))
.send().await?;
let body: FlareSolverrResponse = response.json::<FlareSolverrResponse>().await?;
Ok(body)
}
}
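
A call-site sketch (hypothetical, not part of this diff) showing how the providers use this client when a page sits behind Cloudflare; FLARE_URL is assumed to point at a running FlareSolverr instance, as in the provider code.

async fn fetch_behind_cloudflare(page_url: &str) -> std::result::Result<String, Box<dyn std::error::Error>> {
    let flare = Flaresolverr::new(std::env::var("FLARE_URL")?);
    let response = flare
        .solve(FlareSolverrRequest {
            cmd: "request.get".to_string(),
            url: page_url.to_string(),
            maxTimeout: 60000,
        })
        .await?;
    // The solved page body (HTML after the challenge) is what the providers parse.
    Ok(response.solution.response)
}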

src/util/mod.rs

@@ -1 +1,3 @@
pub mod time;
pub mod flaresolverr;
pub mod cache;

src/videos.rs

@@ -1,7 +1,23 @@
use std::collections::HashMap;
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct Videos_Request {
//"versionInstallDate":"2025-06-03T18:20:20Z","languageCode":"en","appInstallDate":"2025-06-03T18:20:20Z","server":"spacemoehre","sexu
pub clientHash: Option<String>, // "a07b23c9b07813c65050e2a4041ca777",
pub blockedKeywords: Option<String>, // "kittens",
pub countryCode: Option<String>, // "DE",
pub clientVersion: Option<String>, // "2.1.4-22b",
pub timestamp: Option<String>, // "1748976686",
pub blockedUploaders: Option<String>, // "",
pub anonId: Option<String>, // "1AB8A060-A47D-47EF-B9CB-63980ED84C8A",
pub debugTools: Option<bool>, // false,
pub versionInstallDate: Option<String>, // "2025-06-03T18:20:20Z",
pub languageCode: Option<String>, // "en",
pub appInstallDate: Option<String>, // "2025-06-03T18:20:20Z",
pub server: Option<String>, // "spacemoehre",
pub sexuality: Option<String>, // "straight",
pub channel: Option<String>, //"youtube",
pub sort: Option<String>, //"new",
pub query: Option<String>, //"kittens",
@@ -20,8 +36,8 @@ pub struct PageInfo {
#[derive(serde::Serialize, Debug, Clone)]
pub struct Video_Embed{
html: String,
source: String,
pub html: String,
pub source: String,
}
impl Video_Embed {
pub fn new(html: String, source: String) -> Self {
@@ -76,6 +92,42 @@ impl Video_Item {
embed: None, // Placeholder for embed information
}
}
pub fn tags(mut self, tags: Vec<String>) -> Self {
self.tags = Some(tags);
self
}
pub fn uploader(mut self, uploader: String) -> Self {
self.uploader = Some(uploader);
self
}
pub fn uploader_url(mut self, uploader_url: String) -> Self {
self.uploaderUrl = Some(uploader_url);
self
}
pub fn verified(mut self, verified: bool) -> Self {
self.verified = Some(verified);
self
}
pub fn views(mut self, views: u32) -> Self {
self.views = Some(views);
self
}
pub fn rating(mut self, rating: f32) -> Self {
self.rating = Some(rating);
self
}
pub fn uploaded_at(mut self, uploaded_at: u64) -> Self {
self.uploadedAt = Some(uploaded_at);
self
}
pub fn formats(mut self, formats: Vec<Video_Format>) -> Self {
self.formats = Some(formats);
self
}
pub fn embed(mut self, embed: Video_Embed) -> Self {
self.embed = Some(embed);
self
}
}
#[derive(serde::Serialize, Debug, Clone)]
@@ -105,14 +157,15 @@ pub struct Video_Format {
video_ext: Option<String>,
resolution: Option<String>,
http_headers: Option<HashMap<String, String>>,
}
impl Video_Format {
pub fn new(url: String, quality: String, format: String) -> Self {
Video_Format {
url,
quality,
format,
format_id: None,
format: "mp4".to_string(), // Default format
format_id: Some("mp4-1080".to_string()),
format_note: None,
filesize: None,
asr: None,
@@ -122,16 +175,16 @@ impl Video_Format {
tbr: None,
language: None,
language_preference: None,
ext: None,
ext: Some("mp4".to_string()),
vcodec: None,
acodec: None,
dynamic_range: None,
abr: None,
vbr: None,
container: None,
protocol: None,
audio_ext: None,
video_ext: None,
protocol: Some("m3u8_native".to_string()),
audio_ext: Some("none".to_string()),
video_ext: Some("mp4".to_string()),
resolution: None,
http_headers: None,
}

2
supervisord/burpsuite.sh Normal file

@@ -0,0 +1,2 @@
#!/bin/bash
/headless/.venv/bin/python3 /app/burp/start_burp.py

1
supervisord/hottub.sh Normal file

@@ -0,0 +1 @@
/app/target/release/hottub

supervisord/supervisord.conf Normal file

@@ -0,0 +1,22 @@
[supervisord]
nodaemon=true
[program:hottub]
command=bash /app/supervisord/hottub.sh
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stderr_logfile=/dev/stderr
directory=/app
[program:vnc]
command=/dockerstartup/vnc_startup.sh --wait
autostart=true
autorestart=true
[program:burpsuite]
command=bash /app/supervisord/burpsuite.sh
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stderr_logfile=/dev/stderr