From 6405cbb2699be0e3c72a1b940507b66a648c6e7b Mon Sep 17 00:00:00 2001
From: Simon
Date: Thu, 19 Jun 2025 11:12:39 +0000
Subject: [PATCH] add erothots

---
 burp/start_burp.py         |  17 ++-
 src/api.rs                 |  14 +-
 src/providers/erothots.rs  | 287 +++++++++++++++++++++++++++++++++++++
 src/providers/mod.rs       |   7 +-
 src/providers/spankbang.rs |  40 +++++-
 5 files changed, 352 insertions(+), 13 deletions(-)
 create mode 100644 src/providers/erothots.rs

diff --git a/burp/start_burp.py b/burp/start_burp.py
index 22d9f1e..5fd8a0e 100644
--- a/burp/start_burp.py
+++ b/burp/start_burp.py
@@ -2,7 +2,7 @@ import pyautogui
 import time
 import os
 import subprocess
-import glob
+import datetime
 
 BURP_JAR = "/headless/burpsuite_community.jar"
 CONFIG_FILE = "/app/burp/project_options.json"
@@ -19,8 +19,15 @@ time.sleep(5)
 
 print("Starting Burp Suite...")
 burp_process = start_burp()
+end_time = datetime.datetime.now() + datetime.timedelta(days=1)
 button = None
 while True:
+    if datetime.datetime.now() > end_time:
+        print("Burp Suite has been running for 24 hours, restarting...")
+        burp_process.terminate()
+        time.sleep(1)
+        burp_process = start_burp()
+        end_time = datetime.datetime.now() + datetime.timedelta(days=1)
     try:
         button = pyautogui.locateCenterOnScreen("/app/burp/next_button.png", confidence=0.8)
     except:
@@ -73,8 +80,8 @@ while True:
     print("Clicking on the 'Sorting' button...")
     pyautogui.click(button)
 
-    time.sleep(60*60*24)
-    burp_process.terminate()
-    print("Starting Burp Suite...")
-    burp_process = start_burp()
+    # time.sleep(60*60*24)
+    # burp_process.terminate()
+    # print("Starting Burp Suite...")
+    # burp_process = start_burp()
 
diff --git a/src/api.rs b/src/api.rs
index 254185d..3c7643f 100644
--- a/src/api.rs
+++ b/src/api.rs
@@ -5,6 +5,7 @@ use ntex::web;
 use ntex::web::HttpRequest;
 use tokio::{task, time};
 
+use crate::providers::erothots::ErothotsProvider;
 use crate::providers::hanime::HanimeProvider;
 use crate::providers::perverzija::PerverzijaProvider;
 use crate::providers::spankbang::SpankbangProvider;
@@ -247,10 +248,20 @@ async fn status(req: HttpRequest) -> Result {
             }
         ],
         nsfw: true,
+    });status.add_channel(Channel {
+        id: "erothots".to_string(),
+        name: "Erothots".to_string(),
+        description: "Free Onlyfans Content".to_string(),
+        premium: false,
+        favicon: "https://www.google.com/s2/favicons?sz=64&domain=erothots.co".to_string(),
+        status: "active".to_string(),
+        categories: vec![],
+        options: vec![],
+        nsfw: true,
     });
     status.add_channel(Channel {
         id: "spankbang".to_string(),
-        name: "SpankBang".to_string(),
+        name: "Work in Progress - SpankBang".to_string(),
         description: "Popular Porn Videos - SpankBang".to_string(),
         premium: false,
         favicon: "https://www.google.com/s2/favicons?sz=64&domain=spankbang.com".to_string(),
@@ -367,6 +378,7 @@ pub fn get_provider(channel: &str) -> Option<AnyProvider> {
         "perverzija" => Some(AnyProvider::Perverzija(PerverzijaProvider::new())),
         "hanime" => Some(AnyProvider::Hanime(HanimeProvider::new())),
         "spankbang" => Some(AnyProvider::Spankbang(SpankbangProvider::new())),
+        "erothots" => Some(AnyProvider::Erothots(ErothotsProvider::new())),
         _ => Some(AnyProvider::Perverzija(PerverzijaProvider::new())),
     }
 }
\ No newline at end of file
diff --git a/src/providers/erothots.rs b/src/providers/erothots.rs
new file mode 100644
index 0000000..d634f5c
--- /dev/null
+++ b/src/providers/erothots.rs
@@ -0,0 +1,287 @@
+use std::vec;
+use std::env;
+use error_chain::error_chain;
+use htmlentity::entity::{decode, ICodedDataTrait};
+use reqwest::{Proxy};
+use futures::future::join_all;
+
+use crate::db;
+use crate::providers::Provider;
+use crate::util::cache::VideoCache;
+use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
+use crate::util::time::parse_time_to_seconds;
+use crate::videos::{self, VideoEmbed, VideoItem};
+use crate::DbPool;
+use crate::USER_AGENT; // Make sure Provider trait is imported
+
+error_chain! {
+    foreign_links {
+        Io(std::io::Error);
+        HttpRequest(reqwest::Error);
+    }
+}
+
+
+#[derive(Debug, Clone)]
+pub struct ErothotsProvider {
+    url: String,
+}
+impl ErothotsProvider {
+    pub fn new() -> Self {
+        ErothotsProvider {
+            url: "https://erothots.co/".to_string(),
+        }
+    }
+    async fn get(&self, cache: VideoCache, page: u8) -> Result<Vec<VideoItem>> {
+        let prefix_uri = "videos/hot".to_string();
+        let mut url = format!("{}{}?p={}", self.url, prefix_uri, page - 1);
+        if page == 1 {
+            url = format!("{}{}", self.url, prefix_uri);
+        }
+
+        let old_items = match cache.get(&url) {
+            Some((time, items)) => {
+                if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
+                    println!("Cache hit for URL: {}", url);
+                    return Ok(items.clone());
+                } else {
+                    items.clone()
+                }
+            }
+            None => {
+                vec![]
+            }
+        };
+
+        let client = match env::var("BURP_URL").as_deref() {
+            Ok(burp_url) => reqwest::Client::builder()
+                .user_agent(USER_AGENT)
+                .proxy(Proxy::https(burp_url).unwrap())
+                .danger_accept_invalid_certs(true)
+                .build()?,
+            Err(_) => reqwest::Client::builder()
+                .user_agent(USER_AGENT)
+                .danger_accept_invalid_certs(true)
+                .build()?,
+        };
+
+        let response = client.get(url.clone()).send().await?;
+        // print!("Response: {:?}\n", response);
+        if response.status().is_success() {
+            let text = response.text().await?;
+            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
+            if !video_items.is_empty() {
+                cache.remove(&url);
+                cache.insert(url.clone(), video_items.clone());
+            } else {
+                return Ok(old_items);
+            }
+            Ok(video_items)
+        } else {
+            let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
+            let flare = Flaresolverr::new(flare_url);
+            let result = flare
+                .solve(FlareSolverrRequest {
+                    cmd: "request.get".to_string(),
+                    url: url.clone(),
+                    maxTimeout: 60000,
+                })
+                .await;
+            let video_items = match result {
+                Ok(res) => {
+                    // println!("FlareSolverr response: {}", res);
+                    self.get_video_items_from_html(res.solution.response)
+                }
+                Err(e) => {
+                    println!("Error solving FlareSolverr: {}", e);
+                    return Err("Failed to solve FlareSolverr".into());
+                }
+            };
+            if !video_items.is_empty() {
+                cache.remove(&url);
+                cache.insert(url.clone(), video_items.clone());
+            } else {
+                return Ok(old_items);
+            }
+            Ok(video_items)
+        }
+    }
+    async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result<Vec<VideoItem>> {
+        let mut url = format!(
+            "{}videos/{}?p={}",
+            self.url, query, page - 1
+        );
+        if page == 1 {
+            url = format!("{}videos/{}", self.url, query);
+        }
+
+        // Check our Video Cache. If the result is younger than 1 hour, we return it.
+        let old_items = match cache.get(&url) {
+            Some((time, items)) => {
+                if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
+                    return Ok(items.clone());
+                } else {
+                    let _ = cache.check().await;
+                    return Ok(items.clone())
+                }
+            }
+            None => {
+                vec![]
+            }
+        };
+        let client = match env::var("BURP_URL").as_deref() {
+            Ok(burp_url) => reqwest::Client::builder()
+                .user_agent(USER_AGENT)
+                .proxy(Proxy::https(burp_url).unwrap())
+                .danger_accept_invalid_certs(true)
+                .build()?,
+            Err(_) => reqwest::Client::builder()
+                .user_agent(USER_AGENT)
+                .danger_accept_invalid_certs(true)
+                .build()?,
+        };
+
+        let response = client.get(url.clone()).send().await?;
+        if response.status().is_success() {
+            let text = response.text().await?;
+            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
+            if !video_items.is_empty() {
+                cache.remove(&url);
+                cache.insert(url.clone(), video_items.clone());
+            } else {
+                return Ok(old_items);
+            }
+            Ok(video_items)
+        } else {
+            let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
+            let flare = Flaresolverr::new(flare_url);
+            let result = flare
+                .solve(FlareSolverrRequest {
+                    cmd: "request.get".to_string(),
+                    url: url.clone(),
+                    maxTimeout: 60000,
+                })
+                .await;
+            let video_items = match result {
+                Ok(res) => {
+                    self.get_video_items_from_html(res.solution.response)
+                }
+                Err(e) => {
+                    println!("Error solving FlareSolverr: {}", e);
+                    return Err("Failed to solve FlareSolverr".into());
+                }
+            };
+            if !video_items.is_empty() {
+                cache.remove(&url);
+                cache.insert(url.clone(), video_items.clone());
+            } else {
+                return Ok(old_items);
+            }
+            Ok(video_items)
+        }
+    }
+
+    fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
+        if html.is_empty() {
+            println!("HTML is empty");
+            return vec![];
+        }
+        let mut items: Vec<VideoItem> = Vec::new();
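+        // The listing page is parsed by slicing the raw HTML with string splits,
+        // mirroring the SpankBang provider. The empty split("") patterns below are
+        // placeholders: the real HTML tag markers still need to be filled in.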
") + .collect::>()[1] + .split("
") + .collect::>()[0]; + for vid in raw_videos.split("") + .collect::>()[1] + .split("") + .collect::>()[0] + .trim(); + let thumb = vid.split("data-src=\"") + .collect::>()[1] + .split("\"") + .collect::>()[0] + .trim(); + let url = vid.split("href=\"") + .collect::>()[1] + .split("\"") + .collect::>()[0] + .trim(); + let raw_dur = vid.split("") + .collect::>()[1] + .split("<") + .collect::>()[0] + .trim(); + let duration: u32 = match parse_time_to_seconds(raw_dur) { + Some(secs) => secs as u32, + None => 0, + }; + + let item = VideoItem::new( + url.to_string(), + title.to_string(), + url.to_string(), + "erothots".to_string(), + thumb.to_string(), + duration, + ); + items.push(item); + } + return items; + } + + // async fn get_video_items_from_html_query(&self, html: String, pool:DbPool) -> Vec { + // let raw_videos = html + // .split("video-item post") + // .collect::>()[1..] + // .to_vec(); + // let futures = raw_videos.into_iter().map(|el| self.get_video_item(el, pool.clone())); + // let results: Vec> = join_all(futures).await; + // let items: Vec = results + // .into_iter() + // .filter_map(Result::ok) + // .collect(); + + // return items; + // } +} + +impl Provider for ErothotsProvider { + async fn get_videos( + &self, + cache: VideoCache, + pool: DbPool, + _channel: String, + sort: String, + query: Option, + page: String, + per_page: String, + featured: String, + ) -> Vec { + let _ = per_page; + let _ = sort; + let videos: std::result::Result, Error> = match query { + Some(q) => self.query(cache, page.parse::().unwrap_or(1), &q).await, + None => self.get(cache, page.parse::().unwrap_or(1)).await, + }; + match videos { + Ok(v) => v, + Err(e) => { + println!("Error fetching videos: {}", e); + vec![] + } + } + } +} diff --git a/src/providers/mod.rs b/src/providers/mod.rs index 204a41e..1baa846 100644 --- a/src/providers/mod.rs +++ b/src/providers/mod.rs @@ -1,9 +1,10 @@ -use crate::{providers::{hanime::HanimeProvider, perverzija::PerverzijaProvider, spankbang::SpankbangProvider}, util::cache::VideoCache, videos::VideoItem, DbPool}; -use tokio::task; +use crate::{providers::{erothots::ErothotsProvider, hanime::HanimeProvider, perverzija::PerverzijaProvider, spankbang::SpankbangProvider}, util::cache::VideoCache, videos::VideoItem, DbPool}; + pub mod perverzija; pub mod hanime; pub mod spankbang; +pub mod erothots; pub trait Provider{ async fn get_videos(&self, cache: VideoCache, pool: DbPool, channel: String, sort: String, query: Option, page: String, per_page: String, featured: String) -> Vec; @@ -14,12 +15,14 @@ pub enum AnyProvider { Perverzija(PerverzijaProvider), Hanime(HanimeProvider), Spankbang(SpankbangProvider), + Erothots(ErothotsProvider), } impl Provider for AnyProvider { async fn get_videos(&self, cache: VideoCache, pool:DbPool, channel: String, sort: String, query: Option, page: String, per_page: String, featured: String) -> Vec { match self { AnyProvider::Perverzija(p) => p.get_videos(cache.clone(), pool.clone(), channel.clone(), sort.clone(), query.clone(), page.clone(), per_page.clone(), featured.clone()).await, AnyProvider::Hanime(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, + AnyProvider::Erothots(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, AnyProvider::Spankbang(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, } } diff --git a/src/providers/spankbang.rs b/src/providers/spankbang.rs index f53460b..b8112c0 100644 --- a/src/providers/spankbang.rs 
+++ b/src/providers/spankbang.rs
@@ -9,6 +9,7 @@ use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
 use crate::videos::{self, VideoItem};
 use crate::DbPool;
 use crate::USER_AGENT; // Make sure Provider trait is imported
+use std::collections::HashMap;
 
 error_chain! {
     foreign_links {
@@ -101,7 +102,7 @@ impl SpankbangProvider {
         }
     }
 
-    async fn query(&self, cache: VideoCache, page: u8, query: &str,) -> Result<Vec<VideoItem>> {
+    async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result<Vec<VideoItem>> {
         let url = format!("{}s/{}/{}/", self.url, query.replace(" ", "+"), page);
 
         let old_items = match cache.get(&url) {
@@ -134,6 +135,26 @@
         };
 
         let response = client.get(url.clone()).send().await?;
+        if let Some(cookies) = response.headers().get_all("set-cookie").iter().next() {
+            for value in response.headers().get_all("set-cookie").iter() {
+                let mut cookies_map = HashMap::new();
+                for value in response.headers().get_all("set-cookie").iter() {
+                    if let Ok(cookie_str) = value.to_str() {
+                        if let Some((k, v)) = cookie_str.split_once('=') {
+                            let key = k.trim();
+                            let val = v.split(';').next().unwrap_or("").trim();
+                            cookies_map.insert(key.to_string(), val.to_string());
+                        }
+                    }
+                }
+                let cookies_string = cookies_map
+                    .iter()
+                    .map(|(k, v)| format!("{}={}", k, v))
+                    .collect::<Vec<String>>()
+                    .join("; ");
+                println!("Cookie: {}", cookies_string);
+            }
+        }
 if response.status().is_success() {
             let text = response.text().await?;
             let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
@@ -180,11 +201,20 @@
             return vec![];
         }
         let mut items: Vec<VideoItem> = Vec::new();
-        let video_listing_content = html.split("class=\"video-list ").collect::<Vec<&str>>()[1];
-        let raw_videos = video_listing_content
+        let split_html = html.split("class=\"video-list ").collect::<Vec<&str>>();
+        if split_html.len() < 2 {
+            println!("Could not find video-list in HTML");
+            return items;
+        }
+        let video_listing_content = split_html[1];
+        let raw_videos_vec = video_listing_content
             .split("class=\"video-item\"")
-            .collect::<Vec<&str>>()[1..]
-            .to_vec();
+            .collect::<Vec<&str>>();
+        if raw_videos_vec.len() < 2 {
+            println!("Could not find video-item in HTML");
+            return items;
+        }
+        let raw_videos = raw_videos_vec[1..].to_vec();
 for video_segment in &raw_videos {
             let vid = video_segment.split("\n").collect::<Vec<&str>>();
             //for (index,line) in vid.iter().enumerate(){