diff --git a/burp/start_burp.py b/burp/start_burp.py index 99fd733..0a55236 100644 --- a/burp/start_burp.py +++ b/burp/start_burp.py @@ -24,8 +24,11 @@ button = None proxy_clicked = False history_clicked = False sort_clicked = False +setup = False while True: if datetime.datetime.now() > end_time: + + setup = False print("Burp Suite has been running for 24 hours, restarting...") burp_process.terminate() time.sleep(1) @@ -34,59 +37,62 @@ while True: proxy_clicked = False history_clicked = False sort_clicked = False - - try: - button = pyautogui.locateCenterOnScreen("/app/burp/next_button.png", confidence=0.8) - except: - pass - if button: - print("Clicking on the 'Next' button...") - pyautogui.click(button) - button = None - - try: - button = pyautogui.locateCenterOnScreen("/app/burp/start_burp.png", confidence=0.8) - except: - pass - if button: - print("Clicking on the 'Start Burp' button...") - pyautogui.click(button) - button = None - - try: - button = pyautogui.locateCenterOnScreen("/app/burp/accept.png", confidence=0.8) - except: - pass - if button: - print("Clicking on the 'Accept' button...") - pyautogui.click(button) - button = None - - try: - button = pyautogui.locateCenterOnScreen("/app/burp/proxy.png", confidence=0.8) - except: - pass - if button and not proxy_clicked: - print("Clicking on the 'Proxy' button...") - pyautogui.click(button) - proxy_clicked = True - button = None + if not setup: + try: + button = pyautogui.locateCenterOnScreen("/app/burp/next_button.png", confidence=0.8) + except: + pass + if button: + print("Clicking on the 'Next' button...") + pyautogui.click(button) + button = None - try: - button = pyautogui.locateCenterOnScreen("/app/burp/http_history.png", confidence=0.8) - except: - pass - if button and not history_clicked: - print("Clicking on the 'HTTP History' button...") - pyautogui.click(button) - history_clicked = True - button = None - try: - button = pyautogui.locateCenterOnScreen("/app/burp/sort.png", confidence=0.99) - 
except: - pass - if button and not sort_clicked: - sort_clicked = True - print("Clicking on the 'Sorting' button...") - pyautogui.click(button) - button = None + try: + button = pyautogui.locateCenterOnScreen("/app/burp/start_burp.png", confidence=0.8) + except: + pass + if button: + print("Clicking on the 'Start Burp' button...") + pyautogui.click(button) + button = None + + try: + button = pyautogui.locateCenterOnScreen("/app/burp/accept.png", confidence=0.8) + except: + pass + if button: + print("Clicking on the 'Accept' button...") + pyautogui.click(button) + button = None + + try: + button = pyautogui.locateCenterOnScreen("/app/burp/proxy.png", confidence=0.8) + except: + pass + if button and not proxy_clicked: + print("Clicking on the 'Proxy' button...") + pyautogui.click(button) + proxy_clicked = True + button = None + + try: + button = pyautogui.locateCenterOnScreen("/app/burp/http_history.png", confidence=0.8) + except: + pass + if button and not history_clicked: + print("Clicking on the 'HTTP History' button...") + pyautogui.click(button) + history_clicked = True + button = None + try: + button = pyautogui.locateCenterOnScreen("/app/burp/sort.png", confidence=0.99) + except: + pass + if button and not sort_clicked: + sort_clicked = True + print("Clicking on the 'Sorting' button...") + pyautogui.click(button) + setup = True + button = None + else: + time.sleep(3600) diff --git a/src/api.rs b/src/api.rs index 52ef855..b594407 100644 --- a/src/api.rs +++ b/src/api.rs @@ -260,6 +260,17 @@ async fn status(req: HttpRequest) -> Result { // options: vec![], // nsfw: true, // }); + status.add_channel(Channel { + id: "fapello".to_string(), + name: "Fapello".to_string(), + description: "Free Onlyfans Content".to_string(), + premium: false, + favicon: "https://www.google.com/s2/favicons?sz=64&domain=fapello.com".to_string(), + status: "active".to_string(), + categories: vec![], + options: vec![], + nsfw: true, + }); status.add_channel(Channel { id: 
"spankbang".to_string(), name: "Work in Progress - SpankBang".to_string(), diff --git a/src/providers/fapello.rs b/src/providers/fapello.rs new file mode 100644 index 0000000..60d6157 --- /dev/null +++ b/src/providers/fapello.rs @@ -0,0 +1,282 @@ +use std::vec; +use std::env; +use error_chain::error_chain; +use htmlentity::entity::{decode, ICodedDataTrait}; +use reqwest::{Proxy}; +use futures::future::join_all; + +use crate::db; +use crate::providers::Provider; +use crate::util::cache::VideoCache; +use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr}; +use crate::util::time::parse_time_to_seconds; +use crate::videos::{self, VideoEmbed, VideoItem}; +use crate::DbPool; +use crate::USER_AGENT; // Make sure Provider trait is imported + +error_chain! { + foreign_links { + Io(std::io::Error); + HttpRequest(reqwest::Error); + } +} + + +#[derive(Debug, Clone)] +pub struct FapelloProvider { + url: String, +} +impl FapelloProvider { + pub fn new() -> Self { + FapelloProvider { + url: "https://fapello.com/".to_string(), + } + } + async fn get(&self, cache:VideoCache, page: u8) -> Result> { + + let mut prefix_uri = "videos/".to_string(); + let mut url = format!("{}/ajax/{}page-{}/", self.url, prefix_uri, page); + if page == 1 { + url = format!("{}{}", self.url, prefix_uri); + } + + let old_items = match cache.get(&url) { + Some((time, items)) => { + if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 { + println!("Cache hit for URL: {}", url); + return Ok(items.clone()); + } + else{ + items.clone() + } + } + None => { + vec![] + } + }; + + + let client = match env::var("BURP_URL").as_deref() { + Ok(burp_url) => + reqwest::Client::builder() + .user_agent(USER_AGENT) + .proxy(Proxy::https(burp_url).unwrap()) + .danger_accept_invalid_certs(true) + .build()?, + Err(_) => reqwest::Client::builder() + .user_agent(USER_AGENT) + .danger_accept_invalid_certs(true) + .build()?, + }; + + let response = client.get(url.clone()).send().await?; + // print!("Response: 
{:?}\n", response); + if response.status().is_success() { + let text = response.text().await?; + let video_items: Vec = self.get_video_items_from_html(text.clone()); + if !video_items.is_empty() { + cache.remove(&url); + cache.insert(url.clone(), video_items.clone()); + } else{ + return Ok(old_items); + } + Ok(video_items) + } else { + let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set"); + let flare = Flaresolverr::new(flare_url); + let result = flare + .solve(FlareSolverrRequest { + cmd: "request.get".to_string(), + url: url.clone(), + maxTimeout: 60000, + }) + .await; + let video_items = match result { + Ok(res) => { + // println!("FlareSolverr response: {}", res); + self.get_video_items_from_html(res.solution.response) + } + Err(e) => { + println!("Error solving FlareSolverr: {}", e); + return Err("Failed to solve FlareSolverr".into()); + } + }; + if !video_items.is_empty() { + cache.remove(&url); + cache.insert(url.clone(), video_items.clone()); + } else { + return Ok(old_items); + } + Ok(video_items) + } + } + async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result> { + + let mut url = format!( + "{}videos/{}?p={}", + self.url, query, page-1 + ); + if page == 1 { + url = format!("{}videos/{}", self.url, query); + } + + // Check our Video Cache. If the result is younger than 1 hour, we return it. 
+ let old_items = match cache.get(&url) { + Some((time, items)) => { + if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 { + return Ok(items.clone()); + } + else{ + let _ = cache.check().await; + return Ok(items.clone()) + } + } + None => { + vec![] + } + }; + let client = match env::var("BURP_URL").as_deref() { + Ok(burp_url) => + reqwest::Client::builder() + .user_agent(USER_AGENT) + .proxy(Proxy::https(burp_url).unwrap()) + .danger_accept_invalid_certs(true) + .build()?, + Err(_) => reqwest::Client::builder() + .user_agent(USER_AGENT) + .danger_accept_invalid_certs(true) + .build()?, + }; + + let response = client.get(url.clone()).send().await?; + if response.status().is_success() { + let text = response.text().await?; + let video_items: Vec = self.get_video_items_from_html(text.clone()); + if !video_items.is_empty() { + cache.remove(&url); + cache.insert(url.clone(), video_items.clone()); + } else{ + return Ok(old_items); + } + Ok(video_items) + } else { + let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set"); + let flare = Flaresolverr::new(flare_url); + let result = flare + .solve(FlareSolverrRequest { + cmd: "request.get".to_string(), + url: url.clone(), + maxTimeout: 60000, + }) + .await; + let video_items = match result { + Ok(res) => { + self.get_video_items_from_html(res.solution.response) + } + Err(e) => { + println!("Error solving FlareSolverr: {}", e); + return Err("Failed to solve FlareSolverr".into()); + } + }; + if !video_items.is_empty() { + cache.remove(&url); + cache.insert(url.clone(), video_items.clone()); + } else{ + return Ok(old_items); + } + Ok(video_items) + } + } + + fn get_video_items_from_html(&self, html: String) -> Vec { + if html.is_empty() { + println!("HTML is empty"); + return vec![]; + } + let mut items: Vec = Vec::new(); + for vid in html.split("") + .collect::>()[1] + .split("") + .collect::>()[0] + .trim(); + let thumb = vid.split("data-src=\"") + .collect::>()[1] + .split("\"") + .collect::>()[0] + 
.trim(); + let url = vid.split("href=\"") + .collect::>()[1] + .split("\"") + .collect::>()[0] + .trim(); + let raw_dur = vid.split("") + .collect::>()[1] + .split("<") + .collect::>()[0] + .trim(); + let duration: u32 = match parse_time_to_seconds(raw_dur) { + Some(secs) => secs as u32, + None => 0, + }; + + let item = VideoItem::new( + url.to_string(), + title.to_string(), + url.to_string(), + "fapello".to_string(), + thumb.to_string(), + duration, + ); + items.push(item); + } + return items; + } + + // async fn get_video_items_from_html_query(&self, html: String, pool:DbPool) -> Vec { + // let raw_videos = html + // .split("video-item post") + // .collect::>()[1..] + // .to_vec(); + // let futures = raw_videos.into_iter().map(|el| self.get_video_item(el, pool.clone())); + // let results: Vec> = join_all(futures).await; + // let items: Vec = results + // .into_iter() + // .filter_map(Result::ok) + // .collect(); + + // return items; + // } +} + +impl Provider for FapelloProvider { + async fn get_videos( + &self, + cache: VideoCache, + pool: DbPool, + _channel: String, + sort: String, + query: Option, + page: String, + per_page: String, + featured: String, + ) -> Vec { + let _ = per_page; + let _ = sort; + let videos: std::result::Result, Error> = match query { + Some(q) => self.query(cache, page.parse::().unwrap_or(1), &q).await, + None => self.get(cache, page.parse::().unwrap_or(1)).await, + }; + match videos { + Ok(v) => v, + Err(e) => { + println!("Error fetching videos: {}", e); + vec![] + } + } + } +} diff --git a/src/providers/mod.rs b/src/providers/mod.rs index 1baa846..765609c 100644 --- a/src/providers/mod.rs +++ b/src/providers/mod.rs @@ -1,10 +1,11 @@ -use crate::{providers::{erothots::ErothotsProvider, hanime::HanimeProvider, perverzija::PerverzijaProvider, spankbang::SpankbangProvider}, util::cache::VideoCache, videos::VideoItem, DbPool}; +use crate::{providers::{erothots::ErothotsProvider, fapello::FapelloProvider, hanime::HanimeProvider,
perverzija::PerverzijaProvider, spankbang::SpankbangProvider}, util::cache::VideoCache, videos::VideoItem, DbPool}; pub mod perverzija; pub mod hanime; pub mod spankbang; pub mod erothots; +pub mod fapello; pub trait Provider{ async fn get_videos(&self, cache: VideoCache, pool: DbPool, channel: String, sort: String, query: Option, page: String, per_page: String, featured: String) -> Vec; @@ -16,6 +17,7 @@ pub enum AnyProvider { Hanime(HanimeProvider), Spankbang(SpankbangProvider), Erothots(ErothotsProvider), + Fapello(FapelloProvider), } impl Provider for AnyProvider { async fn get_videos(&self, cache: VideoCache, pool:DbPool, channel: String, sort: String, query: Option, page: String, per_page: String, featured: String) -> Vec { @@ -24,6 +26,7 @@ impl Provider for AnyProvider { AnyProvider::Hanime(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, AnyProvider::Erothots(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, AnyProvider::Spankbang(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, + AnyProvider::Fapello(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await, } } }