use std::collections::HashMap;
use std::env;

use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{decode, ICodedDataTrait};
use reqwest::{Client, Proxy};

// Make sure the Provider trait is in scope for the impl below.
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::videos::{self, VideoItem};
use crate::DbPool;
use crate::USER_AGENT;

error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(reqwest::Error);
    }
}

#[derive(Debug, Clone)]
pub struct SpankbangProvider {
    url: String,
}

impl SpankbangProvider {
    pub fn new() -> Self {
        SpankbangProvider {
            url: "https://spankbang.com/".to_string(),
        }
    }

    /// Builds the HTTP client, routed through a Burp proxy when BURP_URL is set.
    fn build_client(&self) -> Result<Client> {
        let builder = Client::builder()
            .user_agent(USER_AGENT)
            .danger_accept_invalid_certs(true);
        let client = match env::var("BURP_URL").as_deref() {
            Ok(burp_url) => builder.proxy(Proxy::https(burp_url)?).build()?,
            Err(_) => builder.build()?,
        };
        Ok(client)
    }

    /// Fetches a listing page ("{sort}/{page}/") and returns the parsed items.
    async fn get(&self, cache: VideoCache, _pool: DbPool, page: u8, sort: String) -> Result<Vec<VideoItem>> {
        let url = format!("{}{}/{}/", self.url, sort, page);

        // Serve fresh cache entries (less than one hour old) directly; keep
        // stale items around as a fallback in case the live fetch returns nothing.
        let old_items = match cache.get(&url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
                    println!("Cache hit for URL: {}", url);
                    return Ok(items.clone());
                } else {
                    items.clone()
                }
            }
            None => vec![],
        };

        let client = self.build_client()?;
        let response = client.get(url.clone()).send().await?;
        println!("Response status: {}", response.status());

        if response.status().is_success() {
            let text = response.text().await?;
            // println!("Response text: {}", text);
            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text, &client).await;
            if video_items.is_empty() {
                return Ok(old_items);
            }
            cache.remove(&url);
            cache.insert(url.clone(), video_items.clone());
            Ok(video_items)
        } else {
            // A non-success status usually means a Cloudflare challenge;
            // retry the request through FlareSolverr.
            let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
            let flare = Flaresolverr::new(flare_url);
            let result = flare
                .solve(FlareSolverrRequest {
                    cmd: "request.get".to_string(),
                    url: url.clone(),
                    maxTimeout: 60000,
                })
                .await;
            let video_items = match result {
                Ok(res) => self.get_video_items_from_html(res.solution.response, &client).await,
                Err(e) => {
                    println!("Error solving FlareSolverr: {}", e);
                    return Err("Failed to solve FlareSolverr".into());
                }
            };
            if video_items.is_empty() {
                return Ok(old_items);
            }
            cache.remove(&url);
            cache.insert(url.clone(), video_items.clone());
            Ok(video_items)
        }
    }

    /// Fetches a search results page ("s/{query}/{page}/") and returns the parsed items.
    async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result<Vec<VideoItem>> {
        let url = format!("{}s/{}/{}/", self.url, query.replace(" ", "+"), page);

        let old_items = match cache.get(&url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
                    println!("Cache hit for URL: {}", url);
                    return Ok(items.clone());
                } else {
                    items.clone()
                }
            }
            None => vec![],
        };

        let client = self.build_client()?;
        let response = client.get(url.clone()).send().await?;
        // Collect any Set-Cookie headers into a single "k=v; k=v" string.
        // NOTE: the string is built but not yet attached to follow-up requests.
        let mut cookies_map = HashMap::new();
        for value in response.headers().get_all("set-cookie").iter() {
            if let Ok(cookie_str) = value.to_str() {
                if let Some((k, v)) = cookie_str.split_once('=') {
                    let key = k.trim();
                    let val = v.split(';').next().unwrap_or("").trim();
                    cookies_map.insert(key.to_string(), val.to_string());
                }
            }
        }
        let _cookies_string = cookies_map
            .iter()
            .map(|(k, v)| format!("{}={}", k, v))
            .collect::<Vec<String>>()
            .join("; ");

        if response.status().is_success() {
            let text = response.text().await?;
            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text, &client).await;
            if video_items.is_empty() {
                return Ok(old_items);
            }
            cache.remove(&url);
            cache.insert(url.clone(), video_items.clone());
            Ok(video_items)
        } else {
            // Same Cloudflare fallback as in `get`.
            let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
            let flare = Flaresolverr::new(flare_url);
            let result = flare
                .solve(FlareSolverrRequest {
                    cmd: "request.get".to_string(),
                    url: url.clone(),
                    maxTimeout: 60000,
                })
                .await;
            let video_items = match result {
                Ok(res) => self.get_video_items_from_html(res.solution.response, &client).await,
                Err(e) => {
                    println!("Error solving FlareSolverr: {}", e);
                    return Err("Failed to solve FlareSolverr".into());
                }
            };
            if video_items.is_empty() {
                return Ok(old_items);
            }
            cache.remove(&url);
            cache.insert(url.clone(), video_items.clone());
            Ok(video_items)
        }
    }

    /// Fetches a video page and extracts the direct media URL.
    /// The tag/attribute markers used for extraction were lost in this file;
    /// the "<source" and "src=\"" literals below are assumptions.
    async fn get_video_url(&self, url: String, client: &Client) -> Result<String> {
        let response = client.get(url.clone()).send().await?;
        if response.status().is_success() {
            let text = response.text().await?;
            let lines = text.split("\n").collect::<Vec<&str>>();
            if let Some(url_line) = lines
                .iter()
                .find(|s| s.trim_start().starts_with("<source")) // assumed marker
            {
                return Ok(url_line
                    .split("src=\"") // assumed attribute
                    .collect::<Vec<&str>>()[1]
                    .split("\"")
                    .collect::<Vec<&str>>()[0]
                    .to_string());
            }
        }
        Err(Error::from("Failed to get video URL"))
    }

    /// Parses one raw "video-item" fragment into a VideoItem.
    /// The parsing is positional and marker based, so it is brittle against
    /// markup changes; several marker strings were lost in this file and the
    /// literals tagged "assumed" are best-effort reconstructions.
    async fn parse_video_item(&self, html: String, client: &Client) -> Result<VideoItem> {
        let vid = html.split("\n").collect::<Vec<&str>>();

        // Title: read from the title attribute of the item's link.
        let title_line = vid
            .iter()
            .find(|s| s.trim_start().starts_with("<a")) // assumed marker
            .ok_or_else(|| Error::from("Failed to find title"))?;
        let mut title = title_line
            .split("title=\"") // assumed attribute
            .collect::<Vec<&str>>()[1]
            .split("\"")
            .collect::<Vec<&str>>()[0]
            .to_string();
        title = decode(title.as_bytes()).to_string().unwrap_or(title);

        // Thumbnail and optional hover preview sit at fixed line offsets.
        let thumb = vid[13]
            .split("data-src=\"")
            .collect::<Vec<&str>>()[1]
            .split("\"")
            .collect::<Vec<&str>>()[0]
            .to_string();
        let mut preview = "".to_string();
        if vid[15].contains("data-preview=\"") {
            preview = vid[15]
                .split("data-preview=\"")
                .collect::<Vec<&str>>()[1]
                .split("\"")
                .collect::<Vec<&str>>()[0]
                .to_string();
        }

        // Duration is rendered as "<minutes>m" inside a wrapper element.
        let duration_marker = "<span class=\"l\">"; // assumed marker
        let duration_str = vid
            .iter()
            .find(|s| s.contains(duration_marker))
            .ok_or_else(|| Error::from("Failed to find duration"))?
            .split(duration_marker)
            .collect::<Vec<&str>>()[1]
            .split("m<")
            .collect::<Vec<&str>>()[0];
        let duration: u32 = duration_str.parse::<u32>().unwrap_or(0) * 60;

        // Views (e.g. "1.2K") and rating (e.g. "85%") share one wrapper element.
        let stats_marker = "<span class=\"v\">"; // assumed marker
        let view_and_rating_str: Vec<&str> = vid
            .iter()
            .copied()
            .filter(|s| s.contains(stats_marker))
            .collect();
        if view_and_rating_str.len() < 2 {
            return Err("Failed to find views/rating".into());
        }
        let views_str = view_and_rating_str[0]
            .split(">")
            .collect::<Vec<&str>>()[1]
            .split("K<")
            .collect::<Vec<&str>>()[0];
        let views = (views_str.parse::<f32>().unwrap_or(0.0) * 1000.0) as u32;
        let rate_str = view_and_rating_str[1]
            .split(">")
            .collect::<Vec<&str>>()[1]
            .split("%<")
            .collect::<Vec<&str>>()[0];
        // Float type is inferred from what `VideoItem::rating` expects.
        let rating = rate_str.parse().unwrap_or(0.0);

        // Relative link to the video page; any leading slash is stripped
        // before joining with the base URL.
        let url_part = vid
            .iter()
            .find(|s| s.contains("<a href")) // assumed marker
            .ok_or_else(|| Error::from("Failed to find video link"))?
            .split("href=\"")
            .collect::<Vec<&str>>()[1]
            .split("\"")
            .collect::<Vec<&str>>()[0]
            .trim_start_matches('/');
        let url = match self.get_video_url(self.url.clone() + url_part, client).await {
            Ok(video_url) => video_url,
            Err(e) => {
                println!("Error resolving video URL: {}", e);
                return Err("Failed to get video URL".into());
            }
        };
        let id = url_part.split("/").collect::<Vec<&str>>()[0].to_string();

        // Quality badge (e.g. "HD", "4k"); default to "SD" when the line at
        // this offset carries no markup.
        let quality_str = match vid[25].contains("<") {
            true => vid[25]
                .split(">")
                .collect::<Vec<&str>>()[1]
                .split("<")
                .collect::<Vec<&str>>()[0],
            false => "SD",
        };
=> "720", _ => "1080", }; let mut format = videos::VideoFormat::new(url.clone(), quality.to_string(), "mp4".to_string()); format.add_http_header("User-Agent".to_string(), USER_AGENT.to_string()); let video_item = VideoItem::new(id, title, url.clone().to_string(), "spankbang".to_string(), thumb, duration) .views(views) .rating(rating) // .formats(vec![format]) .preview(preview); Ok(video_item) } async fn get_video_items_from_html(&self, html: String, client: &Client) -> Vec { if html.is_empty() { println!("HTML is empty"); return vec![]; } let mut items: Vec = Vec::new(); let split_html = html.split("class=\"video-list ").collect::>(); if split_html.len() < 2 { println!("Could not find video-list in HTML"); return items; } let video_listing_content = split_html[1]; let raw_videos_vec = video_listing_content .split("class=\"video-item\"") .collect::>(); if raw_videos_vec.len() < 2 { println!("Could not find video-item in HTML"); return items; } let raw_videos = raw_videos_vec[1..].to_vec(); let futures = raw_videos.into_iter().map(|el| self.parse_video_item(el.to_string(), client)); let results: Vec> = join_all(futures).await; let video_items: Vec = results .into_iter() .filter_map(Result::ok) .collect(); return video_items; } } impl Provider for SpankbangProvider { async fn get_videos( &self, cache: VideoCache, pool: DbPool, _channel: String, mut sort: String, query: Option, page: String, per_page: String, featured: String, ) -> Vec { let _ = per_page; let _ = featured; let _ = pool; if sort == "date"{ sort = "trending_videos".to_string(); } let videos: std::result::Result, Error> = match query { Some(q) => self.query(cache, page.parse::().unwrap_or(1), &q).await, None => self.get(cache, pool, page.parse::().unwrap_or(1), sort).await, }; match videos { Ok(v) => v, Err(e) => { println!("Error fetching videos: {}", e); vec![] } } } }