diff --git a/src/api.rs b/src/api.rs
index b235798..e1bc4cb 100644
--- a/src/api.rs
+++ b/src/api.rs
@@ -345,6 +345,11 @@ async fn videos_post(
     let per_page_clone = perPage.to_string();
     let featured_clone = featured.clone();
     task::spawn_local(async move {
+        if let AnyProvider::Spankbang(_) = provider_clone {
+            // Spankbang has a delay for the next page
+            let delay = time::Duration::from_secs(60);
+            time::sleep(delay).await;
+        }
         let _ = provider_clone
             .get_videos(
                 cache_clone,
diff --git a/src/providers/spankbang.rs b/src/providers/spankbang.rs
index 33ebfd1..b03542b 100644
--- a/src/providers/spankbang.rs
+++ b/src/providers/spankbang.rs
@@ -3,8 +3,10 @@ use std::env;
 use error_chain::error_chain;
 use futures::future::join_all;
 use htmlentity::entity::{decode, ICodedDataTrait};
+use ntex::channel::pool;
 use reqwest::Client;
 use reqwest::{Proxy};
+use crate::db;
 use crate::providers::Provider;
 use crate::util::cache::VideoCache;
 use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
@@ -64,11 +66,30 @@ impl SpankbangProvider {
         };
         let response = client.get(url.clone()).send().await?;
-        println!("Response status: {}", response.status());
+        let mut cookies_string = String::new();
+        if let Some(cookies) = response.headers().get_all("set-cookie").iter().next() {
+            for value in response.headers().get_all("set-cookie").iter() {
+                let mut cookies_map = HashMap::new();
+                for value in response.headers().get_all("set-cookie").iter() {
+                    if let Ok(cookie_str) = value.to_str() {
+                        if let Some((k, v)) = cookie_str.split_once('=') {
+                            let key = k.trim();
+                            let val = v.split(';').next().unwrap_or("").trim();
+                            cookies_map.insert(key.to_string(), val.to_string());
+                        }
+                    }
+                }
+                cookies_string = cookies_map
+                    .iter()
+                    .map(|(k, v)| format!("{}={}", k, v))
+                    .collect::<Vec<String>>()
+                    .join("; ");
+            }
+        }
         if response.status().is_success() {
             let text = response.text().await?;
             println!("Response text: {}", text);
-            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client).await;
+            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client, cookies_string, pool.clone()).await;
             if !video_items.is_empty() {
@@ -91,7 +112,7 @@ impl SpankbangProvider {
             let video_items = match result {
                 Ok(res) => {
                     // println!("FlareSolverr response: {}", res);
-                    self.get_video_items_from_html(res.solution.response, &client).await
+                    self.get_video_items_from_html(res.solution.response, &client,String::new(), pool.clone()).await
                 }
                 Err(e) => {
                     println!("Error solving FlareSolverr: {}", e);
@@ -108,7 +129,7 @@ impl SpankbangProvider {
         }
     }

-    async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result<Vec<VideoItem>> {
+    async fn query(&self, cache: VideoCache, pool: DbPool, page: u8, query: &str) -> Result<Vec<VideoItem>> {
         let url = format!("{}s/{}/{}/", self.url, query.replace(" ", "+"), page);
         let old_items = match cache.get(&url) {
@@ -141,6 +162,7 @@ impl SpankbangProvider {
         };
         let response = client.get(url.clone()).send().await?;
+        let mut cookies_string = String::new();
         if let Some(cookies) = response.headers().get_all("set-cookie").iter().next() {
             for value in response.headers().get_all("set-cookie").iter() {
                 let mut cookies_map = HashMap::new();
                 for value in response.headers().get_all("set-cookie").iter() {
                     if let Ok(cookie_str) = value.to_str() {
                         if let Some((k, v)) = cookie_str.split_once('=') {
                             let key = k.trim();
                             let val = v.split(';').next().unwrap_or("").trim();
                             cookies_map.insert(key.to_string(), val.to_string());
                         }
                     }
                 }
-                let cookies_string = cookies_map
+                cookies_string = cookies_map
                     .iter()
                     .map(|(k, v)| format!("{}={}", k, v))
                     .collect::<Vec<String>>()
                     .join("; ");
             }
         }
         if response.status().is_success() {
             let text = response.text().await?;
-            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client).await;
+            let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client, cookies_string, pool.clone()).await;
             if !video_items.is_empty() {
                 cache.remove(&url);
                 cache.insert(url.clone(), video_items.clone());
@@ -183,7 +205,7 @@ impl SpankbangProvider {
             let video_items = match result {
                 Ok(res) => {
                     // println!("FlareSolverr response: {}", res);
-                    self.get_video_items_from_html(res.solution.response, &client).await
+                    self.get_video_items_from_html(res.solution.response, &client, String::new(), pool.clone()).await
                 }
                 Err(e) => {
                     println!("Error solving FlareSolverr: {}", e);
@@ -200,16 +222,33 @@ impl SpankbangProvider {
         }
     }

-    async fn get_video_url(&self, url:String, client:&Client) -> Result<String> {
-        let new_url = "".to_string();
-        let response = client.get(url.clone()).send().await?;
+    async fn get_video_url(&self, url:String, client:&Client, cookies: String, pool: DbPool) -> Result<String> {
+
+        let mut conn = pool.get().expect("couldn't get db connection from pool");
+        let db_result = db::get_video(&mut conn,url.clone());
+        drop(conn);
+        match db_result {
+            Ok(Some(video_url)) => {
+                return Ok(video_url);
+            }
+            Ok(None) => (),
+            Err(e) => {
+                println!("Error fetching video from database: {}", e);
+                // return Err(format!("Error fetching video from database: {}", e).into());
+            }
+        }
+        let response = client.get(url.clone()).header("Cookie", cookies).send().await?;
         if response.status().is_success() {
             let text = response.text().await?;
             let lines = text.split("\n").collect::<Vec<&str>>();
             let url_line = lines.iter()
                 .find(|s| s.trim_start().starts_with("<video"))
                 .unwrap_or(&"");
-            Ok(url_line.split("src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string())
+            let new_url = url_line.split("src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
+            let mut conn = pool.get().expect("couldn't get db connection from pool");
+            let _ = db::insert_video(&mut conn, &url, &new_url);
+            drop(conn);
+            return Ok(new_url)
         }
         Err(Error::from("Failed to get video URL"))
     }
@@ -218,6 +257,8 @@ impl SpankbangProvider {
         &self,
         html: String,
         client: &Client,
+        cookies: String,
+        pool: DbPool
     ) -> Result<VideoItem> {
         let vid = html.split("\n").collect::<Vec<&str>>();
         // for (index,line) in vid.iter().enumerate(){
@@ -241,7 +282,7 @@
         let rate_str = view_and_rating_str[1].split(">").collect::<Vec<&str>>()[1].split("%<").collect::<Vec<&str>>()[0];
         let rating = rate_str.parse::<f32>().unwrap_or(0.0);
         let url_part = vid.iter().find(|s| s.contains("<a href")).unwrap_or(&"").split("href=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0];
-        let url = match self.get_video_url(self.url.clone() + url_part, client).await {
+        let url = match self.get_video_url(self.url.clone() + url_part, client, cookies, pool).await {
             Ok(video_url) => video_url,
             Err(e) => {
                 return Err("Failed to get video URL".into());
@@ -259,11 +300,6 @@
             "SD" => "720",
             _ => "1080",
         };
-
-
-        let mut format =
-            videos::VideoFormat::new(url.clone(), quality.to_string(), "mp4".to_string());
-        format.add_http_header("User-Agent".to_string(), USER_AGENT.to_string());
         let video_item = VideoItem::new(id, title, url.clone().to_string(), "spankbang".to_string(), thumb, duration)
             .views(views)
@@ -273,7 +309,7 @@ impl SpankbangProvider {
         Ok(video_item)
     }

-    async fn get_video_items_from_html(&self, html: String, client: &Client) -> Vec<VideoItem> {
+    async fn get_video_items_from_html(&self, html: String, client: &Client, cookies:String, pool: DbPool) -> Vec<VideoItem> {
         if html.is_empty() {
             println!("HTML is empty");
             return vec![];
@@ -293,7 +329,7 @@ impl SpankbangProvider {
             return items;
         }
         let raw_videos = raw_videos_vec[1..].to_vec();
-        let futures = raw_videos.into_iter().map(|el| self.parse_video_item(el.to_string(), client));
+        let futures = raw_videos.into_iter().map(|el| self.parse_video_item(el.to_string(), client, cookies.clone(), pool.clone()));
         let results: Vec<Result<VideoItem>> = join_all(futures).await;
         let video_items: Vec<VideoItem> = results
             .into_iter()
@@ -324,7 +360,7 @@ impl Provider for SpankbangProvider {
             sort = "trending_videos".to_string();
         }
         let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
-            Some(q) => self.query(cache, page.parse::<u8>().unwrap_or(1), &q).await,
+            Some(q) => self.query(cache, pool, page.parse::<u8>().unwrap_or(1), &q).await,
             None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), sort).await,
         };
         match videos {
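// Note (not part of the diff above): the set-cookie handling that get() and query()
// now inline boils down to flattening every Set-Cookie response header into one
// "name=value; name=value" Cookie header. A standalone sketch of that step follows;
// the helper name is hypothetical and the diff keeps the logic inline instead.
use std::collections::HashMap;
use reqwest::Response;

fn cookie_header_from_response(response: &Response) -> String {
    let mut cookies_map = HashMap::new();
    for value in response.headers().get_all("set-cookie").iter() {
        if let Ok(cookie_str) = value.to_str() {
            // Keep only the leading "name=value" pair; drop attributes such as
            // Path, Expires and HttpOnly that follow the first ';'.
            if let Some((k, v)) = cookie_str.split_once('=') {
                let key = k.trim();
                let val = v.split(';').next().unwrap_or("").trim();
                cookies_map.insert(key.to_string(), val.to_string());
            }
        }
    }
    cookies_map
        .iter()
        .map(|(k, v)| format!("{}={}", k, v))
        .collect::<Vec<String>>()
        .join("; ")
}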
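// Note (not part of the diff above): src/db.rs is referenced via `use crate::db;`
// but not shown. A minimal sketch of what `DbPool`, `db::get_video` and
// `db::insert_video` could look like, assuming an r2d2 + rusqlite pool and a
// simple page-url -> resolved-video-url table; the real module may use a
// different crate and schema.
use r2d2::PooledConnection;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::{params, OptionalExtension, Result};

pub type DbPool = r2d2::Pool<SqliteConnectionManager>;
type DbConn = PooledConnection<SqliteConnectionManager>;

// Look up a previously resolved video URL for a page URL, if one was stored.
pub fn get_video(conn: &mut DbConn, page_url: String) -> Result<Option<String>> {
    conn.query_row(
        "SELECT video_url FROM videos WHERE page_url = ?1",
        params![page_url],
        |row| row.get(0),
    )
    .optional()
}

// Store the resolved video URL so later calls can skip the extra HTTP request.
pub fn insert_video(conn: &mut DbConn, page_url: &str, video_url: &str) -> Result<usize> {
    conn.execute(
        "INSERT OR REPLACE INTO videos (page_url, video_url) VALUES (?1, ?2)",
        params![page_url, video_url],
    )
}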