// NOTE(review): This chunk appears to have been corrupted by an HTML/text
// extraction pass rather than authored this way:
//   * Generic type parameters have been stripped throughout, e.g.
//     `Arc>>` (presumably `Arc<RwLock<Vec<FilterOption>>>` — TODO confirm),
//     `collect::>()` (presumably `collect::<Vec<&str>>()`), and bare
//     `Result>` / `Vec` return types — none of this compiles as-is.
//   * Several string literals used as `split(...)` delimiters are cut off at
//     a line break and resume with stray "  • " bullet artifacts (e.g. the
//     `stars_div.split("` ... `>()[1..].to_vec()` sequence), so the original
//     HTML markers they matched on are unrecoverable from this text alone.
//   * `//` line comments now sit mid-line; if the surrounding text were one
//     physical line, they would comment out everything after them.
//   * In `get_video_item`, `stars_elements`, `tags`, and `text` are used with
//     no visible definition — those bindings were evidently lost in the same
//     mangling — and the function is truncated mid-expression at the end of
//     the chunk (`match text2.split("`).
// Do NOT hand-repair from this copy: the lost `split` delimiters encode the
// site's scraping contract and cannot be reconstructed by guesswork.
// Restore the file from version control, then re-review. Everything below is
// left byte-for-byte unchanged.
use crate::DbPool; use crate::api::ClientVersion; use crate::providers::Provider; use crate::status::*; use crate::util::cache::VideoCache; use crate::util::requester::Requester; use crate::util::time::parse_time_to_seconds; use crate::videos::{ServerOptions, VideoFormat, VideoItem}; use async_trait::async_trait; use error_chain::error_chain; use futures::future::join_all; use htmlentity::entity::{ICodedDataTrait, decode}; use std::sync::{Arc, RwLock}; use std::{thread, vec}; use titlecase::Titlecase; error_chain! { foreign_links { Io(std::io::Error); HttpRequest(wreq::Error); } } #[derive(Debug, Clone)] pub struct HqpornerProvider { url: String, stars: Arc>>, categories: Arc>>, } impl HqpornerProvider { pub fn new() -> Self { let provider = HqpornerProvider { url: "https://hqporner.com".to_string(), stars: Arc::new(RwLock::new(vec![])), categories: Arc::new(RwLock::new(vec![])), }; provider.spawn_initial_load(); provider } fn spawn_initial_load(&self) { let url = self.url.clone(); let stars = Arc::clone(&self.stars); let categories = Arc::clone(&self.categories); thread::spawn(move || { // Create a tiny runtime just for these async tasks let rt = tokio::runtime::Builder::new_current_thread() .enable_all() .build() .expect("build tokio runtime"); rt.block_on(async move { if let Err(e) = Self::load_stars(&url, stars).await { eprintln!("load_stars failed: {e}"); } if let Err(e) = Self::load_categories(&url, categories).await { eprintln!("load_categories failed: {e}"); } }); }); } async fn load_stars(base_url: &str, stars: Arc>>) -> Result<()> { let mut requester = Requester::new(); let text = requester .get(format!("{}/girls", &base_url).as_str(), None) .await .unwrap(); let stars_div = text .split("Girls") .collect::>().last().unwrap() .split("") .collect::>()[0]; for stars_element in stars_div.split("
  • >()[1..].to_vec() { let star_id = stars_element.split("href=\"/actress/").collect::>()[1] .split("\"") .collect::>()[0] .to_string(); let star_name = stars_element.split(">()[1] .split(">").collect::>()[1] .split("<") .collect::>()[0] .to_string(); Self::push_unique( &stars, FilterOption { id: star_id, title: star_name, }, ); } return Ok(()); } async fn load_categories(base_url: &str, categories: Arc>>) -> Result<()> { let mut requester = Requester::new(); let text = requester .get(format!("{}/categories", &base_url).as_str(), None) .await .unwrap(); let categories_div = text .split("Categories") .collect::>().last().unwrap() .split("") .collect::>()[0]; for categories_element in categories_div.split("
  • >()[1..].to_vec() { let category_id = categories_element.split("href=\"/category/").collect::>()[1] .split("\"") .collect::>()[0] .to_string(); let category_name = categories_element.split(">()[1] .split(">").collect::>()[1] .split("<") .collect::>()[0] .titlecase(); Self::push_unique( &categories, FilterOption { id: category_id, title: category_name, }, ); } return Ok(()); } fn build_channel(&self, clientversion: ClientVersion) -> Channel { let _ = clientversion; Channel { id: "hqporner".to_string(), name: "HQPorner".to_string(), description: "HD Porn Videos Tube".to_string(), premium: false, favicon: "https://www.google.com/s2/favicons?sz=64&domain=hqporner.com".to_string(), status: "active".to_string(), categories: self.categories.read().unwrap().iter().map(|c| c.title.clone()).collect(), options: vec![], nsfw: true, cacheDuration: None, } } // Push one item with minimal lock time and dedup by id fn push_unique(target: &Arc>>, item: FilterOption) { if let Ok(mut vec) = target.write() { if !vec.iter().any(|x| x.id == item.id) { vec.push(item); // Optional: keep it sorted for nicer UX // vec.sort_by(|a,b| a.title.cmp(&b.title)); } } } async fn get( &self, cache: VideoCache, page: u8, sort: &str, options: ServerOptions, ) -> Result> { let _ = sort; let video_url = format!("{}/hdporn/{}", self.url, page); let old_items = match cache.get(&video_url) { Some((time, items)) => { if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 { return Ok(items.clone()); } else { items.clone() } } None => { vec![] } }; let mut requester = options.requester.clone().unwrap(); let text = requester.get(&video_url, None).await.unwrap(); let video_items: Vec = self .get_video_items_from_html(text.clone(), &mut requester) .await; if !video_items.is_empty() { cache.remove(&video_url); cache.insert(video_url.clone(), video_items.clone()); } else { return Ok(old_items); } Ok(video_items) } async fn query( &self, cache: VideoCache, page: u8, query: &str, options: ServerOptions, ) -> 
// NOTE(review): `query`'s return type continues below; `Result>` has also
// lost its parameter — presumably `Result<Vec<VideoItem>>`, matching `get`
// above (TODO confirm against the original file).
Result> { let search_string = query.trim().to_string(); let mut video_url = format!("{}/?q={}&p={}", self.url, search_string, page); if let Some(star) = self.stars.read().unwrap().iter().find(|s| s.title.to_ascii_lowercase() == search_string.to_ascii_lowercase()) { video_url = format!("{}/actress/{}/{}", self.url, star.id, page); } if let Some(cat) = self.categories.read().unwrap().iter().find(|c| c.title.to_ascii_lowercase() == search_string.to_ascii_lowercase()) { video_url = format!("{}/category/{}/{}", self.url, cat.id, page); } // Check our Video Cache. If the result is younger than 1 hour, we return it. let old_items = match cache.get(&video_url) { Some((time, items)) => { if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 { return Ok(items.clone()); } else { let _ = cache.check().await; return Ok(items.clone()); } } None => { vec![] } }; let mut requester = options.requester.clone().unwrap(); let text = requester.get(&video_url, None).await.unwrap(); let video_items: Vec = self .get_video_items_from_html(text.clone(), &mut requester) .await; if !video_items.is_empty() { cache.remove(&video_url); cache.insert(video_url.clone(), video_items.clone()); } else { return Ok(old_items); } Ok(video_items) } async fn get_video_items_from_html( &self, html: String, requester: &mut Requester, ) -> Vec { if html.is_empty() || html.contains("404 Not Found") { return vec![]; } let raw_videos = html.split("id=\"footer\"").collect::>()[0] .split("
    ") .collect::>()[2] .split("
    ") .collect::>()[1..] .to_vec(); let futures = raw_videos .into_iter() .map(|el| self.get_video_item(el.to_string(), requester.clone())); let results: Vec> = join_all(futures).await; let video_items: Vec = results.into_iter().filter_map(Result::ok).collect(); return video_items; } async fn get_video_item( &self, video_segment: String, mut requester: Requester, ) -> Result { let video_url: String = format!( "{}{}", self.url, video_segment.split(">()[1] .split("\"") .collect::>()[0] .to_string() ); let mut title = video_segment .split("
  • ") .collect::>()[0] .split("href=\"/actress/") .collect::>()[1..] .to_vec(); for star_el in stars_elements { let star_id = star_el.split("\"").collect::>()[0].to_string(); let star_name = star_el.split("\">").collect::>()[1] .split("<") .collect::>()[0] .to_string(); tags.push(star_name.clone()); Self::push_unique(&self.stars, FilterOption { id: star_id, title: star_name.clone(), }); } let categories_elements = text.split("This video belongs to the following categories").collect::>()[1] .split("

    ") .collect::>()[0] .split("href=\"/category/") .collect::>()[1..] .to_vec(); for categories_el in categories_elements { let category_id = categories_el.split("\"").collect::>()[0].to_string(); let category_name = categories_el.split("\">").collect::>()[1] .split("<") .collect::>()[0].titlecase(); tags.push(category_name.clone()); Self::push_unique(&self.categories, FilterOption { id: category_id, title: category_name.clone(), }); } let video_url = format!( "https:{}", text.split("url: '/blocks/altplayer.php?i=") .collect::>()[1] .split("'") .collect::>()[0] ); let text2 = requester .get_raw_with_headers( &video_url, vec![("Referer".to_string(), "https://hqporner.com/".to_string())], ) .await .unwrap() .text() .await .unwrap(); match text2.split("