organized removed providers

Simon
2025-08-31 17:51:53 +00:00
parent 7d514895cd
commit 9caec79427
5 changed files with 23 additions and 23 deletions

archive/hentaimoon.rs (new file, +282)

@@ -0,0 +1,282 @@
use crate::util::parse_abbreviated_number;
use crate::DbPool;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use std::env;
use std::vec;
use wreq::{Client, Proxy};
use wreq_util::Emulation;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
#[derive(Debug, Clone)]
pub struct HentaimoonProvider {
url: String,
}
impl HentaimoonProvider {
pub fn new() -> Self {
HentaimoonProvider {
url: "https://hentai-moon.com".to_string(),
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
sort: &str,
) -> Result<Vec<VideoItem>> {
let sort_string = match sort {
"popular" => "/most-popular",
"top-rated" => "/top-rated",
_ => "/latest-updates/",
};
let list_str = match sort {
"popular" | "top-rated" => "list_videos_common_videos_list",
_ => "list_videos_most_recent_videos",
};
let video_url = format!("{}{}?mode=async^&function=get_block^&block_id={}^&from={}", self.url, sort_string, list_str, page);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
let _proxy = Proxy::all("http://192.168.0.103:8081").unwrap();
let client = Client::builder().cert_verification(false).emulation(Emulation::Firefox136).build()?;
let mut response = client.get(video_url.clone())
// .proxy(proxy.clone())
.send().await?;
if response.status().is_redirection(){
response = client.get(response.headers()["Location"].to_str().unwrap())
// .proxy(proxy.clone())
.send().await?;
}
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
} else {
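// Non-success status usually means an anti-bot challenge; retry the request through FlareSolverr.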
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: video_url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response)
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
) -> Result<Vec<VideoItem>> {
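// The search endpoint expects the query as a lower-case, hyphen-separated slug.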
let search_string = query.to_lowercase().trim().replace(" ", "-");
let video_url = format!("{}/search/{}/?mode=async&function=get_block&block_id=list_videos_videos_list_search_result&q={}&category_ids=&sort_by=&from_videos={}&from_albums={}&", self.url, search_string, search_string, page, page);
// Check the video cache. If the entry is younger than five minutes, return it; otherwise kick off a cache sweep and still serve the stale items.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let _proxy = Proxy::all("http://192.168.0.103:8081").unwrap();
let client = Client::builder().cert_verification(false).emulation(Emulation::Firefox136).build()?;
let mut response = client.get(video_url.clone())
// .proxy(proxy.clone())
.send().await?;
if response.status().is_redirection(){
response = client.get(self.url.clone() + response.headers()["Location"].to_str().unwrap())
// .proxy(proxy.clone())
.send().await?;
}
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
} else {
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: video_url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => self.get_video_items_from_html(res.solution.response),
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let mut items: Vec<VideoItem> = Vec::new();
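// Scraping is done with plain string splits: cut everything after the pagination block,
// then treat each <div class="item "> chunk as one video card.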
let raw_videos = html.split("<div class=\"pagination\"").collect::<Vec<&str>>()[0]
.split("<div class=\"item \">")
.collect::<Vec<&str>>()[1..]
.to_vec();
for video_segment in &raw_videos {
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
// for (index, line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line);
// }
let video_url: String = video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0].to_string();
let mut title = video_segment.split("\" title=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
// html decode
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let id = video_url.split("/").collect::<Vec<&str>>()[4].to_string();
let raw_duration = video_segment.split("<div class=\"duration\">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let thumb = video_segment.split("<img class=\"thumb ").collect::<Vec<&str>>()[1]
.split("data-original=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let views_part = video_segment.split("<div class=\"views\">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string();
let views = parse_abbreviated_number(&views_part).unwrap_or(0) as u32;
let video_item = VideoItem::new(
id,
title,
video_url.to_string(),
"hentaimoon".to_string(),
thumb,
duration,
)
.views(views)
;
items.push(video_item);
}
return items;
}
}
impl Provider for HentaimoonProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = options;
let _ = per_page;
let _ = pool;
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
Some(q) => {
self.query(cache, page.parse::<u8>().unwrap_or(1), &q)
.await
}
None => {
self.get(cache, page.parse::<u8>().unwrap_or(1), &sort)
.await
}
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
}

archive/noodlemagazine.rs (new file, +197)

@@ -0,0 +1,197 @@
use crate::util::parse_abbreviated_number;
use crate::DbPool;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use std::env;
use std::vec;
use wreq::{Client, Proxy};
use wreq_util::Emulation;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
#[derive(Debug, Clone)]
pub struct NoodlemagazineProvider {
url: String,
}
impl NoodlemagazineProvider {
pub fn new() -> Self {
NoodlemagazineProvider {
url: "https://noodlemagazine.com".to_string(),
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
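// This provider's page argument is 1-based while the site's "?p=" parameter is 0-based, hence the page - 1 below.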
let video_url = format!("{}/popular/recent?p={}", self.url, page-1);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
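// Fetch the listing through the shared requester supplied via ServerOptions (panics if none is configured).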
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let search_string = query.replace(" ", "%20");
let video_url = format!("{}/video/{}?p={}", self.url, search_string, page);
// Check the video cache. If the entry is younger than five minutes, return it; otherwise kick off a cache sweep and still serve the stale items.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let mut items: Vec<VideoItem> = Vec::new();
let raw_videos = html.split("- Made with <svg ").collect::<Vec<&str>>()[0]
.split("<div class=\"item\">")
.collect::<Vec<&str>>()[1..]
.to_vec();
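// Each <div class="item"> chunk is one video card; URL, title, duration, thumbnail and views are pulled out with string splits on the surrounding markup.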
for video_segment in &raw_videos {
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
// for (index, line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line);
// }
let video_url: String = format!("{}{}",self.url, video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0].to_string());
let mut title = video_segment.split("\"title\">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string();
// html decode
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let id = video_url.split("/").collect::<Vec<&str>>()[4].to_string();
let raw_duration = video_segment.split("#clock-o").collect::<Vec<&str>>()[1]
.split("</svg>").collect::<Vec<&str>>()[1]
.split("<").collect::<Vec<&str>>()[0]
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let thumb = video_segment.split("<img ").collect::<Vec<&str>>()[1]
.split("data-src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let views_part = video_segment.split("#eye").collect::<Vec<&str>>()[1]
.split("</svg>").collect::<Vec<&str>>()[1]
.split("<").collect::<Vec<&str>>()[0]
.to_string();
let views = parse_abbreviated_number(&views_part).unwrap_or(0) as u32;
let video_item = VideoItem::new(
id,
title,
video_url.to_string(),
"noodlemagazine".to_string(),
thumb,
duration,
)
.views(views)
;
items.push(video_item);
}
return items;
}
}
impl Provider for NoodlemagazineProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = per_page;
let _ = pool;
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
Some(q) => {
self.query(cache, page.parse::<u8>().unwrap_or(1), &q,options)
.await
}
None => {
self.get(cache, page.parse::<u8>().unwrap_or(1), options)
.await
}
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
}

archive/spankbang.rs (new file, +380)

@@ -0,0 +1,380 @@
use std::vec;
use std::env;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{decode, ICodedDataTrait};
use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::videos::ServerOptions;
use crate::videos::{VideoItem};
use crate::DbPool;
use std::collections::HashMap;
use wreq::Client;
use wreq_util::Emulation;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
#[derive(Debug, Clone)]
pub struct SpankbangProvider {
url: String,
}
impl SpankbangProvider {
pub fn new() -> Self {
SpankbangProvider {
url: "https://spankbang.com/".to_string()
}
}
async fn get(&self, cache:VideoCache, pool: DbPool, page: u8, sort: String) -> Result<Vec<VideoItem>> {
let url = format!("{}{}/{}/", self.url, sort, page);
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = Client::builder()
.emulation(Emulation::Firefox136)
.cert_verification(false)
.build()?;
let response = client.get(url.clone()).send().await?;
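// Collect the Set-Cookie headers into a single Cookie header value; the per-video requests in get_video_url need these session cookies.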
let mut cookies_map = HashMap::new();
for value in response.headers().get_all("set-cookie").iter() {
if let Ok(cookie_str) = value.to_str() {
if let Some((k, v)) = cookie_str.split_once('=') {
let key = k.trim();
let val = v.split(';').next().unwrap_or("").trim();
cookies_map.insert(key.to_string(), val.to_string());
}
}
}
let cookies_string = cookies_map
.iter()
.map(|(k, v)| format!("{}={}", k, v))
.collect::<Vec<String>>()
.join("; ");
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client, cookies_string, pool.clone()).await;
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response, &client,String::new(), pool.clone()).await
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
async fn query(&self, cache: VideoCache, pool: DbPool, page: u8, query: &str) -> Result<Vec<VideoItem>> {
let url = format!("{}s/{}/{}/", self.url, query.replace(" ", "+"), page);
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
items.clone()
}
}
None => {
vec![]
}
};
let client = Client::builder()
.emulation(Emulation::Firefox136)
.cert_verification(false)
.build()?;
let response = client.get(url.clone()).send().await?;
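// Same cookie collection as in get(): the session cookies are forwarded to the per-video requests.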
let mut cookies_map = HashMap::new();
for value in response.headers().get_all("set-cookie").iter() {
if let Ok(cookie_str) = value.to_str() {
if let Some((k, v)) = cookie_str.split_once('=') {
let key = k.trim();
let val = v.split(';').next().unwrap_or("").trim();
cookies_map.insert(key.to_string(), val.to_string());
}
}
}
let cookies_string = cookies_map
.iter()
.map(|(k, v)| format!("{}={}", k, v))
.collect::<Vec<String>>()
.join("; ");
if response.status().is_success() {
let text = response.text().await?;
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), &client, cookies_string, pool.clone()).await;
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else{
return Ok(old_items);
}
Ok(video_items)
} else {
let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
let flare = Flaresolverr::new(flare_url);
let result = flare
.solve(FlareSolverrRequest {
cmd: "request.get".to_string(),
url: url.clone(),
maxTimeout: 60000,
})
.await;
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
self.get_video_items_from_html(res.solution.response, &client, String::new(), pool.clone()).await
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
return Err("Failed to solve FlareSolverr".into());
}
};
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
}
async fn get_video_url(&self, url:String, client:&Client, cookies: String, pool: DbPool) -> Result<String> {
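// Direct video URLs are cached in the database so each video page only has to be scraped once.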
let mut conn = pool.get().expect("couldn't get db connection from pool");
let db_result = db::get_video(&mut conn,url.clone());
drop(conn);
match db_result {
Ok(Some(video_url)) => {
return Ok(video_url);
}
Ok(None) => (),
Err(e) => {
println!("Error fetching video from database: {}", e);
// return Err(format!("Error fetching video from database: {}", e).into());
}
}
let mut response = client.get(url.clone()).header("Cookie", cookies.clone()).send().await?;
while response.status().as_u16() == 429 {
// Too Many Requests: back off for 60 seconds before retrying.
ntex::time::sleep(ntex::time::Seconds(60)).await;
response = client.get(url.clone()).header("Cookie", cookies.clone()).send().await?;
}
if response.status().is_success() {
let text = response.text().await?;
let lines = text.split("\n").collect::<Vec<&str>>();
let url_line = lines.iter()
.find(|s| s.trim_start().starts_with("<source src=") && s.contains("type=\"video/mp4\""))
.unwrap_or(&"");
let new_url = url_line.split("src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
let mut conn = pool.get().expect("couldn't get db connection from pool");
let _ = db::insert_video(&mut conn, &url, &new_url);
drop(conn);
return Ok(new_url)
}
Err(Error::from("Failed to get video URL"))
}
async fn parse_video_item(
&self,
mut html: String,
client: &Client,
cookies: String,
pool: DbPool
) -> Result<VideoItem> {
if html.contains("<!-- Video list block -->") {
html = html.split("<!-- Video list block -->").collect::<Vec<&str>>()[0].to_string();
}
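// Parsing below is line-based and relies on the card's fixed layout (e.g. the duration is read from line index 64).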
let vid = html.split("\n").collect::<Vec<&str>>();
if vid.len() > 200 {
return Err("Video item has too many lines".into());
}
// for (index ,line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line);
// }
let title_line = vid.iter()
.find(|s| s.trim_start().starts_with("<a href=") && s.contains("title="))
.unwrap_or(&"");
let mut title = title_line.split("title=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let thumb_line = vid.iter()
.find(|s| s.trim_start().starts_with("data-src=") && s.contains(".jpg\""))
.unwrap_or(&"");
let thumb = thumb_line.split("data-src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
// let preview_line = vid.iter()
// .find(|s: &&&str| s.trim_start().starts_with("<source data-src=") && s.contains("mp4"))
// .unwrap_or(&"");
// let mut preview = "".to_string();
// if vid[15].contains("data-preview=\""){
// preview = vid[15].split("data-preview=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
// }
// else{
// preview = preview_line.split("data-src=\"").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0].to_string();
// }
let duration_str = vid.get(64).unwrap_or(&"").split("m").collect::<Vec<&str>>()[0];
let duration: u32 = duration_str.parse::<u32>().unwrap_or(0) * 60;
// let view_and_rating_str: Vec<&str> = vid.iter().copied().filter(|s| s.contains("<span class=\"md:text-body-md\">")).collect();
// let views_str = view_and_rating_str[0].split(">").collect::<Vec<&str>>()[1].split("K<").collect::<Vec<&str>>()[0];
// let views = (views_str.parse::<f32>().unwrap_or(0.0) * 1000.0) as u32;
// let rate_str = view_and_rating_str[1].split(">").collect::<Vec<&str>>()[1].split("%<").collect::<Vec<&str>>()[0];
// let rating = rate_str.parse::<f32>().unwrap_or(0.0);
let url_part = vid.iter().find(|s| s.contains("<a href=\"/")).unwrap().split("<a href=\"/").collect::<Vec<&str>>()[1].split("\"").collect::<Vec<&str>>()[0];
let url = match self.get_video_url(self.url.clone() + url_part, client, cookies, pool).await {
Ok(video_url) => video_url,
Err(e) => {
print!("Error fetching video URL: {}", e);
return Err("Failed to get video URL".into());
}
};
let id = url_part.split("/").collect::<Vec<&str>>()[0].to_string();
// let quality_str = match vid[25].contains("<"){
// true => vid[25].split(">").collect::<Vec<&str>>()[1].split("<").collect::<Vec<&str>>()[0],
// false => "SD",
// };
// let quality = match quality_str{
// "HD" => "1080",
// "4k" => "2160",
// "SD" => "720",
// _ => "1080",
// };
let video_item = VideoItem::new(id, title, url.clone().to_string(), "spankbang".to_string(), thumb, duration)
// .views(views)
// .rating(rating)
// .formats(vec![format])
// .preview(preview)
;
Ok(video_item)
}
async fn get_video_items_from_html(&self, html: String, client: &Client, cookies:String, pool: DbPool) -> Vec<VideoItem> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let items: Vec<VideoItem> = Vec::new();
let split_html = html.split("\"video-list").collect::<Vec<&str>>();
if split_html.len() < 2 {
println!("Could not find video-list in HTML");
return items;
}
let video_listing_content = format!("{}{}", split_html[1], split_html.get(2).unwrap_or(&""));
let raw_videos_vec = video_listing_content
.split("data-testid=\"video-item\"")
.collect::<Vec<&str>>();
if raw_videos_vec.len() < 2 {
println!("Could not find video-item in HTML");
return items;
}
let raw_videos = raw_videos_vec[1..].to_vec();
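// Resolve all cards concurrently; cards whose direct URL cannot be fetched are dropped by the filter_map below.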
let futures = raw_videos.into_iter().map(|el| self.parse_video_item(el.to_string(), client, cookies.clone(), pool.clone()));
let results: Vec<Result<VideoItem>> = join_all(futures).await;
let video_items: Vec<VideoItem> = results
.into_iter()
.filter_map(Result::ok)
.collect();
return video_items;
}
}
impl Provider for SpankbangProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
mut sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = options;
let _ = per_page;
if sort == "date"{
sort = "trending_videos".to_string();
}
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
Some(q) => self.query(cache, pool, page.parse::<u8>().unwrap_or(1), &q).await,
None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), sort).await,
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
}