database support

Simon
2025-06-05 18:50:28 +00:00
parent 6d08362937
commit 175c9b748f
15 changed files with 333 additions and 140 deletions

.env Normal file

@@ -0,0 +1 @@
+DATABASE_URL=hottub.db

.gitignore vendored

@@ -21,3 +21,5 @@ Cargo.lock
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
+hottub.db
+migrations/.keep

Cargo.toml

@@ -6,6 +6,8 @@ edition = "2021"
[dependencies]
async-trait = "0.1.88"
awc = "3.7.0"
+diesel = { version = "2.2.10", features = ["sqlite", "r2d2"] }
+dotenvy = "0.15.7"
env_logger = "0.11.8"
error-chain = "0.12.4"
futures = "0.3.31"

Dockerfile

@@ -26,6 +26,7 @@ RUN apt install -yq libssl-dev \
fluxbox \
xvfb \
gnome-screenshot \
+libsqlite3-dev sqlite3 \
&& apt-get clean

diesel.toml Normal file

@@ -0,0 +1,9 @@
+# For documentation on how to configure this file,
+# see https://diesel.rs/guides/configuring-diesel-cli
+[print_schema]
+file = "src/schema.rs"
+custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
+[migrations_directory]
+dir = "migrations"

migrations/…/down.sql Normal file

@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE videos

migrations/…/up.sql Normal file

@@ -0,0 +1,8 @@
+-- Your SQL goes here
+CREATE TABLE videos (
+id TEXT NOT NULL PRIMARY KEY, -- like url parts to uniquely identify a video
+url TEXT NOT NULL,
+views INTEGER,
+rating INTEGER,
+uploader TEXT
+)

src/api.rs

@@ -6,7 +6,7 @@ use ntex::web::HttpRequest;
use crate::providers::hanime::HanimeProvider;
use crate::providers::perverzija::PerverzijaProvider;
use crate::util::cache::VideoCache;
-use crate::{providers::*, status::*, videos::*};
+use crate::{providers::*, status::*, videos::*, DbPool};
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -195,7 +195,8 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
async fn videos_post(
video_request: web::types::Json<Videos_Request>,
-cache: web::types::State<VideoCache>
+cache: web::types::State<VideoCache>,
+pool: web::types::State<DbPool>,
) -> Result<impl web::Responder, web::Error> {
let mut videos = Videos {
pageInfo: PageInfo {
@@ -232,7 +233,7 @@ async fn videos_post(
let provider = get_provider(channel.as_str())
.ok_or_else(|| web::error::ErrorBadRequest("Invalid channel".to_string()))?;
let video_items = provider
-.get_videos(cache.get_ref().clone(), channel, sort, query, page.to_string(), perPage.to_string(), featured)
+.get_videos(cache.get_ref().clone(), pool.get_ref().clone(), channel, sort, query, page.to_string(), perPage.to_string(), featured)
.await;
videos.items = video_items.clone();
Ok(web::HttpResponse::Ok().json(&videos))

src/db.rs Normal file

@@ -0,0 +1,27 @@
+use diesel::prelude::*;
+use dotenvy::dotenv;
+use std::{env, sync::{Arc, Mutex}};
+use crate::models::DBVideo;
+pub fn get_video(conn: &mut SqliteConnection, video_id: String) -> Result<Option<String>, diesel::result::Error> {
+use crate::schema::videos::dsl::*;
+let result = videos
+.filter(id.eq(video_id))
+.first::<DBVideo>(conn)
+.optional()?;
+match result{
+Some(video) => Ok(Some(video.url)),
+None => Ok(None),
+}
+}
+pub fn insert_video(conn: &mut SqliteConnection, new_id: &str, new_url: &str) -> Result<usize, diesel::result::Error> {
+use crate::schema::videos::dsl::*;
+diesel::insert_into(videos).values(DBVideo{
+id: new_id.to_string(),
+url: new_url.to_string(),
+}).execute(conn)
+}
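
A minimal usage sketch of these two helpers, assuming a handle of the DbPool type defined in src/main.rs; the cache_url wrapper below is illustrative only and not part of this commit:

    // Illustrative only: look up a cached video URL and insert it if missing,
    // using the DbPool alias from src/main.rs and the helpers in src/db.rs.
    fn cache_url(pool: &crate::DbPool, id: &str, url: &str) -> Result<(), diesel::result::Error> {
        let mut conn = pool.get().expect("couldn't get db connection from pool");
        if crate::db::get_video(&mut conn, id.to_string())?.is_none() {
            crate::db::insert_video(&mut conn, id, url)?;
        }
        Ok(())
    }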

src/main.rs

@@ -1,26 +1,53 @@
#![allow(non_snake_case)]
+#[macro_use]
+extern crate diesel;
+use diesel::{r2d2::{self, ConnectionManager}, SqliteConnection};
+use dotenvy::dotenv;
use ntex_files as fs;
use ntex::web;
-mod api;
-mod status;
-mod videos;
-mod providers;
-mod util;
+mod api;
+mod db;
+mod models;
+mod providers;
+mod schema;
+mod status;
+mod util;
+mod videos;
+type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
#[ntex::main]
async fn main() -> std::io::Result<()> {
std::env::set_var("RUST_BACKTRACE", "1");
env_logger::init(); // You need this to actually see logs
+dotenv().ok();
+// set up database connection pool
+let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
+let manager = ConnectionManager::<SqliteConnection>::new(connspec);
+let pool = r2d2::Pool::builder()
+.build(manager)
+.expect("Failed to create pool.");
let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
web::HttpServer::new(move || {
web::App::new()
+.state(pool.clone())
.state(cache.clone())
.wrap(web::middleware::Logger::default())
.service(web::scope("/api").configure(api::config))
-.service(fs::Files::new("/", "static"))
+.service(
+web::resource("/")
+.route(web::get().to(|| async {
+web::HttpResponse::Found()
+.header("Location", "hottub://source?url=hottub.spacemoehre.de")
+.finish()
+}))
+)
+.service(fs::Files::new("/", "static").index_file("index.html"))
})
// .bind_openssl(("0.0.0.0", 18080), builder)?
.bind(("0.0.0.0", 18080))?

src/models.rs Normal file

@@ -0,0 +1,10 @@
+use diesel::prelude::*;
+use serde::{Serialize};
+#[derive(Debug, Clone, Serialize, Queryable, Insertable)]
+#[diesel(table_name = crate::schema::videos)]
+#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
+pub struct DBVideo {
+pub id: String,
+pub url: String,
+}

src/providers/hanime.rs

@@ -6,11 +6,13 @@ use htmlentity::entity::{decode, ICodedDataTrait};
use reqwest::{Proxy};
use futures::future::join_all;
+use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
-use crate::videos::{self, Video_Embed, Video_Item}; // Make sure Provider trait is imported
+use crate::videos::{self, Video_Embed, Video_Item};
+use crate::DbPool; // Make sure Provider trait is imported
error_chain! {
foreign_links {
@@ -125,12 +127,30 @@ impl HanimeProvider {
}
}
-async fn get_video_item(&self, hit: HanimeSearchResult) -> Result<(u64,Video_Item)> {
+async fn get_video_item(&self, hit: HanimeSearchResult, pool: DbPool) -> Result<Video_Item> {
+let mut conn = pool.get().expect("couldn't get db connection from pool");
+let db_result = db::get_video(&mut conn,format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug.clone()));
+drop(conn);
let id = hit.id.to_string();
let title = hit.name;
let thumb = hit.poster_url;
let duration = (hit.duration_in_ms / 1000) as u32; // Convert ms to seconds
let channel = "hanime".to_string(); // Placeholder, adjust as needed
+match db_result {
+Ok(Some(video_url)) => {
+return Ok(Video_Item::new(id, title, video_url.clone(), channel, thumb, duration)
+.tags(hit.tags)
+.uploader(hit.brand)
+.views(hit.views as u32)
+.rating((hit.likes as f32 / (hit.likes + hit.dislikes)as f32) * 100 as f32)
+.formats(vec![videos::Video_Format::new(video_url.clone(), "1080".to_string(), "m3u8".to_string())]));
+}
+Ok(None) => (),
+Err(e) => {
+println!("Error fetching video from database: {}", e);
+// return Err(format!("Error fetching video from database: {}", e).into());
+}
+}
let client = match env::var("BURP_URL").as_deref() {
Ok(burp_url) =>
@@ -149,11 +169,14 @@ impl HanimeProvider {
let text = match response.status().is_success() {
true => {
-response.text().await?},
+response.text().await?
+},
false => {
print!("Failed to fetch video item: {}\n\n", response.status());
return Err(format!("Failed to fetch video item: {}", response.status()).into());
-} };
+}
+};
let urls = text.split("\"servers\"").collect::<Vec<&str>>()[1];
let mut url_vec = vec![];
@@ -163,16 +186,19 @@ impl HanimeProvider {
url_vec.push(url.to_string());
}
}
-Ok((hit.created_at, Video_Item::new(id, title, url_vec[0].clone(), channel, thumb, duration)
+let mut conn = pool.get().expect("couldn't get db connection from pool");
+let _ = db::insert_video(&mut conn, &format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug.clone()), &url_vec[0].clone());
+drop(conn);
+Ok(Video_Item::new(id, title, url_vec[0].clone(), channel, thumb, duration)
.tags(hit.tags)
.uploader(hit.brand)
.views(hit.views as u32)
.rating((hit.likes as f32 / (hit.likes + hit.dislikes)as f32) * 100 as f32)
-.formats(vec![videos::Video_Format::new(url_vec[0].clone(), "1080".to_string(), "m3u8".to_string())])))
+.formats(vec![videos::Video_Format::new(url_vec[0].clone(), "1080".to_string(), "m3u8".to_string())]))
}
-async fn get(&self, cache: VideoCache, page: u8, query: String) -> Result<Vec<Video_Item>> {
+async fn get(&self, cache: VideoCache, pool: DbPool, page: u8, query: String) -> Result<Vec<Video_Item>> {
let index = format!("{}:{}", query, page);
let old_items = match cache.get(&index) {
@@ -219,13 +245,12 @@ impl HanimeProvider {
let hits_json: Vec<HanimeSearchResult> = serde_json::from_str(hits.as_str())
.map_err(|e| format!("Failed to parse hits JSON: {}", e))?;
// let timeout_duration = Duration::from_secs(120);
-let futures = hits_json.into_iter().map(|el| self.get_video_item(el.clone()));
+let futures = hits_json.into_iter().map(|el| self.get_video_item(el.clone(), pool.clone()));
-let results: Vec<Result<(u64,Video_Item)>> = join_all(futures).await;
+let results: Vec<Result<Video_Item>> = join_all(futures).await;
-let mut items: Vec<(u64, Video_Item)> = results
+let video_items: Vec<Video_Item> = results
.into_iter()
.filter_map(Result::ok)
.collect();
-let video_items: Vec<Video_Item> = items.into_iter().map(|(_, item)| item).collect();
if !video_items.is_empty() {
cache.remove(&index);
cache.insert(index.clone(), video_items.clone());
@@ -241,6 +266,7 @@ impl Provider for HanimeProvider {
async fn get_videos(
&self,
cache: VideoCache,
+pool: DbPool,
_channel: String,
sort: String,
query: Option<String>,
@@ -252,8 +278,8 @@ impl Provider for HanimeProvider {
let _ = per_page;
let _ = sort;
let videos: std::result::Result<Vec<Video_Item>, Error> = match query {
-Some(q) => self.get(cache, page.parse::<u8>().unwrap_or(1), q).await,
+Some(q) => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), q).await,
-None => self.get(cache, page.parse::<u8>().unwrap_or(1), "".to_string()).await,
+None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), "".to_string()).await,
};
match videos {
Ok(v) => v,

src/providers/mod.rs

@@ -1,10 +1,10 @@
-use crate::{providers::{hanime::HanimeProvider, perverzija::PerverzijaProvider}, util::cache::VideoCache, videos::Video_Item};
+use crate::{providers::{hanime::HanimeProvider, perverzija::PerverzijaProvider}, util::cache::VideoCache, videos::Video_Item, DbPool};
pub mod perverzija;
pub mod hanime;
pub trait Provider{
-async fn get_videos(&self, cache: VideoCache ,channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item>;
+async fn get_videos(&self, cache: VideoCache, pool: DbPool, channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item>;
}
pub enum AnyProvider {
@@ -12,10 +12,10 @@ pub enum AnyProvider {
Hanime(HanimeProvider),
}
impl Provider for AnyProvider {
-async fn get_videos(&self, cache: VideoCache ,channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item> {
+async fn get_videos(&self, cache: VideoCache, pool:DbPool, channel: String, sort: String, query: Option<String>, page: String, per_page: String, featured: String) -> Vec<Video_Item> {
match self {
-AnyProvider::Perverzija(p) => p.get_videos(cache ,channel, sort, query, page, per_page, featured).await,
+AnyProvider::Perverzija(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await,
-AnyProvider::Hanime(p) => p.get_videos(cache ,channel, sort, query, page, per_page, featured).await,
+AnyProvider::Hanime(p) => p.get_videos(cache, pool, channel, sort, query, page, per_page, featured).await,
}
}
}

src/providers/perverzija.rs

@@ -3,12 +3,16 @@ use std::env;
use error_chain::error_chain;
use htmlentity::entity::{decode, ICodedDataTrait};
use reqwest::{Proxy};
+use futures::future::join_all;
+use ntex::web;
+use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
-use crate::videos::{self, Video_Embed, Video_Item}; // Make sure Provider trait is imported
+use crate::videos::{self, Video_Embed, Video_Item};
+use crate::DbPool; // Make sure Provider trait is imported
error_chain! {
foreign_links {
@@ -26,9 +30,7 @@ impl PerverzijaProvider {
url: "https://tube.perverzija.com/".to_string(), url: "https://tube.perverzija.com/".to_string(),
} }
} }
async fn get(&self, cache:VideoCache ,page: u8, featured: String) -> Result<Vec<Video_Item>> { async fn get(&self, cache:VideoCache, pool:DbPool, page: u8, featured: String) -> Result<Vec<Video_Item>> {
println!("get");
//TODO //TODO
// let mut url = Url::parse("https://example.net")?; // let mut url = Url::parse("https://example.net")?;
// url.query_pairs_mut().append_pair("foo", "bar"); // url.query_pairs_mut().append_pair("foo", "bar");
@@ -78,7 +80,7 @@ impl PerverzijaProvider {
// print!("Response: {:?}\n", response);
if response.status().is_success() {
let text = response.text().await?;
-let video_items: Vec<Video_Item> = self.get_video_items_from_html(text.clone());
+let video_items: Vec<Video_Item> = self.get_video_items_from_html(text.clone(), pool);
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
@@ -96,11 +98,10 @@ impl PerverzijaProvider {
maxTimeout: 60000,
})
.await;
-println!("FlareSolverr result: {:?}", result);
let video_items = match result {
Ok(res) => {
// println!("FlareSolverr response: {}", res);
-self.get_video_items_from_html(res.solution.response)
+self.get_video_items_from_html(res.solution.response, pool)
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
@@ -116,22 +117,20 @@ impl PerverzijaProvider {
Ok(video_items)
}
}
-async fn query(&self, cache: VideoCache, page: u8, query: &str) -> Result<Vec<Video_Item>> {
+async fn query(&self, cache: VideoCache, pool:DbPool, page: u8, query: &str) -> Result<Vec<Video_Item>> {
-println!("query: {}", query);
let search_string = query.replace(" ", "+");
let mut url = format!(
-"{}advanced-search/?_sf_s={}&sf_paged={}",
+"{}page/{}/?s={}",
-self.url, search_string, page
+self.url, page, search_string
);
if page == 1 {
-url = format!("{}advanced-search/?_sf_s={}", self.url, search_string);
+url = format!("{}?s={}", self.url, search_string);
}
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
-println!("Cache hit for URL: {}", url);
return Ok(items.clone());
}
else{
@@ -156,10 +155,9 @@ impl PerverzijaProvider {
};
let response = client.get(url.clone()).send().await?;
-// print!("Response: {:?}\n", response);
if response.status().is_success() {
let text = response.text().await?;
-let video_items: Vec<Video_Item> = self.get_video_items_from_html_query(text.clone());
+let video_items: Vec<Video_Item> = self.get_video_items_from_html_query(text.clone(), pool).await;
if !video_items.is_empty() {
cache.remove(&url);
cache.insert(url.clone(), video_items.clone());
@@ -177,11 +175,9 @@ impl PerverzijaProvider {
maxTimeout: 60000,
})
.await;
-println!("FlareSolverr result: {:?}", result);
let video_items = match result {
Ok(res) => {
-// println!("FlareSolverr response: {}", res);
-self.get_video_items_from_html_query(res.solution.response)
+self.get_video_items_from_html_query(res.solution.response, pool).await
}
Err(e) => {
println!("Error solving FlareSolverr: {}", e);
@@ -198,8 +194,7 @@ impl PerverzijaProvider {
}
}
-fn get_video_items_from_html(&self, html: String) -> Vec<Video_Item> {
+fn get_video_items_from_html(&self, html: String, pool: DbPool) -> Vec<Video_Item> {
-// println!("HTML: {}", html);
if html.is_empty() {
println!("HTML is empty");
return vec![];
@@ -213,7 +208,6 @@ impl PerverzijaProvider {
for video_segment in &raw_videos {
let vid = video_segment.split("\n").collect::<Vec<&str>>();
if vid.len() > 20 {
-println!("Skipping video segment with unexpected length: {}", vid.len());
continue;
}
let mut title = vid[1].split(">").collect::<Vec<&str>>()[1]
@@ -258,10 +252,14 @@ impl PerverzijaProvider {
.split("'")
.collect::<Vec<&str>>()[0]
.to_string();
-// let referer_url = vid[1].split("data-url='").collect::<Vec<&str>>()[1]
-// .split("'")
-// .collect::<Vec<&str>>()[0]
-// .to_string();
+let id_url = vid[1].split("data-url='").collect::<Vec<&str>>()[1]
+.split("'")
+.collect::<Vec<&str>>()[0]
+.to_string();
+let mut conn = pool.get().expect("couldn't get db connection from pool");
+let _ = db::insert_video(&mut conn, &id_url, &url);
+drop(conn);
let referer_url = "https://xtremestream.xyz/".to_string();
let embed = Video_Embed::new(embed_html, url.clone());
@@ -298,64 +296,137 @@ impl PerverzijaProvider {
return items;
}
-fn get_video_items_from_html_query(&self, html: String) -> Vec<Video_Item> {
+async fn get_video_items_from_html_query(&self, html: String, pool:DbPool) -> Vec<Video_Item> {
-let mut items: Vec<Video_Item> = Vec::new();
-let video_listing_content = html.split("search-filter-results-").collect::<Vec<&str>>()[1];
-let raw_videos = video_listing_content
+let raw_videos = html
.split("video-item post")
.collect::<Vec<&str>>()[1..]
.to_vec();
-for video_segment in &raw_videos {
-let vid = video_segment.split("\n").collect::<Vec<&str>>();
-if vid.len() > 20 {
-continue;
+let futures = raw_videos.into_iter().map(|el| self.get_video_item(el, pool.clone()));
+let results: Vec<Result<Video_Item>> = join_all(futures).await;
+let items: Vec<Video_Item> = results
+.into_iter()
+.filter_map(Result::ok)
+.collect();
+return items;
+}
-let mut title = vid[3].split("title='").collect::<Vec<&str>>()[1]
-.split("'")
+async fn get_video_item(&self, snippet: &str, pool: DbPool) -> Result<Video_Item> {
+let vid = snippet.split("\n").collect::<Vec<&str>>();
+if vid.len() > 30 {
+return Err("Unexpected video snippet length".into());
+}
+// for (index,line) in vid.iter().enumerate() {
+// println!("Line {}: {}", index, line.to_string().trim());
+// }
+let mut title = vid[5].split(" title=\"").collect::<Vec<&str>>()[1]
+.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
-let url = vid[4].split("iframe src=&quot;").collect::<Vec<&str>>()[1]
-.split("&quot;")
-.collect::<Vec<&str>>()[0]
-.to_string().replace("index.php","xs1.php");
-let id = url.split("data=").collect::<Vec<&str>>()[1]
-.split("&")
-.collect::<Vec<&str>>()[0]
-.to_string();
-let raw_duration = match vid.len() {
-18 => vid[16].split("time_dur\">").collect::<Vec<&str>>()[1]
-.split("<")
-.collect::<Vec<&str>>()[0]
-.to_string(),
-_ => "00:00".to_string(),
-};
-let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
-let thumb_index = match vid.len() {
-18 => 14,
-13 => 8,
-_ => {
-continue;
-}
-};
-let thumb = match vid[thumb_index].contains("srcset=\"") {
-true => vid[thumb_index].split(" ").collect::<Vec<&str>>()[0]
-.to_string(),
-false => vid[thumb_index].split("src=\"").collect::<Vec<&str>>()[1]
+let thumb = match vid[6].split(" src=\"").collect::<Vec<&str>>().len(){
+1=>{
+for (index,line) in vid.iter().enumerate() {
+println!("Line {}: {}", index, line.to_string().trim());
+}
+return Err("Failed to parse thumbnail URL".into());
+}
+_ => vid[6].split(" src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string(),
};
-let embed_html = vid[4].split("data-embed='").collect::<Vec<&str>>()[1]
-.split("'")
+let duration = 0;
+let lookup_url = vid[5].split(" href=\"").collect::<Vec<&str>>()[1]
+.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
-// let referer_url = vid[4].split("data-url='").collect::<Vec<&str>>()[1]
-// .split("'")
-// .collect::<Vec<&str>>()[0]
-// .to_string();
let referer_url = "https://xtremestream.xyz/".to_string();
-let embed = Video_Embed::new(embed_html, url.clone());
+let mut conn = pool.get().expect("couldn't get db connection from pool");
+let db_result = db::get_video(&mut conn,lookup_url.clone());
+match db_result {
+Ok(Some(url)) => {
+let mut id = url.split("data=").collect::<Vec<&str>>()[1]
+.to_string();
+if id.contains("&"){
+id = id.split("&").collect::<Vec<&str>>()[0].to_string()
+}
+let mut video_item = Video_Item::new(
+id,
+title,
+url.clone(),
+"perverzija".to_string(),
+thumb,
+duration,
+);
+let mut format =
+videos::Video_Format::new(url.clone(), "1080".to_string(), "m3u8".to_string());
+format.add_http_header("Referer".to_string(), referer_url.clone());
+if let Some(formats) = video_item.formats.as_mut() {
+formats.push(format);
+} else {
+video_item.formats = Some(vec![format]);
+}
+return Ok(video_item);
+}
+Ok(None) => (),
+Err(e) => {
+println!("Error fetching video from database: {}", e);
+// return Err(format!("Error fetching video from database: {}", e).into());
+}
+}
+drop(conn);
+let client = match env::var("BURP_URL").as_deref() {
+Ok(burp_url) =>
+reqwest::Client::builder()
+.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
+.proxy(Proxy::https(burp_url).unwrap())
+.danger_accept_invalid_certs(true)
+.build()?,
+Err(_) => reqwest::Client::builder()
+.user_agent("Mozilla/5.0 (iPhone; CPU iPhone OS 14_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/33.0 Mobile/15E148 Safari/605.1.15")
+.danger_accept_invalid_certs(true)
+.build()?,
+};
+let response = client.get(lookup_url.clone()).send().await?;
+let text = match response.status().is_success(){
+true => response.text().await?,
+false => {
+return Err("Failed to fetch video details".into());
+}
+};
+let url = text.split("<iframe src=\"").collect::<Vec<&str>>()[1]
+.split("\"")
+.collect::<Vec<&str>>()[0]
+.to_string().replace("index.php","xs1.php");
+if url.starts_with("https://discord.com"){
+return Err("Discord link found, skipping video".into());
+}
+let mut conn = pool.get().expect("couldn't get db connection from pool");
+let _ = db::insert_video(&mut conn, &lookup_url, &url);
+drop(conn);
+if !url.contains("xtremestream.xyz"){
+return Err("Video URL does not contain xtremestream.xyz".into());
+}
+let mut id = url.split("data=").collect::<Vec<&str>>()[1]
+.to_string();
+if id.contains("&"){
+id = id.split("&").collect::<Vec<&str>>()[0].to_string()
+}
+// if !vid[6].contains(" src=\""){
+// for (index,line) in vid.iter().enumerate() {
+// println!("Line {}: {}", index, line.to_string().trim());
+// }
+// }
let mut tags: Vec<String> = Vec::new(); // Placeholder for tags, adjust as needed
for tag in vid[0].split(" ").collect::<Vec<&str>>(){
if tag.starts_with("tag-") {
@@ -385,10 +456,7 @@ impl PerverzijaProvider {
} else {
video_item.formats = Some(vec![format]);
}
-items.push(video_item);
+return Ok(video_item);
-}
-return items;
}
}
@@ -396,6 +464,7 @@ impl Provider for PerverzijaProvider {
async fn get_videos(
&self,
cache: VideoCache,
+pool: DbPool,
_channel: String,
sort: String,
query: Option<String>,
@@ -406,8 +475,8 @@ impl Provider for PerverzijaProvider {
let _ = per_page;
let _ = sort;
let videos: std::result::Result<Vec<Video_Item>, Error> = match query {
-Some(q) => self.query(cache, page.parse::<u8>().unwrap_or(1), &q).await,
+Some(q) => self.query(cache, pool, page.parse::<u8>().unwrap_or(1), &q).await,
-None => self.get(cache, page.parse::<u8>().unwrap_or(1), featured).await,
+None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), featured).await,
};
match videos {
Ok(v) => v,

src/schema.rs Normal file

@@ -0,0 +1,8 @@
+// @generated automatically by Diesel CLI.
+diesel::table! {
+videos (id) {
+id -> Text,
+url -> Text,
+}
+}
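
A small sketch of reading this table back through the generated DSL, assuming the DBVideo model added in src/models.rs; the all_videos helper is illustrative and not part of this commit:

    use diesel::prelude::*;

    // Illustrative only: load every cached row from the videos table,
    // ordered by the primary key defined in src/schema.rs.
    fn all_videos(conn: &mut diesel::SqliteConnection) -> QueryResult<Vec<crate::models::DBVideo>> {
        use crate::schema::videos::dsl::*;
        videos.order(id.asc()).load::<crate::models::DBVideo>(conn)
    }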