sxyprn
@@ -757,7 +757,7 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
nsfw: true,
});

// porn00
// freshporno
status.add_channel(Channel {
id: "freshporno".to_string(),
name: "Freshporno".to_string(),

@@ -10,6 +10,7 @@ use ntex_files as fs;
use ntex::web;

mod api;
mod proxy;
mod db;
mod models;
mod providers;
@@ -17,6 +18,7 @@ mod schema;
mod status;
mod util;
mod videos;
mod proxies;

type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

@@ -43,7 +45,7 @@ async fn main() -> std::io::Result<()> {
.expect("Failed to create pool.");

let mut requester = util::requester::Requester::new();
requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) == "1".to_string());
requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string());

let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
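
The PROXY toggle in this hunk changes from an exact match on "1" to "anything but 0". A minimal standalone sketch of the difference, reading the same env var the commit reads (everything else here is illustrative, not part of the commit):

use std::env;

fn main() {
    // Old rule: the upstream proxy was only enabled for PROXY=1.
    let old = env::var("PROXY").unwrap_or("0".to_string()) == "1".to_string();
    // New rule: any value other than "0" enables it (unset still defaults to
    // "0", so the proxy stays off), e.g. PROXY=true or PROXY=yes now work.
    let new = env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string();
    println!("old rule: {old}, new rule: {new}");
}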
@@ -55,6 +57,7 @@ async fn main() -> std::io::Result<()> {
.state(requester.clone())
.wrap(web::middleware::Logger::default())
.service(web::scope("/api").configure(api::config))
.service(web::scope("/proxy").configure(proxy::config))
.service(
web::resource("/")
.route(web::get().to(|| async {

@@ -3,6 +3,7 @@ use std::env;
use error_chain::error_chain;
use htmlentity::entity::{decode, ICodedDataTrait};
use futures::future::join_all;
use scraper::ElementRef;
use wreq::Client;
use wreq::Proxy;
use wreq_util::Emulation;
@@ -15,28 +16,7 @@ use crate::videos::ServerOptions;
use crate::videos::{VideoItem};
use crate::DbPool;
use crate::util::requester::Requester;

use base64::{engine::general_purpose, Engine as _};

/// Extracts digits from a string and sums them.
fn ssut51(arg: &str) -> u32 {
arg.chars()
.filter(|c| c.is_ascii_digit())
.map(|c| c.to_digit(10).unwrap())
.sum()
}

/// Encodes a token: "<sum1>-<host>-<sum2>" using Base64 URL-safe variant.
fn boo(sum1: u32, sum2: u32, host: &str) -> String {
let raw = format!("{}-{}-{}", sum1, host, sum2);
let encoded = general_purpose::STANDARD.encode(raw);

// Replace + → -, / → _, = → .
encoded
.replace('+', "-")
.replace('/', "_")
.replace('=', ".")
}
use scraper::{Html, Selector};

error_chain! {
foreign_links {
@@ -46,6 +26,15 @@ error_chain! {
}
}

fn has_blacklisted_class(element: &ElementRef, blacklist: &[&str]) -> bool {
element
.value()
.attr("class")
.map(|classes| classes.split_whitespace().any(|c| blacklist.contains(&c)))
.unwrap_or(false)
}
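
The new has_blacklisted_class helper builds on scraper's ElementRef attribute access. A short self-contained usage sketch (the HTML snippet and blacklist below are invented for illustration, not taken from the commit):

use scraper::{ElementRef, Html, Selector};

// Same helper as above: true when the element carries any blacklisted class.
fn has_blacklisted_class(element: &ElementRef, blacklist: &[&str]) -> bool {
    element
        .value()
        .attr("class")
        .map(|classes| classes.split_whitespace().any(|c| blacklist.contains(&c)))
        .unwrap_or(false)
}

fn main() {
    let html = Html::parse_document("<div class='post ad'>skip</div><div class='post'>keep</div>");
    let selector = Selector::parse("div").unwrap();
    // Keep only elements whose class list contains none of the blacklisted names.
    let kept: Vec<_> = html
        .select(&selector)
        .filter(|el| !has_blacklisted_class(el, &["ad"]))
        .collect();
    assert_eq!(kept.len(), 1);
}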

#[derive(Debug, Clone)]
pub struct SxyprnProvider {
url: String,
@@ -58,12 +47,22 @@ impl SxyprnProvider {
}
async fn get(&self, cache:VideoCache, pool:DbPool, page: u8, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {

let sort_string = match sort.as_str() {
"views" => "views",
"rating" => "rating",
"orgasmic" => "orgasmic",
_ => "latest",
};
// Extract needed fields from options at the start
let language = options.language.clone().unwrap();
let filter = options.filter.clone().unwrap();
let filter_string = match filter.as_str() {
"other" => "other",
"all" => "all",
_ => "top",
};
let mut requester = options.requester.clone().unwrap();

let url_str = format!("{}/blog/all/{}.html", self.url, ((page as u32)-1)*20);
let url_str = format!("{}/blog/all/{}.html?fl={}&sm={}", self.url, ((page as u32)-1)*20, filter_string, sort_string);

let old_items = match cache.get(&url_str) {
Some((time, items)) => {
@@ -92,14 +91,18 @@ impl SxyprnProvider {
}

async fn query(&self, cache: VideoCache, pool:DbPool, page: u8, query: &str, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
let sort_string = match sort.as_str() {
"views" => "views",
"rating" => "trending",
"orgasmic" => "orgasmic",
_ => "latest",
};
// Extract needed fields from options at the start
let language = options.language.clone().unwrap();
let filter = options.filter.clone().unwrap();
let mut requester = options.requester.clone().unwrap();
let search_string = query.replace(" ", "%20");
let search_string = query.replace(" ", "-");
let url_str = format!(
"{}/{}/search/{}?page={}&sort={}",
self.url, language, search_string, page, sort
"{}/{}.html?page={}&sm={}",
self.url, search_string, page, sort_string
);
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&url_str) {

@@ -136,116 +139,78 @@ impl SxyprnProvider {
.split("post_el_small'")
.collect::<Vec<&str>>()[1..]
.to_vec();
let mut urls: Vec<String> = vec![];
let mut items: Vec<VideoItem> = Vec::new();
for video_segment in &raw_videos {
let vid = video_segment.split("\n").collect::<Vec<&str>>();
for (index, line) in vid.iter().enumerate() {
println!("Line {}: {}", index, line.to_string().trim());
}

let url_str = video_segment.split("data-url='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string();
urls.push(url_str.clone());
break;
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
// for (index, line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line.to_string().trim());
// }
// println!("\n\n\n");

}
let futures = urls.into_iter().map(|el| self.get_video_item(el.clone(), pool.clone(), requester.clone()));
let results: Vec<Result<VideoItem>> = join_all(futures).await;
let video_items: Vec<VideoItem> = results
.into_iter()
.filter_map(Result::ok)
.collect();
let video_url = format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}",video_segment.split("/post/").collect::<Vec<&str>>()[1]
.split("'").collect::<Vec<&str>>()[0]
.to_string());

return video_items;
}
let mut title_parts = video_segment.split("post_text").collect::<Vec<&str>>()[1].split("style=''>").collect::<Vec<&str>>()[1]
.split("</div>")
.collect::<Vec<&str>>()[0]
;
println!("Title parts: {}", title_parts);
let document = Html::parse_document(title_parts);
let selector = Selector::parse("*").unwrap();

async fn get_video_item(&self, url_str: String, pool: DbPool, mut requester: Requester) -> Result<VideoItem> {
let mut conn = pool.get().expect("couldn't get db connection from pool");
let db_result = db::get_video(&mut conn,url_str.clone());
match db_result {
Ok(Some(entry)) => {
let video_item: VideoItem = serde_json::from_str(entry.as_str()).unwrap();
return Ok(video_item)
}
Ok(None) => {
}
Err(e) => {
println!("Error fetching video from database: {}", e);
}
}
drop(conn);
let vid = requester.get(&url_str).await.unwrap().to_string();
let mut title = vid.split("<title>").collect::<Vec<&str>>()[1]
.split(" #")
.collect::<Vec<&str>>()[0].trim()
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let thumb = format!("https:{}", vid.split("<meta property='og:image' content='").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string());

let raw_duration = vid.split("duration:<b>").collect::<Vec<&str>>()[1]
.split("</b>")
.collect::<Vec<&str>>()[0]
.to_string();
let duration = raw_duration.parse::<u32>().unwrap_or(0);

let id = url_str.split("/").collect::<Vec<&str>>().last().unwrap().replace(".html", "")
.to_string();
let mut tags = vec![];
if vid.split("splitter_block_header").collect::<Vec<&str>>()[0].contains("hash_link"){
for tag_snippet in vid.split("splitter_block_header").collect::<Vec<&str>>()[0].split("hash_link").collect::<Vec<&str>>()[1..].to_vec(){
let tag = tag_snippet.split("<").collect::<Vec<&str>>()[0].trim()
.to_string();
if !tag.is_empty(){
tags.push(tag.replace("#", ""));
let mut texts = Vec::new();
for element in document.select(&selector) {
let text = element.text().collect::<Vec<_>>().join(" ");
if !text.trim().is_empty() {
texts.push(text.trim().to_string());
}
}
}
let video_url_pre_str = vid.split("data-vnfo").collect::<Vec<&str>>()[1]
.split("\":\"").collect::<Vec<&str>>()[1]
.split("\"").collect::<Vec<&str>>()[0]
.replace("\\", "")
.to_string();
println!("Video URL pre str: {}", video_url_pre_str);
let video_request = requester.get(&url_str).await.unwrap();
let mut video_url_parts = vid.split("m3u8").collect::<Vec<&str>>()[1]
.split("https").collect::<Vec<&str>>()[0]
.split("|").collect::<Vec<&str>>();
video_url_parts.reverse();
let video_url = format!("https://{}.{}/{}-{}-{}-{}-{}/playlist.m3u8",
video_url_parts[1],
video_url_parts[2],
video_url_parts[3],
video_url_parts[4],
video_url_parts[5],
video_url_parts[6],
video_url_parts[7]
);
let video_item = VideoItem::new(
id,
title,
video_url.clone(),
"sxyprn".to_string(),
thumb,
duration,
)
.tags(tags)
;

let mut conn = pool.get().expect("couldn't get db connection from pool");
let insert_result = db::insert_video(&mut conn, &url_str, &serde_json::to_string(&video_item)?);
match insert_result{
Ok(_) => (),
Err(e) => {println!("{:?}", e); }
}
drop(conn);
println!("Texts: {:?}", texts);

let mut title = texts[0].clone();
// html decode
title = decode(title.as_bytes()).to_string().unwrap_or(title).replace(" "," ");

return Ok(video_item);
// println!("Title: {}", title);
let id = video_url.split("/").collect::<Vec<&str>>()[6].to_string();

let thumb = format!("https:{}",video_segment.split("<img class='mini_post_vid_thumb lazyload'").collect::<Vec<&str>>()[1]
.split("data-src='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string());

let preview = format!("https:{}",video_segment
.split("class='hvp_player'").collect::<Vec<&str>>()[1]
.split(" src='").collect::<Vec<&str>>()[1]
.split("'")
.collect::<Vec<&str>>()[0]
.to_string());

let views= video_segment
.split("<strong>·</strong> ").collect::<Vec<&str>>()[1]
.split(" ")
.collect::<Vec<&str>>()[0]
.to_string();

let video_item = VideoItem::new(
id,
title,
video_url.to_string(),
"sxyprn".to_string(),
thumb,
0,
)
.preview(preview)
.views(views.parse::<u32>().unwrap_or(0))
;
items.push(video_item);
}
return items;
}

}
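
The reworked listing flow above now only scrapes each post's data-url from the index page and resolves the details concurrently through get_video_item. A minimal, self-contained sketch of that fan-out-and-collect pattern, using only the futures crate the file already imports (the fetch function and URLs are placeholders, not the commit's code):

use futures::executor::block_on;
use futures::future::join_all;

// Placeholder standing in for SxyprnProvider::get_video_item: one URL in, one item out.
async fn fetch_item(url: String) -> Result<String, String> {
    Ok(format!("item for {}", url))
}

fn main() {
    let urls = vec!["post/1".to_string(), "post/2".to_string()];
    let items: Vec<String> = block_on(async {
        // One future per URL, awaited together; failed lookups are dropped,
        // mirroring the join_all + filter_map(Result::ok) step in the diff.
        let futures = urls.into_iter().map(fetch_item);
        join_all(futures).await.into_iter().filter_map(Result::ok).collect()
    });
    assert_eq!(items.len(), 2);
}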

impl Provider for SxyprnProvider {

src/proxies/mod.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
use ntex::web;

use crate::{proxies::sxyprn::SxyprnProxy, util::{cache::VideoCache, requester::Requester}};

pub mod sxyprn;

#[derive(Debug, Clone)]
pub enum AnyProxy {
Sxyprn(SxyprnProxy),
}

pub trait Proxy {
async fn get_video_url(
&self,
url: String,
requester: web::types::State<Requester>,
) -> String;
}

impl Proxy for AnyProxy {

async fn get_video_url(
&self,
url: String,
requester: web::types::State<Requester>,
) -> String {
println!(
"/proxy/video_url: url={:?}, provider={:?}",
url, self
);
match self {
AnyProxy::Sxyprn(p) => {
p.get_video_url(
url,
requester,
).await
}
}
}
}
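
AnyProxy is a small enum-dispatch layer: the trait method is async, so instead of boxing trait objects the wrapper matches on the variant and forwards the call to the concrete proxy. A minimal, self-contained sketch of the same pattern (all names below are illustrative, not from this commit; block_on comes from the futures crate the project already uses):

struct FooProxy;

impl FooProxy {
    async fn resolve(&self, url: String) -> String {
        format!("foo://{url}")
    }
}

enum AnyBackend {
    Foo(FooProxy),
}

impl AnyBackend {
    // One match arm per variant forwards to the concrete implementation,
    // keeping the async call statically dispatched.
    async fn resolve(&self, url: String) -> String {
        match self {
            AnyBackend::Foo(p) => p.resolve(url).await,
        }
    }
}

fn main() {
    let backend = AnyBackend::Foo(FooProxy);
    let resolved = futures::executor::block_on(backend.resolve("video/1".to_string()));
    assert_eq!(resolved, "foo://video/1");
}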

src/proxies/sxyprn.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
use base64::{engine::general_purpose, Engine as _};
use ntex::web;

use crate::{proxies::Proxy, util::requester::Requester};

/// Extracts digits from a string and sums them.
fn ssut51(arg: &str) -> u32 {
arg.chars()
.filter(|c| c.is_ascii_digit())
.map(|c| c.to_digit(10).unwrap())
.sum()
}

/// Encodes a token: "<sum1>-<host>-<sum2>" using Base64 URL-safe variant.
fn boo(sum1: u32, sum2: u32) -> String {
let raw = format!("{}-{}-{}", sum1, "sxyprn.com", sum2);
let encoded = general_purpose::STANDARD.encode(raw);

// Replace + → -, / → _, = → .
encoded
.replace('+', "-")
.replace('/', "_")
.replace('=', ".")
}

#[derive(Debug, Clone)]
pub struct SxyprnProxy {
}

impl SxyprnProxy {
pub fn new() -> Self {
SxyprnProxy {
}
}

pub async fn get_video_url(
&self,
url: String,
requester: web::types::State<Requester>,
) -> String {
let mut requester = requester.get_ref().clone();
let url = "https://sxyprn.com/".to_string() + &url;
let text = requester.get(&url).await.unwrap_or("".to_string());
if text.is_empty() {
return "".to_string();
}
let data_string = text.split("data-vnfo='").collect::<Vec<&str>>()[1]
.split("\":\"").collect::<Vec<&str>>()[1]
.split("\"}").collect::<Vec<&str>>()[0].replace("\\","");

let mut tmp = data_string
.split("/")
.map(|s| s.to_string())
.collect::<Vec<String>>();
tmp[1] = format!("{}8/{}", tmp[1], boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str())));
//preda
tmp[5] = format!(
"{}",
tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str())
);
let sxyprn_video_url = format!("https://sxyprn.com{}",tmp.join("/"));

let response = requester.get_raw(&sxyprn_video_url).await;
match response {
Ok(resp) => {
return resp.headers().get("Location").unwrap().to_str().unwrap_or("").to_string();
},
Err(e) => {
println!("Error fetching video URL: {}", e);
}
}
return "".to_string();
}
}
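
The URL rewriting above hinges on the two helpers: the digit sums of path segments 6 and 7 feed a Base64 token that gets spliced into segment 1, while segment 5 is reduced by both sums. A small standalone sketch of just the token step, with made-up segment values (the helpers are copied from the file above; the sample inputs and expected output are only illustrative):

use base64::{engine::general_purpose, Engine as _};

fn ssut51(arg: &str) -> u32 {
    arg.chars()
        .filter(|c| c.is_ascii_digit())
        .map(|c| c.to_digit(10).unwrap())
        .sum()
}

fn boo(sum1: u32, sum2: u32) -> String {
    let raw = format!("{}-{}-{}", sum1, "sxyprn.com", sum2);
    general_purpose::STANDARD
        .encode(raw)
        .replace('+', "-")
        .replace('/', "_")
        .replace('=', ".")
}

fn main() {
    // Made-up path segments standing in for tmp[6] and tmp[7].
    let seg6 = "50f3a"; // digits 5, 0, 3 -> 8
    let seg7 = "9c12";  // digits 9, 1, 2 -> 12
    let (s1, s2) = (ssut51(seg6), ssut51(seg7));
    assert_eq!((s1, s2), (8, 12));

    // "8-sxyprn.com-12" Base64-encoded, then the URL-safe substitutions applied.
    assert_eq!(boo(s1, s2), "OC1zeHlwcm4uY29tLTEy");
}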

src/proxy.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
use ntex::web::{self, HttpRequest};

use crate::proxies::sxyprn::SxyprnProxy;
use crate::util::{cache::VideoCache, requester::Requester};
use crate::proxies::*;

pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("/sxyprn/{endpoint}*")
.route(web::post().to(sxyprn))
.route(web::get().to(sxyprn)),
)
// .service(
// web::resource("/videos")
// // .route(web::get().to(videos_get))
// .route(web::post().to(videos_post)),
// )
;
}

async fn sxyprn(req: HttpRequest,
requester: web::types::State<Requester>,) -> Result<impl web::Responder, web::Error> {
let proxy = get_proxy(req.uri().to_string().split("/").collect::<Vec<&str>>()[2]).unwrap();
let endpoint = req.match_info().query("endpoint").to_string();
println!("/proxy/sxyprn: endpoint={:?}", endpoint);
let video_url = match proxy.get_video_url(endpoint, requester).await{
url if url != "" => url,
_ => "Error".to_string(),
};
Ok(web::HttpResponse::Found()
.header("Location", video_url)
.finish())
}

fn get_proxy(proxy: &str) -> Option<AnyProxy> {
match proxy {
"sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
_ => None,
}
}

@@ -2,6 +2,7 @@ use wreq::header::HeaderValue;
use wreq::redirect::Policy;
use wreq::Client;
use wreq::Proxy;
use wreq::Response;
use wreq::Version;
use wreq_util::Emulation;
use std::env;
@@ -57,6 +58,27 @@ impl Requester {
}
}

pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> {

let client = Client::builder()
.cert_verification(false)
.emulation(Emulation::Firefox136)
.cookie_store(true)
.build()
.expect("Failed to create HTTP client");
let mut request = client.get(url).version(Version::HTTP_11);
let proxy;
if self.proxy {
if let Ok(proxy_url) = env::var("BURP_URL") {
proxy = Proxy::all(&proxy_url).unwrap();
request = request.proxy(proxy.clone());
}
}

// Directly propagate the error from send()
request.send().await
}

pub async fn get(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
let mut request = self.client.get(url).version(Version::HTTP_11);
let mut proxy;