sxyprn
@@ -22,3 +22,4 @@ percent-encoding = "2.3.2"
 capitalize = "0.3.4"
 url = "2.5.4"
 base64 = "0.22.1"
+scraper = "0.24.0"
@@ -757,7 +757,7 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
         nsfw: true,
     });

-    // porn00
+    // freshporno
     status.add_channel(Channel {
         id: "freshporno".to_string(),
         name: "Freshporno".to_string(),
@@ -10,6 +10,7 @@ use ntex_files as fs;
 use ntex::web;

 mod api;
+mod proxy;
 mod db;
 mod models;
 mod providers;
@@ -17,6 +18,7 @@ mod schema;
 mod status;
 mod util;
 mod videos;
+mod proxies;

 type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

@@ -43,7 +45,7 @@ async fn main() -> std::io::Result<()> {
         .expect("Failed to create pool.");

     let mut requester = util::requester::Requester::new();
-    requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) == "1".to_string());
+    requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string());

     let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();

@@ -55,6 +57,7 @@ async fn main() -> std::io::Result<()> {
             .state(requester.clone())
             .wrap(web::middleware::Logger::default())
             .service(web::scope("/api").configure(api::config))
+            .service(web::scope("/proxy").configure(proxy::config))
             .service(
                 web::resource("/")
                     .route(web::get().to(|| async {
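The PROXY check in main.rs is loosened here: previously only the literal value "1" enabled the upstream proxy, now anything other than "0" does, while an unset variable still defaults to "0" (disabled). A minimal standalone sketch of the new behaviour (the helper name is illustrative, not part of the commit):

use std::env;

// Mirrors the changed set_proxy() argument: enabled unless PROXY is unset or "0".
fn proxy_enabled() -> bool {
    env::var("PROXY").unwrap_or("0".to_string()) != "0"
}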
@@ -3,6 +3,7 @@ use std::env;
 use error_chain::error_chain;
 use htmlentity::entity::{decode, ICodedDataTrait};
 use futures::future::join_all;
+use scraper::ElementRef;
 use wreq::Client;
 use wreq::Proxy;
 use wreq_util::Emulation;
@@ -15,28 +16,7 @@ use crate::videos::ServerOptions;
 use crate::videos::{VideoItem};
 use crate::DbPool;
 use crate::util::requester::Requester;
+use scraper::{Html, Selector};
-use base64::{engine::general_purpose, Engine as _};
-
-/// Extracts digits from a string and sums them.
-fn ssut51(arg: &str) -> u32 {
-    arg.chars()
-        .filter(|c| c.is_ascii_digit())
-        .map(|c| c.to_digit(10).unwrap())
-        .sum()
-}
-
-/// Encodes a token: "<sum1>-<host>-<sum2>" using Base64 URL-safe variant.
-fn boo(sum1: u32, sum2: u32, host: &str) -> String {
-    let raw = format!("{}-{}-{}", sum1, host, sum2);
-    let encoded = general_purpose::STANDARD.encode(raw);
-
-    // Replace + → -, / → _, = → .
-    encoded
-        .replace('+', "-")
-        .replace('/', "_")
-        .replace('=', ".")
-}

 error_chain! {
     foreign_links {
@@ -46,6 +26,15 @@ error_chain! {
     }
 }

+
+fn has_blacklisted_class(element: &ElementRef, blacklist: &[&str]) -> bool {
+    element
+        .value()
+        .attr("class")
+        .map(|classes| classes.split_whitespace().any(|c| blacklist.contains(&c)))
+        .unwrap_or(false)
+}
+
 #[derive(Debug, Clone)]
 pub struct SxyprnProvider {
     url: String,
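The new has_blacklisted_class helper reads an element's class attribute and reports whether any of its classes is on a blacklist. A hedged usage sketch with scraper (the selector and the "ad"/"promo" class names are made up for illustration):

use scraper::{Html, Selector};

// Sketch only: skip fragments whose class list contains a blacklisted entry.
fn skip_blacklisted(fragment_html: &str) {
    let fragment = Html::parse_fragment(fragment_html);
    let selector = Selector::parse("div").unwrap();
    for element in fragment.select(&selector) {
        if has_blacklisted_class(&element, &["ad", "promo"]) {
            continue; // ignore blacklisted markup
        }
        // ... process the element ...
    }
}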
@@ -58,12 +47,22 @@ impl SxyprnProvider {
     }
     async fn get(&self, cache:VideoCache, pool:DbPool, page: u8, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {

+        let sort_string = match sort.as_str() {
+            "views" => "views",
+            "rating" => "rating",
+            "orgasmic" => "orgasmic",
+            _ => "latest",
+        };
         // Extract needed fields from options at the start
-        let language = options.language.clone().unwrap();
         let filter = options.filter.clone().unwrap();
+        let filter_string = match filter.as_str() {
+            "other" => "other",
+            "all" => "all",
+            _ => "top",
+        };
         let mut requester = options.requester.clone().unwrap();

-        let url_str = format!("{}/blog/all/{}.html", self.url, ((page as u32)-1)*20);
+        let url_str = format!("{}/blog/all/{}.html?fl={}&sm={}", self.url, ((page as u32)-1)*20, filter_string, sort_string);

         let old_items = match cache.get(&url_str) {
             Some((time, items)) => {
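With the new filter and sort parameters appended, the listing URL for page 2 under the defaults ("top", "latest") comes out as below; the base URL is assumed here, since self.url is set elsewhere:

#[test]
fn listing_url_example() {
    let (base, page): (&str, u32) = ("https://sxyprn.com", 2);
    let url_str = format!("{}/blog/all/{}.html?fl={}&sm={}", base, (page - 1) * 20, "top", "latest");
    assert_eq!(url_str, "https://sxyprn.com/blog/all/20.html?fl=top&sm=latest");
}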
@@ -92,14 +91,18 @@ impl SxyprnProvider {
     }

     async fn query(&self, cache: VideoCache, pool:DbPool, page: u8, query: &str, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
+        let sort_string = match sort.as_str() {
+            "views" => "views",
+            "rating" => "trending",
+            "orgasmic" => "orgasmic",
+            _ => "latest",
+        };
         // Extract needed fields from options at the start
-        let language = options.language.clone().unwrap();
-        let filter = options.filter.clone().unwrap();
         let mut requester = options.requester.clone().unwrap();
-        let search_string = query.replace(" ", "%20");
+        let search_string = query.replace(" ", "-");
         let url_str = format!(
-            "{}/{}/search/{}?page={}&sort={}",
-            self.url, language, search_string, page, sort
+            "{}/{}.html?page={}&sm={}",
+            self.url, search_string, page, sort_string
         );
         // Check our Video Cache. If the result is younger than 1 hour, we return it.
         let old_items = match cache.get(&url_str) {
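The search URL changes shape as well: spaces in the query are now joined with hyphens, and page and sort travel as query parameters. An illustrative check (base URL and query text are assumptions):

#[test]
fn search_url_example() {
    let search_string = "hot tub".replace(" ", "-");
    let url_str = format!("{}/{}.html?page={}&sm={}", "https://sxyprn.com", search_string, 1, "latest");
    assert_eq!(url_str, "https://sxyprn.com/hot-tub.html?page=1&sm=latest");
}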
@@ -136,116 +139,78 @@ impl SxyprnProvider {
             .split("post_el_small'")
             .collect::<Vec<&str>>()[1..]
             .to_vec();
-        let mut urls: Vec<String> = vec![];
+        let mut items: Vec<VideoItem> = Vec::new();
         for video_segment in &raw_videos {
-            let vid = video_segment.split("\n").collect::<Vec<&str>>();
-            for (index, line) in vid.iter().enumerate() {
-                println!("Line {}: {}", index, line.to_string().trim());
-            }
-
-            let url_str = video_segment.split("data-url='").collect::<Vec<&str>>()[1]
-                .split("'")
-                .collect::<Vec<&str>>()[0]
-                .to_string();
-            urls.push(url_str.clone());
-            break;
-        }
-        let futures = urls.into_iter().map(|el| self.get_video_item(el.clone(), pool.clone(), requester.clone()));
-        let results: Vec<Result<VideoItem>> = join_all(futures).await;
-        let video_items: Vec<VideoItem> = results
-            .into_iter()
-            .filter_map(Result::ok)
-            .collect();
-
-        return video_items;
-    }
-
-    async fn get_video_item(&self, url_str: String, pool: DbPool, mut requester: Requester) -> Result<VideoItem> {
-        let mut conn = pool.get().expect("couldn't get db connection from pool");
-        let db_result = db::get_video(&mut conn,url_str.clone());
-        match db_result {
-            Ok(Some(entry)) => {
-                let video_item: VideoItem = serde_json::from_str(entry.as_str()).unwrap();
-                return Ok(video_item)
-            }
-            Ok(None) => {
-            }
-            Err(e) => {
-                println!("Error fetching video from database: {}", e);
-            }
-        }
-        drop(conn);
-        let vid = requester.get(&url_str).await.unwrap().to_string();
-        let mut title = vid.split("<title>").collect::<Vec<&str>>()[1]
-            .split(" #")
-            .collect::<Vec<&str>>()[0].trim()
-            .to_string();
-        title = decode(title.as_bytes()).to_string().unwrap_or(title);
-        let thumb = format!("https:{}", vid.split("<meta property='og:image' content='").collect::<Vec<&str>>()[1]
-            .split("\"")
-            .collect::<Vec<&str>>()[0]
-            .to_string());
-
-        let raw_duration = vid.split("duration:<b>").collect::<Vec<&str>>()[1]
-            .split("</b>")
-            .collect::<Vec<&str>>()[0]
-            .to_string();
-        let duration = raw_duration.parse::<u32>().unwrap_or(0);
-
-        let id = url_str.split("/").collect::<Vec<&str>>().last().unwrap().replace(".html", "")
-            .to_string();
-        let mut tags = vec![];
-        if vid.split("splitter_block_header").collect::<Vec<&str>>()[0].contains("hash_link"){
-            for tag_snippet in vid.split("splitter_block_header").collect::<Vec<&str>>()[0].split("hash_link").collect::<Vec<&str>>()[1..].to_vec(){
-                let tag = tag_snippet.split("<").collect::<Vec<&str>>()[0].trim()
-                    .to_string();
-                if !tag.is_empty(){
-                    tags.push(tag.replace("#", ""));
-                }
-            }
-        }
-        let video_url_pre_str = vid.split("data-vnfo").collect::<Vec<&str>>()[1]
-            .split("\":\"").collect::<Vec<&str>>()[1]
-            .split("\"").collect::<Vec<&str>>()[0]
-            .replace("\\", "")
-            .to_string();
-        println!("Video URL pre str: {}", video_url_pre_str);
-        let video_request = requester.get(&url_str).await.unwrap();
-        let mut video_url_parts = vid.split("m3u8").collect::<Vec<&str>>()[1]
-            .split("https").collect::<Vec<&str>>()[0]
-            .split("|").collect::<Vec<&str>>();
-        video_url_parts.reverse();
-        let video_url = format!("https://{}.{}/{}-{}-{}-{}-{}/playlist.m3u8",
-            video_url_parts[1],
-            video_url_parts[2],
-            video_url_parts[3],
-            video_url_parts[4],
-            video_url_parts[5],
-            video_url_parts[6],
-            video_url_parts[7]
-        );
-        let video_item = VideoItem::new(
-            id,
-            title,
-            video_url.clone(),
-            "sxyprn".to_string(),
-            thumb,
-            duration,
-        )
-        .tags(tags)
-        ;
-        let mut conn = pool.get().expect("couldn't get db connection from pool");
-        let insert_result = db::insert_video(&mut conn, &url_str, &serde_json::to_string(&video_item)?);
-        match insert_result{
-            Ok(_) => (),
-            Err(e) => {println!("{:?}", e); }
-        }
-        drop(conn);
-
-        return Ok(video_item);
+            // let vid = video_segment.split("\n").collect::<Vec<&str>>();
+            // for (index, line) in vid.iter().enumerate() {
+            //     println!("Line {}: {}", index, line.to_string().trim());
+            // }
+            // println!("\n\n\n");
+
+            let video_url = format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}",video_segment.split("/post/").collect::<Vec<&str>>()[1]
+                .split("'").collect::<Vec<&str>>()[0]
+                .to_string());
+
+            let mut title_parts = video_segment.split("post_text").collect::<Vec<&str>>()[1].split("style=''>").collect::<Vec<&str>>()[1]
+                .split("</div>")
+                .collect::<Vec<&str>>()[0]
+                ;
+            println!("Title parts: {}", title_parts);
+            let document = Html::parse_document(title_parts);
+            let selector = Selector::parse("*").unwrap();
+
+            let mut texts = Vec::new();
+            for element in document.select(&selector) {
+                let text = element.text().collect::<Vec<_>>().join(" ");
+                if !text.trim().is_empty() {
+                    texts.push(text.trim().to_string());
+                }
+            }
+
+            println!("Texts: {:?}", texts);
+            let mut title = texts[0].clone();
+            // html decode
+            title = decode(title.as_bytes()).to_string().unwrap_or(title).replace(" "," ");
+            // println!("Title: {}", title);
+            let id = video_url.split("/").collect::<Vec<&str>>()[6].to_string();
+            let thumb = format!("https:{}",video_segment.split("<img class='mini_post_vid_thumb lazyload'").collect::<Vec<&str>>()[1]
+                .split("data-src='").collect::<Vec<&str>>()[1]
+                .split("'")
+                .collect::<Vec<&str>>()[0]
+                .to_string());
+
+            let preview = format!("https:{}",video_segment
+                .split("class='hvp_player'").collect::<Vec<&str>>()[1]
+                .split(" src='").collect::<Vec<&str>>()[1]
+                .split("'")
+                .collect::<Vec<&str>>()[0]
+                .to_string());
+
+            let views= video_segment
+                .split("<strong>·</strong> ").collect::<Vec<&str>>()[1]
+                .split(" ")
+                .collect::<Vec<&str>>()[0]
+                .to_string();
+
+            let video_item = VideoItem::new(
+                id,
+                title,
+                video_url.to_string(),
+                "sxyprn".to_string(),
+                thumb,
+                0,
+            )
+            .preview(preview)
+            .views(views.parse::<u32>().unwrap_or(0))
+            ;
+            items.push(video_item);
+        }
+        return items;

     }

 }

 impl Provider for SxyprnProvider {
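The rewritten listing parser no longer resolves an m3u8 playlist per video; each item's URL now points back at the app's own /proxy/sxyprn route, built from the post path found in the scraped markup. A sketch of that construction (the href value is invented for the example):

#[test]
fn proxy_url_example() {
    let href = "/post/abcdef0123456789.html'>"; // made-up scraped snippet
    let video_url = format!(
        "https://hottub.spacemoehre.de/proxy/sxyprn/post/{}",
        href.split("/post/").collect::<Vec<&str>>()[1]
            .split("'").collect::<Vec<&str>>()[0]
    );
    assert_eq!(video_url, "https://hottub.spacemoehre.de/proxy/sxyprn/post/abcdef0123456789.html");
}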
src/proxies/mod.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
+use ntex::web;
+
+use crate::{proxies::sxyprn::SxyprnProxy, util::{cache::VideoCache, requester::Requester}};
+
+pub mod sxyprn;
+
+#[derive(Debug, Clone)]
+pub enum AnyProxy {
+    Sxyprn(SxyprnProxy),
+}
+
+pub trait Proxy {
+    async fn get_video_url(
+        &self,
+        url: String,
+        requester: web::types::State<Requester>,
+    ) -> String;
+}
+
+
+impl Proxy for AnyProxy {
+
+    async fn get_video_url(
+        &self,
+        url: String,
+        requester: web::types::State<Requester>,
+    ) -> String {
+        println!(
+            "/proxy/video_url: url={:?}, provider={:?}",
+            url, self
+        );
+        match self {
+            AnyProxy::Sxyprn(p) => {
+                p.get_video_url(
+                    url,
+                    requester,
+                ).await
+            }
+        }
+    }
+}
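AnyProxy wraps each concrete proxy in an enum variant so that proxy.rs can resolve one by name (get_proxy) and dispatch through a single match rather than a trait object. A stripped-down illustration of the same shape, detached from ntex (all names here are stand-ins, not the commit's types):

// Stand-alone sketch of the enum-dispatch shape used by AnyProxy.
struct ExampleProxy;

impl ExampleProxy {
    fn label(&self) -> String {
        "example".to_string()
    }
}

enum AnyExample {
    Example(ExampleProxy),
}

impl AnyExample {
    fn label(&self) -> String {
        match self {
            AnyExample::Example(p) => p.label(), // forward to the wrapped proxy
        }
    }
}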
src/proxies/sxyprn.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
+use base64::{engine::general_purpose, Engine as _};
+use ntex::web;
+
+use crate::{proxies::Proxy, util::requester::Requester};
+
+/// Extracts digits from a string and sums them.
+fn ssut51(arg: &str) -> u32 {
+    arg.chars()
+        .filter(|c| c.is_ascii_digit())
+        .map(|c| c.to_digit(10).unwrap())
+        .sum()
+}
+
+/// Encodes a token: "<sum1>-<host>-<sum2>" using Base64 URL-safe variant.
+fn boo(sum1: u32, sum2: u32) -> String {
+    let raw = format!("{}-{}-{}", sum1, "sxyprn.com", sum2);
+    let encoded = general_purpose::STANDARD.encode(raw);
+
+    // Replace + → -, / → _, = → .
+    encoded
+        .replace('+', "-")
+        .replace('/', "_")
+        .replace('=', ".")
+}
+
+#[derive(Debug, Clone)]
+pub struct SxyprnProxy {
+}
+
+impl SxyprnProxy {
+    pub fn new() -> Self {
+        SxyprnProxy {
+        }
+    }
+
+    pub async fn get_video_url(
+        &self,
+        url: String,
+        requester: web::types::State<Requester>,
+    ) -> String {
+        let mut requester = requester.get_ref().clone();
+        let url = "https://sxyprn.com/".to_string() + &url;
+        let text = requester.get(&url).await.unwrap_or("".to_string());
+        if text.is_empty() {
+            return "".to_string();
+        }
+        let data_string = text.split("data-vnfo='").collect::<Vec<&str>>()[1]
+            .split("\":\"").collect::<Vec<&str>>()[1]
+            .split("\"}").collect::<Vec<&str>>()[0].replace("\\","");
+
+        let mut tmp = data_string
+            .split("/")
+            .map(|s| s.to_string())
+            .collect::<Vec<String>>();
+        tmp[1] = format!("{}8/{}", tmp[1], boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str())));
+        //preda
+        tmp[5] = format!(
+            "{}",
+            tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str())
+        );
+        let sxyprn_video_url = format!("https://sxyprn.com{}",tmp.join("/"));
+
+        let response = requester.get_raw(&sxyprn_video_url).await;
+        match response {
+            Ok(resp) => {
+                return resp.headers().get("Location").unwrap().to_str().unwrap_or("").to_string();
+            },
+            Err(e) => {
+                println!("Error fetching video URL: {}", e);
+            }
+        }
+        return "".to_string();
+    }
+}
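ssut51 and boo appear to undo the site's client-side URL scrambling: sum the digits of two path segments, pack "<sum1>-sxyprn.com-<sum2>" into Base64, then swap the characters that are not path-safe. A worked example with invented segment values (the Base64 result follows from the standard alphabet):

use base64::{engine::general_purpose, Engine as _};

#[test]
fn token_example() {
    // Digit sums of two made-up segments: "a4b2" -> 6, "90" -> 9.
    let sum1: u32 = "a4b2".chars().filter(|c| c.is_ascii_digit()).map(|c| c.to_digit(10).unwrap()).sum();
    let sum2: u32 = "90".chars().filter(|c| c.is_ascii_digit()).map(|c| c.to_digit(10).unwrap()).sum();
    // Same steps as boo(): encode "6-sxyprn.com-9", then map + / = to - _ .
    let token = general_purpose::STANDARD
        .encode(format!("{}-{}-{}", sum1, "sxyprn.com", sum2))
        .replace('+', "-")
        .replace('/', "_")
        .replace('=', ".");
    assert_eq!(token, "Ni1zeHlwcm4uY29tLTk.");
}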
src/proxy.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
+use ntex::web::{self, HttpRequest};
+
+use crate::proxies::sxyprn::SxyprnProxy;
+use crate::util::{cache::VideoCache, requester::Requester};
+use crate::proxies::*;
+
+pub fn config(cfg: &mut web::ServiceConfig) {
+    cfg.service(
+        web::resource("/sxyprn/{endpoint}*")
+            .route(web::post().to(sxyprn))
+            .route(web::get().to(sxyprn)),
+    )
+    // .service(
+    //     web::resource("/videos")
+    //         // .route(web::get().to(videos_get))
+    //         .route(web::post().to(videos_post)),
+    // )
+    ;
+}
+
+
+async fn sxyprn(req: HttpRequest,
+    requester: web::types::State<Requester>,) -> Result<impl web::Responder, web::Error> {
+    let proxy = get_proxy(req.uri().to_string().split("/").collect::<Vec<&str>>()[2]).unwrap();
+    let endpoint = req.match_info().query("endpoint").to_string();
+    println!("/proxy/sxyprn: endpoint={:?}", endpoint);
+    let video_url = match proxy.get_video_url(endpoint, requester).await{
+        url if url != "" => url,
+        _ => "Error".to_string(),
+    };
+    Ok(web::HttpResponse::Found()
+        .header("Location", video_url)
+        .finish())
+}
+
+fn get_proxy(proxy: &str) -> Option<AnyProxy> {
+    match proxy {
+        "sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
+        _ => None,
+    }
+}
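The handler takes the provider name from the third segment of the request URI (index 2 after splitting on "/"), which get_proxy then maps onto an AnyProxy variant. Illustration with a hard-coded path:

#[test]
fn provider_segment_example() {
    let uri = "/proxy/sxyprn/post/abcdef.html";
    assert_eq!(uri.split("/").collect::<Vec<&str>>()[2], "sxyprn");
}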
@@ -2,6 +2,7 @@ use wreq::header::HeaderValue;
 use wreq::redirect::Policy;
 use wreq::Client;
 use wreq::Proxy;
+use wreq::Response;
 use wreq::Version;
 use wreq_util::Emulation;
 use std::env;
@@ -57,6 +58,27 @@ impl Requester {
         }
     }
+
+    pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> {
+
+        let client = Client::builder()
+            .cert_verification(false)
+            .emulation(Emulation::Firefox136)
+            .cookie_store(true)
+            .build()
+            .expect("Failed to create HTTP client");
+        let mut request = client.get(url).version(Version::HTTP_11);
+        let proxy;
+        if self.proxy {
+            if let Ok(proxy_url) = env::var("BURP_URL") {
+                proxy = Proxy::all(&proxy_url).unwrap();
+                request = request.proxy(proxy.clone());
+            }
+        }
+
+        // Directly propagate the error from send()
+        request.send().await
+    }

     pub async fn get(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
         let mut request = self.client.get(url).version(Version::HTTP_11);
         let mut proxy;
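get_raw exists so the sxyprn proxy can read the Location header of the upstream redirect instead of a response body. A hedged usage sketch (placeholder URL; error handling kept minimal):

use crate::util::requester::Requester;

// Sketch: fetch a URL and return the redirect target, mirroring how the proxy uses get_raw().
async fn redirect_target(mut requester: Requester, url: &str) -> String {
    match requester.get_raw(url).await {
        Ok(resp) => resp
            .headers()
            .get("Location")
            .and_then(|value| value.to_str().ok())
            .unwrap_or("")
            .to_string(),
        Err(e) => {
            println!("request failed: {}", e);
            String::new()
        }
    }
}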