beeg fixed

This commit is contained in:
Simon
2026-02-09 18:27:26 +00:00
parent 7b90c05a29
commit 5baca567cb

View File

@@ -258,6 +258,36 @@ def stream_video():
if debug_enabled:
dbg(f"upstream status={resp.status_code} content_type={resp.headers.get('Content-Type')} content_length={resp.headers.get('Content-Length')}")
content_iter = None
first_chunk = b""
if request.method != 'HEAD':
content_iter = resp.iter_content(chunk_size=1024 * 16)
try:
first_chunk = next(content_iter)
except StopIteration:
first_chunk = b""
def looks_like_m3u8(chunk):
    """Heuristically detect an HLS playlist from its leading bytes.

    Returns True when the '#EXTM3U' signature appears within the first
    1024 bytes (after dropping any leading UTF-8 BOM bytes), else False.
    """
    if not chunk:
        return False
    # lstrip with a bytes argument strips any of those byte values,
    # which covers an UTF-8 BOM prefix before the signature check.
    return b'#EXTM3U' in chunk.lstrip(b'\xef\xbb\xbf')[:1024]
if looks_like_m3u8(first_chunk):
remaining = b"".join(chunk for chunk in content_iter if chunk)
body_bytes = first_chunk + remaining
base_url = resp.url
encoding = resp.encoding
resp.close()
dbg("detected m3u8 by content sniff")
return proxy_hls_playlist(
target_url,
referer_hint=referer,
prefetched_body=body_bytes,
prefetched_base_url=base_url,
prefetched_encoding=encoding,
)
hop_by_hop = {
'connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization',
'te', 'trailers', 'transfer-encoding', 'upgrade'
@@ -291,7 +321,9 @@ def stream_video():
def generate():
try:
for chunk in resp.iter_content(chunk_size=1024 * 16):
if first_chunk:
yield first_chunk
for chunk in content_iter or resp.iter_content(chunk_size=1024 * 16):
if chunk:
yield chunk
finally:
@@ -299,26 +331,21 @@ def stream_video():
return Response(generate(), status=resp.status_code, headers=forwarded_headers)
def proxy_hls_playlist(playlist_url, referer_hint=None):
dbg(f"proxy_hls_playlist url={playlist_url} referer_hint={referer_hint}")
headers = build_upstream_headers(referer_hint or "")
if 'User-Agent' not in headers:
headers['User-Agent'] = 'Mozilla/5.0'
if 'Accept' not in headers:
headers['Accept'] = '*/*'
resp = session.get(playlist_url, headers=headers, timeout=30)
resp.raise_for_status()
base_url = resp.url
if referer_hint:
referer = referer_hint
else:
referer = f"{urllib.parse.urlparse(base_url).scheme}://{urllib.parse.urlparse(base_url).netloc}/"
def decode_playlist_body(body_bytes, encoding=None):
    """Decode raw playlist bytes to text, defaulting to UTF-8.

    Never raises: an unknown codec name (LookupError) falls back to
    UTF-8, and undecodable byte sequences are replaced. Empty or falsy
    input yields the empty string.
    """
    if not body_bytes:
        return ""
    codec = encoding if encoding else "utf-8"
    try:
        text = body_bytes.decode(codec, errors="replace")
    except LookupError:
        # Upstream advertised a codec Python doesn't recognize.
        text = body_bytes.decode("utf-8", errors="replace")
    return text
def rewrite_hls_playlist(body_text, base_url, referer):
def proxied_url(target):
absolute = urljoin(base_url, target)
return f"/api/stream?url={urllib.parse.quote(absolute, safe='')}&referer={urllib.parse.quote(referer, safe='')}"
lines = resp.text.splitlines()
lines = body_text.splitlines()
rewritten = []
for line in lines:
stripped = line.strip()
@@ -333,12 +360,36 @@ def stream_video():
rewritten.append(line)
continue
rewritten.append(proxied_url(stripped))
if request.method == 'HEAD':
return Response("", status=200, content_type='application/vnd.apple.mpegurl')
body = "\n".join(rewritten)
return Response(body, status=200, content_type='application/vnd.apple.mpegurl')
def proxy_hls_playlist(playlist_url, referer_hint=None, prefetched_body=None, prefetched_base_url=None, prefetched_encoding=None):
    """Serve an HLS playlist with its URLs rewritten through the proxy.

    When the caller already read the playlist body while content-sniffing,
    it passes the bytes via ``prefetched_body`` (plus the final URL and
    declared encoding) so no second upstream request is made; otherwise
    the playlist is fetched here with the usual upstream headers.
    HEAD requests short-circuit to an empty m3u8-typed response.
    """
    dbg(f"proxy_hls_playlist url={playlist_url} referer_hint={referer_hint}")
    base_url = prefetched_base_url or playlist_url
    if prefetched_body is not None:
        # Reuse the body the sniffing caller already consumed.
        body_text = decode_playlist_body(prefetched_body, prefetched_encoding)
    else:
        headers = build_upstream_headers(referer_hint or "")
        headers.setdefault('User-Agent', 'Mozilla/5.0')
        headers.setdefault('Accept', '*/*')
        resp = session.get(playlist_url, headers=headers, timeout=30)
        resp.raise_for_status()
        # Follow redirects: resolve relative segment URLs against the
        # final URL, not the one we requested.
        base_url = resp.url
        body_text = resp.text
    if referer_hint:
        referer = referer_hint
    else:
        # Default the referer to the playlist's own origin.
        parsed = urllib.parse.urlparse(base_url)
        referer = f"{parsed.scheme}://{parsed.netloc}/"
    if request.method == 'HEAD':
        return Response("", status=200, content_type='application/vnd.apple.mpegurl')
    return rewrite_hls_playlist(body_text, base_url, referer)
if is_hls(video_url):
try:
dbg("detected input as hls")