From 83a23de7c2be108f0abbf8e4e9e22c22c812c7b2 Mon Sep 17 00:00:00 2001
From: gratenes <69170374+Gratenes@users.noreply.github.com>
Date: Sun, 8 Sep 2024 22:00:51 -0400
Subject: [PATCH] reformat + handle more variations

---
 src/logic/v2.ts | 147 ++++++++++++++++++++++++++++++------------------
 1 file changed, 91 insertions(+), 56 deletions(-)

diff --git a/src/logic/v2.ts b/src/logic/v2.ts
index 6e7a72f..764501c 100644
--- a/src/logic/v2.ts
+++ b/src/logic/v2.ts
@@ -1,100 +1,135 @@
-import {getUrl} from "../utils";
+import { getUrl } from "../utils";
 
 const m3u8ContentTypes: string[] = [
-    'application/vnd.',
-    'video/mp2t',
-    'application/x-mpegurl',
-    'application/mpegurl',
-    'application/x-mpegurl',
-    'application/vnd.apple.mpegurl',
-    'audio/mpegurl',
-    'audio/x-mpegurl',
-    'video/x-mpegurl',
-    'application/vnd.apple.mpegurl.audio',
-    'application/vnd.apple.mpegurl.video'
+  "application/vnd.",
+  "video/mp2t",
+  "application/x-mpegurl",
+  "application/mpegurl",
+  "application/x-mpegurl",
+  "application/vnd.apple.mpegurl",
+  "audio/mpegurl",
+  "audio/x-mpegurl",
+  "video/x-mpegurl",
+  "application/vnd.apple.mpegurl.audio",
+  "application/vnd.apple.mpegurl.video",
 ];
 
+export const M3u8ProxyV2 = async (
+  request: Request
+): Promise<Response> => {
+  const url = new URL(request.url);
 
-export const M3u8ProxyV2 = async (request: Request): Promise<Response> => {
-    const url = new URL(request.url)
+  const scrapeUrlString = url.searchParams.get("url");
+  const scrapeHeadersString = url.searchParams.get("headers");
 
-    const scrapeUrlString = url.searchParams.get("url")
-    const scrapeHeadersString = url.searchParams.get("headers")
-
-    let scrapeHeadersObject: ScrapeHeaders = null
+  let scrapeHeadersObject: ScrapeHeaders = null;
   if (scrapeHeadersString) {
     try {
-      scrapeHeadersObject = JSON.parse(scrapeHeadersString)
+      scrapeHeadersObject = JSON.parse(scrapeHeadersString);
     } catch (e) {
-      console.log(e)
-      console.log("[M3u8 Proxy V2] Malformed scrape headers, could no parse using DEFAULT headers")
-      scrapeHeadersObject = null
+      console.log(e);
+      console.log(
+        "[M3u8 Proxy V2] Malformed scrape headers, could no parse using DEFAULT headers"
+      );
+      scrapeHeadersObject = null;
     }
   }
 
   if (!scrapeUrlString) {
-    return new Response(JSON.stringify({
-      success: false,
-      message: "no scrape url provided"
-    }), {
-      status: 400,
-    })
+    return new Response(
+      JSON.stringify({
+        success: false,
+        message: "no scrape url provided",
+      }),
+      {
+        status: 400,
+      }
+    );
   }
 
-  const scrapeUrl = new URL(scrapeUrlString)
+  const scrapeUrl = new URL(scrapeUrlString);
 
   const headers: {
-    [key: string]: string
+    [key: string]: string;
   } = {
     "Access-Control-Allow-Origin": "*",
     "Access-Control-Allow-Methods": "GET, HEAD, POST, PUT, DELETE, OPTIONS",
     "Access-Control-Allow-Headers": "Content-Type",
     ...(typeof scrapeHeadersObject == "object" ? scrapeHeadersObject : {}),
-  }
+  };
 
-  console.log(headers)
+  const rangeHeader = request.headers.get("Range");
+  if (rangeHeader) {
+    headers["Range"] = rangeHeader;
+  }
 
   const response = await fetch(scrapeUrlString, {
     headers: headers,
-  })
-
+  });
 
   // get the content type of the response
-  const responseContentType = response.headers.get('Content-Type')?.toLowerCase()
-  let responseBody: BodyInit | null = response.body
-
-  if (responseContentType && m3u8ContentTypes.some(name => responseContentType.includes(name))) {
-    const m3u8File = await response.text()
-    const m3u8FileChunks = m3u8File.split("\n")
-    const m3u8AdjustedChunks: string[] = []
+  const responseContentType = response.headers
+    .get("Content-Type")
+    ?.toLowerCase();
+  let responseBody: BodyInit | null = response.body;
+
+  const isM3u8 =
+    scrapeUrl.pathname.endsWith(".m3u8") ||
+    (responseContentType &&
+      m3u8ContentTypes.some((name) => responseContentType.includes(name)));
+
+  console.log(`Is m3u8: ${isM3u8}`);
+
+  if (isM3u8) {
+    const m3u8File = await response.text();
+    const m3u8FileChunks = m3u8File.split("\n");
+    const m3u8AdjustedChunks: string[] = [];
 
     for (const line of m3u8FileChunks) {
       // lines that start with #'s are non data lines (they hold info like bitrate and other stuff)
       if (line.startsWith("#") || !line.trim()) {
-        m3u8AdjustedChunks.push(line)
+        if (line.startsWith('#EXT-X-MAP:URI="')) {
+          const url = getUrl(
+            line.replace('#EXT-X-MAP:URI="', "").replace('"', ""),
+            scrapeUrl
+          );
+          const searchParams = new URLSearchParams();
+          searchParams.set("url", url.toString());
+          if (scrapeHeadersString)
+            searchParams.set("headers", scrapeHeadersString);
+
+          m3u8AdjustedChunks.push(
+            `#EXT-X-MAP:URI="/v2?${searchParams.toString()}"`
+          );
+        } else {
+          m3u8AdjustedChunks.push(line);
+        }
         continue;
       }
 
-      const url = getUrl(line, scrapeUrl)
-      const searchParams = new URLSearchParams()
+      const url = getUrl(line, scrapeUrl);
+      const searchParams = new URLSearchParams();
 
-      searchParams.set('url', url.toString())
-      if (scrapeHeadersString) searchParams.set('headers', scrapeHeadersString)
+      searchParams.set("url", url.toString());
+      if (scrapeHeadersString) searchParams.set("headers", scrapeHeadersString);
 
-      m3u8AdjustedChunks.push(`/v2?${searchParams.toString()}`)
+      m3u8AdjustedChunks.push(`/v2?${searchParams.toString()}`);
     }
 
-    responseBody = m3u8AdjustedChunks.join("\n")
+    responseBody = m3u8AdjustedChunks.join("\n");
   }
 
-  const responseHeaders = new Headers(response.headers)
-  responseHeaders.set("Access-Control-Allow-Origin", "*")
+  const responseHeaders = new Headers(response.headers);
+  responseHeaders.set("Access-Control-Allow-Origin", "*");
 
   return new Response(responseBody, {
     status: response.status,
     statusText: response.statusText,
     headers: responseHeaders,
-  })
-}
-
-type ScrapeHeaders = string | null | {
-  [key: string]: string
-}
\ No newline at end of file
+  });
+};
+
+type ScrapeHeaders =
+  | string
+  | null
+  | {
+      [key: string]: string;
+    };
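
Reviewer note: both the segment branch and the new #EXT-X-MAP branch delegate URL resolution to getUrl from ../utils, which is not part of this diff. As a point of reference only, here is a minimal sketch of what that helper is assumed to do (resolve absolute, root-relative, and relative playlist entries against the URL the playlist was fetched from); the actual implementation in src/utils may differ.

// Hypothetical sketch of getUrl (the real helper lives in ../utils and is not
// shown in this patch); assumed behaviour: resolve a playlist entry against
// the URL the playlist itself was fetched from.
export const getUrl = (input: string, baseUrl: URL): URL => {
  // Absolute entries pass through untouched.
  if (input.startsWith("http://") || input.startsWith("https://")) {
    return new URL(input);
  }
  // Root-relative ("/seg.ts") and relative ("seg.ts") entries are resolved
  // against the base URL; the URL constructor handles both forms.
  return new URL(input, baseUrl);
};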
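
For context on what the rewrite produces end to end, a small client-side usage sketch follows. The /v2 route and the url/headers query parameters come from the code above; the upstream CDN URL and Referer header are made-up placeholder values.

// Illustrative only: fetch a playlist through the proxy. The upstream URL and
// header values below are placeholders, not real endpoints.
const fetchPlaylistThroughProxy = async (): Promise<string> => {
  const params = new URLSearchParams();
  params.set("url", "https://cdn.example.com/stream/master.m3u8");
  params.set(
    "headers",
    JSON.stringify({ Referer: "https://player.example.com/" })
  );

  const proxied = await fetch(`/v2?${params.toString()}`);

  // In the returned playlist, a segment line such as "segment-001.ts" is
  // rewritten to "/v2?url=<absolute segment url>&headers=<same headers>", and
  // with this patch an '#EXT-X-MAP:URI="init.mp4"' line is rewritten the same
  // way, so fMP4 init segments also flow through the proxy. Byte-range segment
  // requests keep working because the worker now forwards the client's Range
  // header upstream.
  return proxied.text();
};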