Progress bar optimization, live recording optimization, socks5 proxy support
xhlove committed Nov 7, 2021
1 parent be7dfa6 commit 66243a1
Showing 12 changed files with 79 additions and 71 deletions.
22 changes: 10 additions & 12 deletions README.md
@@ -133,9 +133,10 @@

### Proxy

The `--proxy` option currently only supports `http` proxies, example -> `http://127.0.0.1:10809`
The `--proxy` option now supports both `socks5` and `http` proxies, for example:

Support for `socks5` proxies will be attempted later
- `socks5://127.0.0.1:10808`
- `http://127.0.0.1:10809`

### m3u8 decryption

@@ -222,12 +223,11 @@ positional arguments:
optional arguments:
-v, --version print version and exit
-h, --help print help message and exit
-live, --live live mode
-live-duration LIVE_DURATION, --live-duration LIVE_DURATION
--live live mode
--live-duration LIVE_DURATION
live record time, format HH:MM:SS, example 00:00:30
will record about 30s
-name NAME, --name NAME
specific stream base name
--name NAME specific stream base name
--base-url BASE_URL set base url for Stream
--resolution {,270,360,480,540,576,720,1080,2160}
auto choose target quality
@@ -244,11 +244,8 @@ optional arguments:
--limit-per-host LIMIT_PER_HOST
increase the value if your connection to the stream
host is poor, suggest >100 for DASH stream
--user-agent USER_AGENT
set user-agent headers for request
--referer REFERER set custom referer for request
--headers HEADERS set custom headers for request, separators is |, e.g.
"header1:value1|header2:value2"
--headers HEADERS read headers from headers.json, you can also use
custom config
--url-patch URL_PATCH
add some custom strings for all segments link
--overwrite overwrite output files
@@ -263,7 +260,8 @@ optional arguments:
--b64key B64KEY base64 format aes key, only for HLS standard
AES-128-CBC encryption
--hexiv HEXIV hex format aes iv
--proxy PROXY use http proxy, e.g. http://127.0.0.1:1080
--proxy PROXY use socks/http proxy, e.g. socks5://127.0.0.1:10808 or
http://127.0.0.1:10809
--disable-auto-exit disable auto exit after download end, GUI will use
this option
--parse-only parse only, not to download
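A typical invocation combining the new options (address and port below are placeholders) would look like `XstreamDL-CLI --proxy socks5://127.0.0.1:10808 "https://example.com/index.m3u8"`; per-request headers now come from `headers.json` rather than the removed `--user-agent`/`--referer` flags.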
12 changes: 5 additions & 7 deletions XstreamDL_CLI/cli.py
@@ -89,9 +89,9 @@ def print_version():
parser = ArgumentParser(prog='XstreamDL-CLI', usage='XstreamDL-CLI [OPTION]... URL/FILE/FOLDER...', description='A downloader that download the HLS/DASH stream', add_help=False)
parser.add_argument('-v', '--version', action='store_true', help='print version and exit')
parser.add_argument('-h', '--help', action='store_true', help='print help message and exit')
parser.add_argument('-live', '--live', action='store_true', help='live mode')
parser.add_argument('-live-duration', '--live-duration', default='', help='live record time, format HH:MM:SS, example 00:00:30 will record about 30s')
parser.add_argument('-name', '--name', default='', help='specific stream base name')
parser.add_argument('--live', action='store_true', help='live mode')
parser.add_argument('--live-duration', default='', help='live record time, format HH:MM:SS, example 00:00:30 will record about 30s')
parser.add_argument('--name', default='', help='specific stream base name')
parser.add_argument('--base-url', default='', help='set base url for Stream')
parser.add_argument('--resolution', default='', choices=['', '270', '360', '480', '540', '576', '720', '1080', '2160'], help='auto choose target quality')
parser.add_argument('--best-quality', action='store_true', help='auto choose best quality for dash streams')
@@ -102,9 +102,7 @@ def print_version():
parser.add_argument('--select', action='store_true', help='show stream to select and download, default is to download all')
parser.add_argument('--disable-force-close', action='store_true', help='default make all connections closed securely, but it will make DL speed slower')
parser.add_argument('--limit-per-host', default=4, help='increase the value if your connection to the stream host is poor, suggest >100 for DASH stream')
parser.add_argument('--user-agent', default='', help='set user-agent headers for request')
parser.add_argument('--referer', default='', help='set custom referer for request')
parser.add_argument('--headers', default='', help='set custom headers for request, separators is |, e.g. "header1:value1|header2:value2"')
parser.add_argument('--headers', default='headers.json', help='read headers from headers.json, you can also use custom config')
parser.add_argument('--url-patch', default='', help='add some custom strings for all segments link')
parser.add_argument('--overwrite', action='store_true', help='overwrite output files')
parser.add_argument('--raw-concat', action='store_true', help='concat content as raw')
@@ -114,7 +112,7 @@ def print_version():
parser.add_argument('--key', default=None, help='<id>:<k>, <id> is either a track ID in decimal or a 128-bit KID in hex, <k> is a 128-bit key in hex')
parser.add_argument('--b64key', default=None, help='base64 format aes key, only for HLS standard AES-128-CBC encryption')
parser.add_argument('--hexiv', default=None, help='hex format aes iv')
parser.add_argument('--proxy', default=None, help='use http proxy, e.g. http://127.0.0.1:1080')
parser.add_argument('--proxy', default='', help='use socks/http proxy, e.g. socks5://127.0.0.1:10808 or http://127.0.0.1:10809')
parser.add_argument('--disable-auto-exit', action='store_true', help='disable auto exit after download end, GUI will use this option')
parser.add_argument('--parse-only', action='store_true', help='parse only, not to download')
parser.add_argument('--show-init', action='store_true', help='show initialization to help you identify same name stream')
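The option renames above drop the single-dash aliases (`-live`, `-live-duration`, `-name`); only the `--` forms remain. A tiny standalone argparse sketch (not the project's own test code) showing how the changed flags now parse:

```python
from argparse import ArgumentParser

# minimal reproduction of the renamed options; help strings shortened
parser = ArgumentParser(prog='XstreamDL-CLI')
parser.add_argument('--live', action='store_true', help='live mode')
parser.add_argument('--live-duration', default='', help='live record time, format HH:MM:SS')
parser.add_argument('--proxy', default='', help='use socks/http proxy')

args = parser.parse_args(['--live', '--live-duration', '00:00:30', '--proxy', 'socks5://127.0.0.1:10808'])
print(args.live, args.live_duration, args.proxy)  # True 00:00:30 socks5://127.0.0.1:10808
```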
7 changes: 2 additions & 5 deletions XstreamDL_CLI/cmdargs.py
@@ -1,4 +1,3 @@
from typing import Union
from pathlib import Path


@@ -21,9 +20,7 @@ def __init__(self):
self.select = None # type: bool
self.disable_force_close = None # type: bool
self.limit_per_host = None # type: int
self.user_agent = None # type: str
self.referer = None # type: str
self.headers = None # type: Union[str, dict]
self.headers = None # type: str
self.url_patch = None # type: str
self.overwrite = None # type: bool
self.raw_concat = None # type: bool
@@ -33,7 +30,7 @@ def __init__(self):
self.key = None # type: str
self.b64key = None # type: str
self.hexiv = None # type: str
self.proxy = None # type: Union[str, None]
self.proxy = None # type: str
self.disable_auto_exit = None # type: bool
self.parse_only = None # type: bool
self.show_init = None # type: bool
19 changes: 17 additions & 2 deletions XstreamDL_CLI/downloader.py
@@ -9,6 +9,7 @@
from asyncio import new_event_loop
from asyncio import AbstractEventLoop, Future, Task
from aiohttp import ClientSession, ClientResponse, TCPConnector, client_exceptions
from aiohttp_socks import ProxyConnector
from concurrent.futures._base import TimeoutError, CancelledError
from XstreamDL_CLI.cmdargs import CmdArgs
from XstreamDL_CLI.models.stream import Stream
@@ -131,6 +132,16 @@ def get_connector(args: CmdArgs):
    the connector may already be closed once it has been used by a ClientSession,
    so a new one has to be created if it is needed again
'''
if args.proxy != '':
return ProxyConnector.from_url(
args.proxy,
ttl_dns_cache=500,
ssl=False,
limit_per_host=args.limit_per_host,
limit=500,
force_close=not args.disable_force_close,
enable_cleanup_closed=not args.disable_force_close
)
return TCPConnector(
ttl_dns_cache=500,
ssl=False,
@@ -173,6 +184,10 @@ def update_total_size(self, total_size: int):
self.total_size = total_size
self.update_progress(self.downloaded_count, self.total_size, self.downloaded_size)

def decrease_total_count(self):
self.total_count -= 1
self.update_progress(self.downloaded_count, self.total_size, self.downloaded_size)

def add_downloaded_size(self, downloaded_size: int):
self.downloaded_size += downloaded_size
self.update_progress(self.downloaded_count, self.total_size, self.downloaded_size)
@@ -392,15 +407,15 @@ def cancel_all_task() -> None:
return results

async def download(self, client: ClientSession, stream: Stream, segment: Segment):
proxy, headers = self.args.proxy, self.args.headers
status, flag = 'EXIT', True
try:
async with client.get(segment.url + self.args.url_patch, proxy=proxy, headers=headers) as resp: # type: ClientResponse
async with client.get(segment.url + self.args.url_patch, headers=self.args.headers) as resp: # type: ClientResponse
_flag = True
self.logger.debug(f'{segment.name} status {resp.status}, {segment.url + self.args.url_patch}')
if resp.status in [403, 404]:
status = 'STATUS_SKIP'
flag = False
self.xprogress.decrease_total_count()
segment.skip_concat = True
if resp.status == 405:
status = 'STATUS_CODE_ERROR'
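The change to `get_connector` is the core of the socks5 support: when `--proxy` is set, `ProxyConnector.from_url(...)` replaces the plain `TCPConnector` with the same keep-alive/limit options; the same if/else appears again in `extractor.py` and `xkey.py` below. A minimal self-contained sketch of that selection, assuming `aiohttp_socks` is installed and something is actually listening on the example proxy address:

```python
import asyncio
from aiohttp import ClientSession, TCPConnector
from aiohttp_socks import ProxyConnector

async def fetch(url: str, proxy: str = '') -> bytes:
    # ProxyConnector.from_url understands both socks5:// and http:// proxy URLs
    if proxy != '':
        connector = ProxyConnector.from_url(proxy, ssl=False)
    else:
        connector = TCPConnector(ssl=False)
    async with ClientSession(connector=connector) as client:
        async with client.get(url) as resp:
            return await resp.read()

if __name__ == '__main__':
    # placeholder URL and proxy - replace with real values
    data = asyncio.run(fetch('https://example.com', proxy='socks5://127.0.0.1:10808'))
    print(len(data))
```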
10 changes: 7 additions & 3 deletions XstreamDL_CLI/extractor.py
@@ -4,6 +4,7 @@
from logging import Logger
from aiohttp.connector import TCPConnector
from aiohttp import ClientSession, ClientResponse
from aiohttp_socks import ProxyConnector
from XstreamDL_CLI.cmdargs import CmdArgs
from XstreamDL_CLI.models.stream import Stream
from XstreamDL_CLI.extractors.hls.parser import HLSParser
@@ -74,9 +75,12 @@ def fetch_metadata(self, uri: str, parent_stream: Stream = None):
return streams

async def fetch(self, url: str) -> str:
proxy, headers = self.args.proxy, self.args.headers
async with ClientSession(connector=TCPConnector(ssl=False)) as client: # type: ClientSession
async with client.get(url, proxy=proxy, headers=headers) as resp: # type: ClientResponse
if self.args.proxy != '':
connector = ProxyConnector.from_url(self.args.proxy, ssl=False)
else:
connector = TCPConnector(ssl=False)
async with ClientSession(connector=connector) as client: # type: ClientSession
async with client.get(url, headers=self.args.headers) as resp: # type: ClientResponse
return str(resp.url), self.load_raw2text(await resp.read())

def raw2streams(self, uri_type: str, uri: str, content: str, parent_stream: Stream) -> List[Stream]:
12 changes: 10 additions & 2 deletions XstreamDL_CLI/extractors/dash/mpd.py
@@ -18,8 +18,8 @@ def __init__(self, name: str):
# only use when type is 'dynamic' which specifies the smallest period between potential changes to the MPD
self.minimumUpdatePeriod = None # type: str
# time of client to fetch the mpd content
self.publishTime = None # type: str
self.availabilityStartTime = None # type: str
self.publishTime = None # type: datetime
self.availabilityStartTime = None # type: datetime
self.timeShiftBufferDepth = None # type: str
self.suggestedPresentationDelay = None # type: str

@@ -36,5 +36,13 @@ def generate(self):
except Exception:
try:
self.availabilityStartTime = datetime.strptime(self.availabilityStartTime, '%Y-%m-%dT%H:%M:%SZ')
except Exception:
pass
if isinstance(self.publishTime, str):
try:
self.publishTime = datetime.strptime(self.publishTime, '%Y-%m-%dT%H:%M:%S.%fZ')
except Exception:
try:
self.publishTime = datetime.strptime(self.publishTime, '%Y-%m-%dT%H:%M:%SZ')
except Exception:
pass
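`publishTime` parsing now mirrors the existing `availabilityStartTime` handling: try the fractional-second ISO form first, then fall back to whole seconds. A standalone sketch of that fallback (the helper name is illustrative, not part of the project):

```python
from datetime import datetime

def parse_mpd_time(value: str) -> datetime:
    # the same two formats tried in MPD.generate() above
    for fmt in ('%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    raise ValueError(f'unrecognized MPD timestamp: {value}')

print(parse_mpd_time('2021-11-07T12:30:45.123Z'))  # 2021-11-07 12:30:45.123000
print(parse_mpd_time('2021-11-07T12:30:45Z'))      # 2021-11-07 12:30:45
```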
4 changes: 2 additions & 2 deletions XstreamDL_CLI/extractors/dash/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,8 +316,8 @@ def walk_s(self, segmenttimeline: SegmentTimeline, st: SegmentTemplate, represen
# SegmentTimeline.S.t / timescale + (mpd.availabilityStartTime + Period.start) <= time.time()
base_time = None # type: int
assert isinstance(self.root.availabilityStartTime, datetime), 'report mpd to me'
current_utctime = datetime.utcnow().timestamp()
presentation_start = (period.start - st.presentationTimeOffset / st.timescale + 30) * 1000
current_utctime = self.root.publishTime.timestamp()
presentation_start = (period.start - st.presentationTimeOffset / st.timescale) * 1000
start_utctime = (self.root.availabilityStartTime + timedelta(milliseconds=presentation_start)).timestamp()
self.logger.debug(f'mpd.presentationTimeOffset {st.presentationTimeOffset} timescale {st.timescale}')
self.logger.debug(f'mpd.availabilityStartTime {self.root.availabilityStartTime} Period.start {period.start}')
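With `publishTime` now a `datetime`, `walk_s` anchors the live edge to the MPD itself instead of `datetime.utcnow()`, so a skewed local clock no longer shifts which `SegmentTimeline` entries count as available, and the old 30-second fudge on `presentation_start` is gone. A rough sketch of the arithmetic with made-up values:

```python
from datetime import datetime, timedelta

availabilityStartTime = datetime(2021, 11, 7, 10, 0, 0)  # from the MPD
publishTime = datetime(2021, 11, 7, 12, 0, 0)            # from the MPD, replaces utcnow()
period_start = 0.0            # Period@start, seconds
presentationTimeOffset = 0    # in timescale units
timescale = 1000

presentation_start = (period_start - presentationTimeOffset / timescale) * 1000
start_utctime = (availabilityStartTime + timedelta(milliseconds=presentation_start)).timestamp()
current_utctime = publishTime.timestamp()
print(current_utctime - start_utctime)  # 7200.0 seconds of timeline available at publish time
```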
10 changes: 7 additions & 3 deletions XstreamDL_CLI/extractors/hls/ext/xkey.py
@@ -1,6 +1,7 @@

import asyncio
from logging import Logger
from aiohttp_socks import ProxyConnector
from aiohttp import ClientSession, ClientResponse
from aiohttp.connector import TCPConnector

@@ -82,9 +83,12 @@ def gen_hls_key_uri(self, home_url: str, base_url: str):
return 'http', base_url + '/' + self.uri

async def fetch(self, url: str, args: CmdArgs) -> bytes:
proxy, headers = args.proxy, args.headers
async with ClientSession(connector=TCPConnector(ssl=False)) as client: # type: ClientSession
async with client.get(url, proxy=proxy, headers=headers) as resp: # type: ClientResponse
        if args.proxy != '':
            connector = ProxyConnector.from_url(args.proxy, ssl=False)
else:
connector = TCPConnector(ssl=False)
async with ClientSession(connector=connector) as client: # type: ClientSession
async with client.get(url, headers=args.headers) as resp: # type: ClientResponse
return await resp.content.read()

def load(self, args: CmdArgs, custom_xkey: 'XKey', logger: Logger):
46 changes: 13 additions & 33 deletions XstreamDL_CLI/headers/default.py
@@ -1,46 +1,26 @@
import sys
import json
from pathlib import Path
from logging import Logger
from XstreamDL_CLI.cmdargs import CmdArgs


class Headers:
def __init__(self, logger: Logger):
self.logger = logger
self.referer = ''
self.user_agent = (
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
'AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/88.0.4324.190 Safari/537.36'
)
self.headers = {}

def get(self, args: CmdArgs) -> dict:
self.__generate(args)
return self.headers

def __generate(self, args: CmdArgs):
'''
        - if no user-agent is specified, the default value is used
        - if a user-agent is specified, that value is used; if it is "" then the user-agent is ""
        - if no referer is specified, no referer is set; otherwise it is set
'''
if args.user_agent != '':
self.headers['user-agent'] = args.user_agent
if getattr(sys, 'frozen', False):
config_path = Path(sys.executable).parent / args.headers
else:
self.headers['user-agent'] = self.user_agent
if args.referer != '':
self.headers['referer'] = args.referer
if args.headers != '':
self.__add_headers(args.headers)
config_path = Path(__file__).parent.parent / args.headers
if config_path.exists() is False:
self.logger.warning(f'{config_path.stem} is not exists, put your config file to {config_path.parent.resolve().as_posix()}')
return
try:
self.headers = json.loads(config_path.read_text(encoding='utf-8'))
except Exception as e:
self.logger.error(f'try to load {config_path.resolve().as_posix()} failed', exc_info=e)
self.logger.debug(f'use headers:\n{json.dumps(self.headers, ensure_ascii=False, indent=4)}')

def __add_headers(self, text: str):
text = text.strip()
for one_header in text.split('|'):
data = one_header.strip().split(':', maxsplit=1)
if len(data) == '':
continue
if len(data) == 1:
self.headers[data[0]] = ''
else:
self.headers[data[0]] = data[1]
return self.headers
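The rewritten `Headers` class drops the `--user-agent`/`--referer`/`--headers`-string handling and instead loads a JSON config, looked up next to the frozen executable (e.g. a PyInstaller build, where `sys.frozen` is set) or relative to the package source otherwise. A simplified standalone sketch of that lookup (the path layout here is illustrative):

```python
import sys
import json
from pathlib import Path

def load_headers(config_name: str = 'headers.json') -> dict:
    # packaged build: look next to the executable; source checkout: look next to this file
    if getattr(sys, 'frozen', False):
        config_path = Path(sys.executable).parent / config_name
    else:
        config_path = Path(__file__).parent / config_name
    if not config_path.exists():
        return {}
    return json.loads(config_path.read_text(encoding='utf-8'))

print(load_headers())  # {} unless a headers.json sits next to this script
```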
4 changes: 2 additions & 2 deletions XstreamDL_CLI/models/stream.py
@@ -89,8 +89,8 @@ def live_segments_extend(self, segments: List[Segment], has_init: bool):
self.segments.extend(_segments)

def calc(self):
self.duration = sum([segment.duration for segment in self.segments])
self.filesize = sum([segment.filesize for segment in self.segments])
self.duration = sum([segment.duration for segment in self.segments if segment.skip_concat is False])
self.filesize = sum([segment.filesize for segment in self.segments if segment.skip_concat is False])
self.filesize = self.filesize / 1024 / 1024

def get_name(self):
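Together with `decrease_total_count()` in `downloader.py`, the `calc()` change means segments that came back 403/404 during a live recording are excluded from the reported duration and size instead of stalling the progress bar. A toy illustration (the class and values are made up):

```python
from dataclasses import dataclass

@dataclass
class Seg:
    duration: float            # seconds
    filesize: int              # bytes
    skip_concat: bool = False  # set when the segment responded 403/404

segments = [Seg(6.0, 900_000), Seg(6.0, 880_000, skip_concat=True), Seg(6.0, 910_000)]
duration = sum(s.duration for s in segments if s.skip_concat is False)
filesize = sum(s.filesize for s in segments if s.skip_concat is False) / 1024 / 1024
print(f'{duration:.1f}s, {filesize:.2f}MB')  # 12.0s, 1.73MB - the skipped segment is excluded
```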
3 changes: 3 additions & 0 deletions headers.json
@@ -0,0 +1,3 @@
{
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36"
}
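With this default config every request carries a Chrome user-agent; additional entries such as a referer or cookie can presumably be added as plain key/value pairs, since the rewritten `Headers` class loads the JSON object verbatim.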
1 change: 1 addition & 0 deletions requirements.txt
@@ -1,3 +1,4 @@
aiohttp
aiohttp_socks
argparse
pycryptodome
