mirror of
https://github.com/yt-dlp/yt-dlp.git
synced 2025-03-09 12:50:23 -05:00
chore: replace hard-coded limit with command-line option
This commit is contained in:
parent
050b6c1f29
commit
148b36a039
3 changed files with 17 additions and 10 deletions
|
@ -1822,17 +1822,17 @@ def process_ie_result(self, ie_result, download=True, extra_info=None):
|
|||
result_type = ie_result.get('_type', 'video')
|
||||
|
||||
if result_type in ('url', 'url_transparent'):
|
||||
if 'extraction_depth' in extra_info:
|
||||
extra_info['extraction_depth'] = 1 + extra_info.get('extraction_depth', 0)
|
||||
else:
|
||||
extra_info['extraction_depth'] = 0
|
||||
if self.params.get('max_extraction_depth', -1) > 0:
|
||||
if 'extraction_depth' in extra_info:
|
||||
extra_info['extraction_depth'] = 1 + extra_info.get('extraction_depth', 0)
|
||||
else:
|
||||
extra_info['extraction_depth'] = 0
|
||||
|
||||
# TODO: make command line arg with large or infinite default
|
||||
if extra_info['extraction_depth'] >= 20:
|
||||
raise ExtractorError(
|
||||
f"Too many hops for URL: {ie_result['url']}",
|
||||
expected=True,
|
||||
)
|
||||
if extra_info['extraction_depth'] >= self.params.get('max_extraction_depth'):
|
||||
raise ExtractorError(
|
||||
f"Reached maximum extraction depth for URL: {ie_result['url']}",
|
||||
expected=True,
|
||||
)
|
||||
|
||||
ie_result['url'] = sanitize_url(
|
||||
ie_result['url'], scheme='http' if self.params.get('prefer_insecure') else 'https')
|
||||
|
|
|
@ -268,6 +268,7 @@ def parse_retries(name, value):
|
|||
opts.retries = parse_retries('download', opts.retries)
|
||||
opts.fragment_retries = parse_retries('fragment', opts.fragment_retries)
|
||||
opts.extractor_retries = parse_retries('extractor', opts.extractor_retries)
|
||||
opts.max_extraction_depth = parse_retries('extractor', opts.max_extraction_depth)
|
||||
opts.file_access_retries = parse_retries('file access', opts.file_access_retries)
|
||||
|
||||
# Retry sleep function
|
||||
|
@ -841,6 +842,7 @@ def parse_options(argv=None):
|
|||
'file_access_retries': opts.file_access_retries,
|
||||
'fragment_retries': opts.fragment_retries,
|
||||
'extractor_retries': opts.extractor_retries,
|
||||
'max_extraction_depth': opts.max_extraction_depth,
|
||||
'retry_sleep_functions': opts.retry_sleep,
|
||||
'skip_unavailable_fragments': opts.skip_unavailable_fragments,
|
||||
'keep_fragments': opts.keep_fragments,
|
||||
|
|
|
@ -1883,6 +1883,11 @@ def _alias_callback(option, opt_str, value, parser, opts, nargs):
|
|||
'--extractor-retries',
|
||||
dest='extractor_retries', metavar='RETRIES', default=3,
|
||||
help='Number of retries for known extractor errors (default is %default), or "infinite"')
|
||||
extractor.add_option(
|
||||
'--max-extraction-depth',
|
||||
dest='max_extraction_depth', default=-1,
|
||||
help='Maximum depth when recursing into non-video url chains (default is unlimited)',
|
||||
)
|
||||
extractor.add_option(
|
||||
'--allow-dynamic-mpd', '--no-ignore-dynamic-mpd',
|
||||
action='store_true', dest='dynamic_mpd', default=True,
|
||||
|
|
Loading…
Reference in a new issue