Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2025-03-09 12:50:23 -05:00)

Commit: 8c297d184c "Its lazy now at least?"
Parent: 40f772c300
6 changed files with 62 additions and 44 deletions
yt_dlp/YoutubeDL.py

@@ -28,7 +28,7 @@
 from .cookies import load_cookies
 from .downloader import FFmpegFD, get_suitable_downloader, shorten_protocol_name
 from .downloader.rtmp import rtmpdump_version
-from .extractor import gen_extractor_classes, get_info_extractor
+from .extractor import gen_extractor_classes, get_info_extractor, import_extractors
 from .extractor.common import UnsupportedURLIE
 from .extractor.openload import PhantomJSwrapper
 from .globals import (

@@ -3777,6 +3777,7 @@ def get_encoding(stream):
         if not IN_CLI.get():
             write_debug(f'params: {self.params}')

+        import_extractors()
         lazy_extractors = LAZY_EXTRACTORS.get()
         if lazy_extractors is None:
             write_debug('Lazy loading extractors is disabled')
@ -19,7 +19,7 @@
|
||||||
from .downloader.external import get_external_downloader
|
from .downloader.external import get_external_downloader
|
||||||
from .extractor import list_extractor_classes
|
from .extractor import list_extractor_classes
|
||||||
from .extractor.adobepass import MSO_INFO
|
from .extractor.adobepass import MSO_INFO
|
||||||
from .globals import IN_CLI
|
from .globals import IN_CLI, plugin_dirs
|
||||||
from .options import parseOpts
|
from .options import parseOpts
|
||||||
from .plugins import load_all_plugin_types
|
from .plugins import load_all_plugin_types
|
||||||
from .postprocessor.ffmpeg import (
|
from .postprocessor.ffmpeg import (
|
||||||
|
@ -931,6 +931,7 @@ def _real_main(argv=None):
|
||||||
FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location)
|
FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location)
|
||||||
|
|
||||||
# load all plugins into the global lookup
|
# load all plugins into the global lookup
|
||||||
|
plugin_dirs.set(opts.plugin_dirs)
|
||||||
load_all_plugin_types()
|
load_all_plugin_types()
|
||||||
|
|
||||||
with YoutubeDL(ydl_opts) as ydl:
|
with YoutubeDL(ydl_opts) as ydl:
|
||||||
|
|
|
@ -1,16 +1,17 @@
|
||||||
from .extractors import *
|
from ..compat.compat_utils import passthrough_module
|
||||||
from ..globals import extractors as _extractor_classes
|
from ..globals import extractors as _extractors_context
|
||||||
|
|
||||||
# from ..compat.compat_utils import passthrough_module
|
|
||||||
# passthrough_module(__name__, '.extractors')
|
passthrough_module(__name__, '.extractors')
|
||||||
# del passthrough_module
|
del passthrough_module
|
||||||
|
|
||||||
|
|
||||||
def gen_extractor_classes():
|
def gen_extractor_classes():
|
||||||
""" Return a list of supported extractors.
|
""" Return a list of supported extractors.
|
||||||
The order does matter; the first extractor matched is the one handling the URL.
|
The order does matter; the first extractor matched is the one handling the URL.
|
||||||
"""
|
"""
|
||||||
return list(_extractor_classes.get().values())
|
import_extractors()
|
||||||
|
return list(_extractors_context.get().values())
|
||||||
|
|
||||||
|
|
||||||
def gen_extractors():
|
def gen_extractors():
|
||||||
|
@ -37,4 +38,9 @@ def list_extractors(age_limit=None):
|
||||||
|
|
||||||
def get_info_extractor(ie_name):
|
def get_info_extractor(ie_name):
|
||||||
"""Returns the info extractor class with the given ie_name"""
|
"""Returns the info extractor class with the given ie_name"""
|
||||||
return _extractor_classes.get()[f'{ie_name}IE']
|
import_extractors()
|
||||||
|
return _extractors_context.get()[f'{ie_name}IE']
|
||||||
|
|
||||||
|
|
||||||
|
def import_extractors():
|
||||||
|
from . import extractors # noqa: F401
|
||||||
|
|
|
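Note on the change above: "from .extractors import *" used to pull in every extractor module as soon as yt_dlp.extractor was imported; with import_extractors() the heavy import only happens when something actually needs the registry, while passthrough_module keeps attribute access on yt_dlp.extractor working. A minimal, self-contained sketch of the same deferred-registry idea (illustrative names only, not yt-dlp code):

_REGISTRY = {}


def _import_backends():
    # Stand-in for `from . import extractors`: in yt-dlp the side effect of
    # that import is that extractors.py fills the shared extractors lookup.
    if not _REGISTRY:
        _REGISTRY.update({'FooIE': object, 'GenericIE': object})


def gen_backend_classes():
    _import_backends()  # pay the import cost only on first use
    return list(_REGISTRY.values())


print(len(gen_backend_classes()))  # registry is populated on first call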
@ -1,7 +1,7 @@
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from ..globals import LAZY_EXTRACTORS, extractors
|
from ..globals import LAZY_EXTRACTORS, extractors as _extractors_context
|
||||||
|
|
||||||
_CLASS_LOOKUP = None
|
_CLASS_LOOKUP = None
|
||||||
if not os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'):
|
if not os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'):
|
||||||
|
@ -21,5 +21,14 @@
|
||||||
}
|
}
|
||||||
_CLASS_LOOKUP['GenericIE'] = _extractors.GenericIE
|
_CLASS_LOOKUP['GenericIE'] = _extractors.GenericIE
|
||||||
|
|
||||||
extractors.set(_CLASS_LOOKUP)
|
# We want to append to the main lookup
|
||||||
globals().update(_CLASS_LOOKUP)
|
_current = _extractors_context.get()
|
||||||
|
for name, ie in _CLASS_LOOKUP.items():
|
||||||
|
_current.setdefault(name, ie)
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(name):
|
||||||
|
value = _CLASS_LOOKUP.get(name)
|
||||||
|
if not value:
|
||||||
|
raise AttributeError(f'module {__name__} has no attribute {name}')
|
||||||
|
return value
|
||||||
|
|
|
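Note: instead of replacing the shared lookup with .set() and dumping every class into globals(), the module now appends with setdefault (entries already present, e.g. added by plugins, are kept) and serves attribute access through a PEP 562 module-level __getattr__. A small sketch of those two behaviours with made-up names:

# 1) setdefault "appends": keys already present in the shared lookup win.
current = {'FooIE': 'already registered'}
class_lookup = {'FooIE': 'builtin Foo', 'BarIE': 'builtin Bar'}
for name, ie in class_lookup.items():
    current.setdefault(name, ie)
assert current == {'FooIE': 'already registered', 'BarIE': 'builtin Bar'}


# 2) PEP 562: Python calls a module-level __getattr__ for attributes it cannot
# find, which replaces the old `globals().update(_CLASS_LOOKUP)`.
def __getattr__(name):
    try:
        return class_lookup[name]
    except KeyError:
        raise AttributeError(f'module {__name__} has no attribute {name}') from None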
@ -7,9 +7,9 @@
|
||||||
extractors = ContextVar('extractors', default={})
|
extractors = ContextVar('extractors', default={})
|
||||||
IN_CLI = ContextVar('IN_CLI', default=False)
|
IN_CLI = ContextVar('IN_CLI', default=False)
|
||||||
# `False`=force, `None`=disabled, `True`=enabled
|
# `False`=force, `None`=disabled, `True`=enabled
|
||||||
LAZY_EXTRACTORS = ContextVar('LAZY_EXTRACTORS')
|
LAZY_EXTRACTORS = ContextVar('LAZY_EXTRACTORS', default=False)
|
||||||
|
|
||||||
plugin_dirs = ContextVar('plugin_dirs')
|
plugin_dirs = ContextVar('plugin_dirs', default=(..., ))
|
||||||
plugin_ies = ContextVar('plugin_ies', default={})
|
plugin_ies = ContextVar('plugin_ies', default={})
|
||||||
plugin_overrides = ContextVar('plugin_overrides', default=defaultdict(list))
|
plugin_overrides = ContextVar('plugin_overrides', default=defaultdict(list))
|
||||||
plugin_pps = ContextVar('plugin_pps', default={})
|
plugin_pps = ContextVar('plugin_pps', default={})
|
||||||
|
|
|
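Note: the added defaults matter because ContextVar.get() raises LookupError when the variable has never been set and no default was given; with defaults, LAZY_EXTRACTORS and plugin_dirs can be read before any .set() call (e.g. when yt-dlp is used as a library rather than through _real_main). Quick illustration:

from contextvars import ContextVar

no_default = ContextVar('no_default')
with_default = ContextVar('with_default', default=False)

try:
    no_default.get()
except LookupError:
    print('an unset ContextVar without a default raises LookupError')

print(with_default.get())  # False, readable before anyone calls .set()
with_default.set(True)
print(with_default.get())  # True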
@ -28,11 +28,13 @@
|
||||||
get_system_config_dirs,
|
get_system_config_dirs,
|
||||||
get_user_config_dirs,
|
get_user_config_dirs,
|
||||||
merge_dicts,
|
merge_dicts,
|
||||||
|
orderedSet,
|
||||||
write_string,
|
write_string,
|
||||||
)
|
)
|
||||||
|
|
||||||
PACKAGE_NAME = 'yt_dlp_plugins'
|
PACKAGE_NAME = 'yt_dlp_plugins'
|
||||||
COMPAT_PACKAGE_NAME = 'ytdlp_plugins'
|
COMPAT_PACKAGE_NAME = 'ytdlp_plugins'
|
||||||
|
_BASE_PACKAGE_PATH = Path(__file__).parent
|
||||||
|
|
||||||
|
|
||||||
class PluginType(enum.Enum):
|
class PluginType(enum.Enum):
|
||||||
|
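Note: _BASE_PACKAGE_PATH records the directory of the yt_dlp package itself so it can be filtered out of the plugin search roots below, and orderedSet(..., lazy=True) is used to de-duplicate candidate paths while preserving order. A rough stand-alone equivalent of that lazy de-duplication (plain generator, not yt-dlp's orderedSet):

from pathlib import Path


def unique_in_order(paths):
    # Yield each path once, in first-seen order, without building the whole
    # list up front; duplicates from overlapping config dirs are skipped.
    seen = set()
    for path in paths:
        if path not in seen:
            seen.add(path)
            yield path


print(list(unique_in_order(map(Path, ['/a', '/b', '/a', '/c']))))  # /a, /b, /c, each once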
@ -55,41 +57,43 @@ def exec_module(self, module):
|
||||||
|
|
||||||
@functools.cache
|
@functools.cache
|
||||||
def dirs_in_zip(archive):
|
def dirs_in_zip(archive):
|
||||||
with contextlib.suppress(FileNotFoundError):
|
try:
|
||||||
with ZipFile(archive) as zip:
|
with ZipFile(archive) as zip_:
|
||||||
return set(itertools.chain.from_iterable(
|
return set(itertools.chain.from_iterable(
|
||||||
Path(file).parents for file in zip.namelist()))
|
Path(file).parents for file in zip_.namelist()))
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
except Exception as e:
|
||||||
|
write_string(f'WARNING: Could not read zip file {archive}: {e}\n')
|
||||||
return ()
|
return ()
|
||||||
|
|
||||||
|
|
||||||
def default_plugin_paths():
|
def default_plugin_paths():
|
||||||
seen = set()
|
def _get_package_paths(*root_paths, containing_folder):
|
||||||
|
for config_dir in orderedSet(map(Path, root_paths), lazy=True):
|
||||||
def _get_unique_package_paths(*root_paths, containing_folder):
|
# We need to filter the base path added when running __main__.py directly
|
||||||
for config_dir in map(Path, root_paths):
|
if config_dir == _BASE_PACKAGE_PATH:
|
||||||
plugin_dir = config_dir / containing_folder
|
|
||||||
# if plugin_dir in seen:
|
|
||||||
# continue
|
|
||||||
seen.add(plugin_dir)
|
|
||||||
if not plugin_dir.is_dir():
|
|
||||||
continue
|
continue
|
||||||
yield from plugin_dir.iterdir()
|
with contextlib.suppress(OSError):
|
||||||
|
for item in (config_dir / containing_folder).iterdir():
|
||||||
|
yield item
|
||||||
|
|
||||||
# Load from yt-dlp config folders
|
# Load from yt-dlp config folders
|
||||||
yield from _get_unique_package_paths(
|
yield from _get_package_paths(
|
||||||
*get_user_config_dirs('yt-dlp'),
|
*get_user_config_dirs('yt-dlp'),
|
||||||
*get_system_config_dirs('yt-dlp'),
|
*get_system_config_dirs('yt-dlp'),
|
||||||
containing_folder='plugins')
|
containing_folder='plugins')
|
||||||
|
|
||||||
# Load from yt-dlp-plugins folders
|
# Load from yt-dlp-plugins folders
|
||||||
yield from _get_unique_package_paths(
|
yield from _get_package_paths(
|
||||||
get_executable_path(),
|
get_executable_path(),
|
||||||
*get_user_config_dirs(''),
|
*get_user_config_dirs(''),
|
||||||
*get_system_config_dirs(''),
|
*get_system_config_dirs(''),
|
||||||
containing_folder='yt-dlp-plugins')
|
containing_folder='yt-dlp-plugins')
|
||||||
|
|
||||||
# Load from PYTHONPATH folders
|
# Load from PYTHONPATH folders
|
||||||
yield from map(Path, sys.path)
|
yield from (path for path in map(Path, sys.path) if path != _BASE_PACKAGE_PATH)
|
||||||
|
# yield from _get_package_paths(*sys.path, containing_folder='')
|
||||||
|
|
||||||
|
|
||||||
class PluginFinder(importlib.abc.MetaPathFinder):
|
class PluginFinder(importlib.abc.MetaPathFinder):
|
||||||
|
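Note on dirs_in_zip: a missing archive is still ignored, but other read errors now produce a warning instead of silently yielding nothing. The returned set contains every ancestor directory of every archive member, which is what makes the later `parts in dirs_in_zip(path)` membership test work. A small self-contained demonstration (in-memory archive, made-up member name):

import io
import itertools
from pathlib import Path
from zipfile import ZipFile

buf = io.BytesIO()
with ZipFile(buf, 'w') as zf:
    zf.writestr('yt_dlp_plugins/extractor/example.py', '')

with ZipFile(buf) as zf:
    dirs = set(itertools.chain.from_iterable(
        Path(name).parents for name in zf.namelist()))

print(Path('yt_dlp_plugins/extractor') in dirs)      # True
print(Path('yt_dlp_plugins/postprocessor') in dirs)  # False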
@ -109,26 +113,21 @@ def search_locations(self, fullname):
|
||||||
candidate_locations = itertools.chain.from_iterable(
|
candidate_locations = itertools.chain.from_iterable(
|
||||||
default_plugin_paths() if candidate is ...
|
default_plugin_paths() if candidate is ...
|
||||||
else Path(candidate).iterdir()
|
else Path(candidate).iterdir()
|
||||||
for candidate in plugin_dirs.get((..., )))
|
for candidate in plugin_dirs.get())
|
||||||
|
|
||||||
parts = Path(*fullname.split('.'))
|
parts = Path(*fullname.split('.'))
|
||||||
locations = dict()
|
for path in orderedSet(candidate_locations, lazy=True):
|
||||||
for path in dict.fromkeys(candidate_locations):
|
|
||||||
candidate = path / parts
|
candidate = path / parts
|
||||||
# print(candidate)
|
|
||||||
if candidate.is_dir():
|
if candidate.is_dir():
|
||||||
locations[candidate] = None
|
yield candidate
|
||||||
elif path.name and any(path.with_suffix(suffix).is_file() for suffix in {'.zip', '.egg', '.whl'}):
|
elif path.suffix in ('.zip', '.egg', '.whl') and parts in dirs_in_zip(path):
|
||||||
if parts in dirs_in_zip(path):
|
yield candidate
|
||||||
locations[candidate] = None
|
|
||||||
|
|
||||||
return list(map(str, locations))
|
|
||||||
|
|
||||||
def find_spec(self, fullname, path=None, target=None):
|
def find_spec(self, fullname, path=None, target=None):
|
||||||
if fullname not in self.packages:
|
if fullname not in self.packages:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
search_locations = self.search_locations(fullname)
|
search_locations = list(map(str, self.search_locations(fullname)))
|
||||||
if not search_locations:
|
if not search_locations:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
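Note: search_locations is now a generator that yields Path candidates (expanding the ... sentinel in plugin_dirs into the default plugin paths), and find_spec stringifies them because importlib expects submodule_search_locations to be strings. The remainder of find_spec is not shown in this hunk; a generic importlib sketch of how such locations are typically turned into a package spec (assumed helper name, not yt-dlp code):

import importlib.machinery
from pathlib import Path


def namespace_spec(fullname, search_locations):
    # Expose `fullname` as a package whose submodules are searched for in the
    # given directories (and zip/egg/whl paths).
    spec = importlib.machinery.ModuleSpec(fullname, None, is_package=True)
    spec.submodule_search_locations = list(map(str, search_locations))
    return spec


spec = namespace_spec('yt_dlp_plugins', [Path('/tmp/yt-dlp-plugins/example')])
print(spec.submodule_search_locations)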
@ -200,10 +199,12 @@ def load_plugins(plugin_type: PluginType):
|
||||||
spec.loader.exec_module(plugins)
|
spec.loader.exec_module(plugins)
|
||||||
classes.update(load_module(plugins, spec.name, suffix))
|
classes.update(load_module(plugins, spec.name, suffix))
|
||||||
|
|
||||||
|
regular_plugins = {}
|
||||||
# __init_subclass__ was removed so we manually add overrides
|
# __init_subclass__ was removed so we manually add overrides
|
||||||
for name, klass in classes.items():
|
for name, klass in classes.items():
|
||||||
plugin_name = getattr(klass, '_plugin_name', None)
|
plugin_name = getattr(klass, '_plugin_name', None)
|
||||||
if not plugin_name:
|
if not plugin_name:
|
||||||
|
regular_plugins[name] = klass
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# FIXME: Most likely something wrong here
|
# FIXME: Most likely something wrong here
|
||||||
|
@ -217,13 +218,13 @@ def load_plugins(plugin_type: PluginType):
|
||||||
plugin_overrides.get()[super_class].append(klass)
|
plugin_overrides.get()[super_class].append(klass)
|
||||||
|
|
||||||
# Add the classes into the global plugin lookup
|
# Add the classes into the global plugin lookup
|
||||||
plugin_destination.set(classes)
|
plugin_destination.set(regular_plugins)
|
||||||
# We want to prepend to the main lookup
|
# We want to prepend to the main lookup
|
||||||
current = destination.get()
|
current = destination.get()
|
||||||
result = merge_dicts(classes, current)
|
result = merge_dicts(regular_plugins, current)
|
||||||
destination.set(result)
|
destination.set(result)
|
||||||
|
|
||||||
return classes
|
return result
|
||||||
|
|
||||||
|
|
||||||
def load_all_plugin_types():
|
def load_all_plugin_types():
|
||||||
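Note: only the classes without a _plugin_name (the regular_plugins collected above) are published and prepended to the main lookup, so plugin entries take precedence over same-named built-ins, mirroring the setdefault append in extractor/extractors.py. A rough plain-dict illustration of the two directions (assuming merge_dicts gives earlier dicts precedence, as the "prepend" comment suggests):

builtin = {'FooIE': 'builtin Foo', 'BarIE': 'builtin Bar'}
plugins = {'FooIE': 'plugin Foo'}

# "Prepend" (plugins.py): plugin entries come first and win on conflicts.
prepended = {**plugins, **{k: v for k, v in builtin.items() if k not in plugins}}
assert list(prepended) == ['FooIE', 'BarIE']
assert prepended['FooIE'] == 'plugin Foo'

# "Append" (extractor/extractors.py): existing entries are kept, missing ones
# are filled in with setdefault.
appended = dict(plugins)
for name, ie in builtin.items():
    appended.setdefault(name, ie)
assert appended['FooIE'] == 'plugin Foo'
assert appended['BarIE'] == 'builtin Bar'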