
Rough demo

Simon Sawicki 2023-01-07 08:19:13 +01:00
parent 7287ab92f6
commit 40f772c300
10 changed files with 213 additions and 125 deletions

yt_dlp/YoutubeDL.py

@@ -31,9 +31,15 @@
 from .extractor import gen_extractor_classes, get_info_extractor
 from .extractor.common import UnsupportedURLIE
 from .extractor.openload import PhantomJSwrapper
+from .globals import (
+    IN_CLI,
+    LAZY_EXTRACTORS,
+    plugin_ies,
+    plugin_overrides,
+    plugin_pps,
+)
 from .minicurses import format_text
 from .plugins import directories as plugin_directories
-from .postprocessor import _PLUGIN_CLASSES as plugin_pps
 from .postprocessor import (
     EmbedThumbnailPP,
     FFmpegFixupDuplicateMoovPP,
@@ -3733,15 +3739,6 @@ def print_debug_header(self):
         if not self.params.get('verbose'):
             return

-        from . import _IN_CLI  # Must be delayed import
-
-        # These imports can be slow. So import them only as needed
-        from .extractor.extractors import _LAZY_LOADER
-        from .extractor.extractors import (
-            _PLUGIN_CLASSES as plugin_ies,
-            _PLUGIN_OVERRIDES as plugin_ie_overrides
-        )
-
         def get_encoding(stream):
             ret = str(getattr(stream, 'encoding', 'missing (%s)' % type(stream).__name__))
             if not supports_terminal_sequences(stream):
@@ -3774,17 +3771,17 @@ def get_encoding(stream):
             __version__,
             f'[{RELEASE_GIT_HEAD}]' if RELEASE_GIT_HEAD else '',
             '' if source == 'unknown' else f'({source})',
-            '' if _IN_CLI else 'API',
+            '' if IN_CLI.get() else 'API',
             delim=' '))
-        if not _IN_CLI:
+        if not IN_CLI.get():
             write_debug(f'params: {self.params}')

-        if not _LAZY_LOADER:
-            if os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'):
-                write_debug('Lazy loading extractors is forcibly disabled')
-            else:
-                write_debug('Lazy loading extractors is disabled')
+        lazy_extractors = LAZY_EXTRACTORS.get()
+        if lazy_extractors is None:
+            write_debug('Lazy loading extractors is disabled')
+        elif not lazy_extractors:
+            write_debug('Lazy loading extractors is forcibly disabled')
         if self.params['compat_opts']:
             write_debug('Compatibility options: %s' % ', '.join(self.params['compat_opts']))
@@ -3818,13 +3815,13 @@ def get_encoding(stream):
                 proxy_map.update(handler.proxies)
         write_debug(f'Proxy map: {proxy_map}')

-        for plugin_type, plugins in {'Extractor': plugin_ies, 'Post-Processor': plugin_pps}.items():
-            display_list = ['%s%s' % (
-                klass.__name__, '' if klass.__name__ == name else f' as {name}')
-                for name, klass in plugins.items()]
+        for plugin_type, plugins in (('Extractor', plugin_ies), ('Post-Processor', plugin_pps)):
+            display_list = [
+                klass.__name__ if klass.__name__ == name else f'{klass.__name__} as {name}'
+                for name, klass in plugins.get().items()]
             if plugin_type == 'Extractor':
                 display_list.extend(f'{plugins[-1].IE_NAME.partition("+")[2]} ({parent.__name__})'
-                                    for parent, plugins in plugin_ie_overrides.items())
+                                    for parent, plugins in plugin_overrides.get().items())
             if not display_list:
                 continue
             write_debug(f'{plugin_type} Plugins: {", ".join(sorted(display_list))}')

yt_dlp/__init__.py

@@ -19,8 +19,10 @@
 from .downloader.external import get_external_downloader
 from .extractor import list_extractor_classes
 from .extractor.adobepass import MSO_INFO
+from .globals import IN_CLI
 from .options import parseOpts
-from .postprocessor import (
+from .plugins import load_all_plugin_types
+from .postprocessor.ffmpeg import (
     FFmpegExtractAudioPP,
     FFmpegMergerPP,
     FFmpegPostProcessor,
@@ -28,9 +30,8 @@
     FFmpegThumbnailsConvertorPP,
     FFmpegVideoConvertorPP,
     FFmpegVideoRemuxerPP,
-    MetadataFromFieldPP,
-    MetadataParserPP,
 )
+from .postprocessor.metadataparser import MetadataFromFieldPP, MetadataParserPP
 from .update import Updater
 from .utils import (
     NO_DEFAULT,
@@ -63,8 +64,6 @@
 )
 from .YoutubeDL import YoutubeDL

-_IN_CLI = False
-

 def _exit(status=0, *args):
     for msg in args:
@@ -394,6 +393,10 @@ def metadataparser_actions(f):
     }

     # Other options
+    opts.plugin_dirs = opts.plugin_dirs or []
+    if 'no-default' not in opts.plugin_dirs:
+        opts.plugin_dirs.append(...)
+
     if opts.playlist_items is not None:
         try:
             tuple(PlaylistEntries.parse_playlist_items(opts.playlist_items))
@@ -927,6 +930,9 @@ def _real_main(argv=None):
     if opts.ffmpeg_location:
         FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location)

+    # load all plugins into the global lookup
+    load_all_plugin_types()
+
     with YoutubeDL(ydl_opts) as ydl:
         pre_process = opts.update_self or opts.rm_cachedir
         actual_use = all_urls or opts.load_info_filename
@@ -964,8 +970,7 @@ def _real_main(argv=None):

 def main(argv=None):
-    global _IN_CLI
-    _IN_CLI = True
+    IN_CLI.set(True)
     try:
         _exit(*variadic(_real_main(argv)))
     except DownloadError:
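Reviewer note: the `opts.plugin_dirs.append(...)` above appends the Ellipsis singleton as a sentinel meaning "also search the default plugin locations"; plugins.py expands it later. A minimal standalone sketch with made-up values:

    plugin_dirs = ['~/my-plugins']    # e.g. collected from repeated --plugin-dirs flags
    if 'no-default' not in plugin_dirs:
        plugin_dirs.append(...)       # Ellipsis = also search the default locations
    print(plugin_dirs)                # ['~/my-plugins', Ellipsis]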

yt_dlp/extractor/__init__.py

@@ -1,16 +1,16 @@
-from ..compat.compat_utils import passthrough_module
+from .extractors import *
+from ..globals import extractors as _extractor_classes

-passthrough_module(__name__, '.extractors')
-del passthrough_module
+# from ..compat.compat_utils import passthrough_module
+# passthrough_module(__name__, '.extractors')
+# del passthrough_module


 def gen_extractor_classes():
     """ Return a list of supported extractors.
     The order does matter; the first extractor matched is the one handling the URL.
     """
-    from .extractors import _ALL_CLASSES
-    return _ALL_CLASSES
+    return list(_extractor_classes.get().values())


 def gen_extractors():
@@ -37,6 +37,4 @@ def list_extractors(age_limit=None):

 def get_info_extractor(ie_name):
     """Returns the info extractor class with the given ie_name"""
-    from . import extractors
-    return getattr(extractors, f'{ie_name}IE')
+    return _extractor_classes.get()[f'{ie_name}IE']
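Reviewer note: a minimal sketch of how the public helpers now resolve through the registry, assuming the extractors ContextVar has been populated (importing yt_dlp.extractor triggers `from .extractors import *`, which fills it):

    from yt_dlp.extractor import gen_extractor_classes, get_info_extractor

    classes = gen_extractor_classes()        # list(extractors.get().values())
    youtube = get_info_extractor('Youtube')  # extractors.get()['YoutubeIE']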

yt_dlp/extractor/common.py

@@ -5,7 +5,6 @@
 import http.client
 import http.cookiejar
 import http.cookies
-import inspect
 import itertools
 import json
 import math
@@ -3724,16 +3723,8 @@ def _extract_url(cls, webpage):  # TODO: Remove
     @classmethod
     def __init_subclass__(cls, *, plugin_name=None, **kwargs):
-        if plugin_name:
-            mro = inspect.getmro(cls)
-            super_class = cls.__wrapped__ = mro[mro.index(cls) + 1]
-            cls.PLUGIN_NAME, cls.ie_key = plugin_name, super_class.ie_key
-            cls.IE_NAME = f'{super_class.IE_NAME}+{plugin_name}'
-            while getattr(super_class, '__wrapped__', None):
-                super_class = super_class.__wrapped__
-                setattr(sys.modules[super_class.__module__], super_class.__name__, cls)
-
-            _PLUGIN_OVERRIDES[super_class].append(cls)
+        if plugin_name is not None:
+            cls._plugin_name = plugin_name

         return super().__init_subclass__(**kwargs)
@@ -3789,6 +3780,3 @@ class UnsupportedURLIE(InfoExtractor):

     def _real_extract(self, url):
         raise UnsupportedError(url)
-
-
-_PLUGIN_OVERRIDES = collections.defaultdict(list)

yt_dlp/extractor/extractors.py

@@ -1,28 +1,25 @@
-import contextlib
+import inspect
 import os

-from ..plugins import load_plugins
+from ..globals import LAZY_EXTRACTORS, extractors

-# NB: Must be before other imports so that plugins can be correctly injected
-_PLUGIN_CLASSES = load_plugins('extractor', 'IE')
-
-_LAZY_LOADER = False
+_CLASS_LOOKUP = None
 if not os.environ.get('YTDLP_NO_LAZY_EXTRACTORS'):
-    with contextlib.suppress(ImportError):
-        from .lazy_extractors import *  # noqa: F403
-        from .lazy_extractors import _ALL_CLASSES
-        _LAZY_LOADER = True
+    try:
+        from .lazy_extractors import _CLASS_LOOKUP
+        LAZY_EXTRACTORS.set(True)
+    except ImportError:
+        LAZY_EXTRACTORS.set(None)

-if not _LAZY_LOADER:
-    from ._extractors import *  # noqa: F403
-    _ALL_CLASSES = [  # noqa: F811
-        klass
-        for name, klass in globals().items()
+if not _CLASS_LOOKUP:
+    from . import _extractors
+
+    _CLASS_LOOKUP = {
+        name: value
+        for name, value in inspect.getmembers(_extractors)
         if name.endswith('IE') and name != 'GenericIE'
-    ]
-    _ALL_CLASSES.append(GenericIE)  # noqa: F405
+    }
+    _CLASS_LOOKUP['GenericIE'] = _extractors.GenericIE

-globals().update(_PLUGIN_CLASSES)
-_ALL_CLASSES[:0] = _PLUGIN_CLASSES.values()
-
-from .common import _PLUGIN_OVERRIDES  # noqa: F401
+extractors.set(_CLASS_LOOKUP)
+globals().update(_CLASS_LOOKUP)

yt_dlp/globals.py (new file)

@@ -0,0 +1,15 @@
+from collections import defaultdict
+from contextvars import ContextVar
+
+# NAME = 'yt-dlp'
+
+postprocessors = ContextVar('postprocessors', default={})
+extractors = ContextVar('extractors', default={})
+
+IN_CLI = ContextVar('IN_CLI', default=False)
+# `False`=force, `None`=disabled, `True`=enabled
+LAZY_EXTRACTORS = ContextVar('LAZY_EXTRACTORS')
+plugin_dirs = ContextVar('plugin_dirs')
+plugin_ies = ContextVar('plugin_ies', default={})
+plugin_overrides = ContextVar('plugin_overrides', default=defaultdict(list))
+plugin_pps = ContextVar('plugin_pps', default={})
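Reviewer note: a minimal sketch (not part of the commit) of the read/write pattern the rest of the diff relies on; ContextVar.get()/.set() are standard-library behaviour:

    from yt_dlp.globals import IN_CLI, LAZY_EXTRACTORS

    IN_CLI.set(True)               # main() flips this once at CLI entry
    assert IN_CLI.get() is True    # readers call .get() instead of importing a module-level flag

    LAZY_EXTRACTORS.set(None)      # per the comment above: False=forced off, None=disabled, True=enabled
    print(LAZY_EXTRACTORS.get())   # None; .get() with no default raises LookupError if nothing ever set it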

yt_dlp/options.py

@@ -11,15 +11,15 @@
 from .compat import compat_expanduser
 from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS
 from .downloader.external import list_external_downloaders
-from .postprocessor import (
+from .postprocessor.ffmpeg import (
     FFmpegExtractAudioPP,
     FFmpegMergerPP,
     FFmpegSubtitlesConvertorPP,
     FFmpegThumbnailsConvertorPP,
     FFmpegVideoRemuxerPP,
-    SponsorBlockPP,
 )
 from .postprocessor.modify_chapters import DEFAULT_SPONSORBLOCK_CHAPTER_TITLE
+from .postprocessor.sponsorblock import SponsorBlockPP
 from .update import detect_variant, is_non_updateable
 from .utils import (
     OUTTMPL_TYPES,
@@ -435,6 +435,12 @@ def _alias_callback(option, opt_str, value, parser, opts, nargs):
         '--no-colors', '--no-colours',
         action='store_true', dest='no_color', default=False,
         help='Do not emit color codes in output (Alias: --no-colours)')
+    general.add_option(
+        '--plugin-dirs',
+        metavar='PATH', dest='plugin_dirs', action='append',
+        help=(
+            'Directory to search for plugins. Can be used multiple times to add multiple directories. '
+            'Add "no-default" to disable the default plugin directories'))
     general.add_option(
         '--compat-options',
         metavar='OPTS', dest='compat_opts', default=set(), type='str',
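Reviewer note: `action='append'` makes repeated `--plugin-dirs` flags accumulate into a list on `opts.plugin_dirs`; a standalone optparse sketch with made-up paths:

    import optparse

    parser = optparse.OptionParser()
    parser.add_option('--plugin-dirs', metavar='PATH', dest='plugin_dirs', action='append')
    opts, _ = parser.parse_args(['--plugin-dirs', '~/my-plugins', '--plugin-dirs', 'no-default'])
    print(opts.plugin_dirs)   # ['~/my-plugins', 'no-default']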

yt_dlp/plugins.py

@@ -1,4 +1,5 @@
 import contextlib
+import enum
 import importlib
 import importlib.abc
 import importlib.machinery
@@ -12,12 +13,21 @@
 from pathlib import Path
 from zipfile import ZipFile

+from .globals import (
+    extractors,
+    plugin_dirs,
+    plugin_ies,
+    plugin_overrides,
+    plugin_pps,
+    postprocessors,
+)
+
 from .compat import functools  # isort: split
 from .utils import (
     get_executable_path,
     get_system_config_dirs,
     get_user_config_dirs,
-    orderedSet,
+    merge_dicts,
     write_string,
 )
@@ -25,6 +35,17 @@
 COMPAT_PACKAGE_NAME = 'ytdlp_plugins'


+class PluginType(enum.Enum):
+    POSTPROCESSORS = ('postprocessor', 'PP')
+    EXTRACTORS = ('extractor', 'IE')
+
+
+_plugin_type_lookup = {
+    PluginType.POSTPROCESSORS: (postprocessors, plugin_pps),
+    PluginType.EXTRACTORS: (extractors, plugin_ies),
+}
+
+
 class PluginLoader(importlib.abc.Loader):
     """Dummy loader for virtual namespace packages"""
@@ -34,9 +55,41 @@ def exec_module(self, module):

 @functools.cache
 def dirs_in_zip(archive):
-    with ZipFile(archive) as zip:
-        return set(itertools.chain.from_iterable(
-            Path(file).parents for file in zip.namelist()))
+    with contextlib.suppress(FileNotFoundError):
+        with ZipFile(archive) as zip:
+            return set(itertools.chain.from_iterable(
+                Path(file).parents for file in zip.namelist()))
+    return ()
+
+
+def default_plugin_paths():
+    seen = set()
+
+    def _get_unique_package_paths(*root_paths, containing_folder):
+        for config_dir in map(Path, root_paths):
+            plugin_dir = config_dir / containing_folder
+            # if plugin_dir in seen:
+            #     continue
+            seen.add(plugin_dir)
+            if not plugin_dir.is_dir():
+                continue
+            yield from plugin_dir.iterdir()
+
+    # Load from yt-dlp config folders
+    yield from _get_unique_package_paths(
+        *get_user_config_dirs('yt-dlp'),
+        *get_system_config_dirs('yt-dlp'),
+        containing_folder='plugins')
+
+    # Load from yt-dlp-plugins folders
+    yield from _get_unique_package_paths(
+        get_executable_path(),
+        *get_user_config_dirs(''),
+        *get_system_config_dirs(''),
+        containing_folder='yt-dlp-plugins')
+
+    # Load from PYTHONPATH folders
+    yield from map(Path, sys.path)


 class PluginFinder(importlib.abc.MetaPathFinder):
@@ -53,41 +106,23 @@ def __init__(self, *packages):
             for name in packages))

     def search_locations(self, fullname):
-        candidate_locations = []
-
-        def _get_package_paths(*root_paths, containing_folder='plugins'):
-            for config_dir in orderedSet(map(Path, root_paths), lazy=True):
-                plugin_dir = config_dir / containing_folder
-                if not plugin_dir.is_dir():
-                    continue
-                yield from plugin_dir.iterdir()
-
-        # Load from yt-dlp config folders
-        candidate_locations.extend(_get_package_paths(
-            *get_user_config_dirs('yt-dlp'),
-            *get_system_config_dirs('yt-dlp'),
-            containing_folder='plugins'))
-
-        # Load from yt-dlp-plugins folders
-        candidate_locations.extend(_get_package_paths(
-            get_executable_path(),
-            *get_user_config_dirs(''),
-            *get_system_config_dirs(''),
-            containing_folder='yt-dlp-plugins'))
-
-        candidate_locations.extend(map(Path, sys.path))  # PYTHONPATH
+        candidate_locations = itertools.chain.from_iterable(
+            default_plugin_paths() if candidate is ...
+            else Path(candidate).iterdir()
+            for candidate in plugin_dirs.get((..., )))

         parts = Path(*fullname.split('.'))
-        locations = set()
+        locations = dict()
         for path in dict.fromkeys(candidate_locations):
             candidate = path / parts
+            # print(candidate)
             if candidate.is_dir():
-                locations.add(str(candidate))
+                locations[candidate] = None
             elif path.name and any(path.with_suffix(suffix).is_file() for suffix in {'.zip', '.egg', '.whl'}):
-                with contextlib.suppress(FileNotFoundError):
-                    if parts in dirs_in_zip(path):
-                        locations.add(str(candidate))
-        return locations
+                if parts in dirs_in_zip(path):
+                    locations[candidate] = None
+
+        return list(map(str, locations))

     def find_spec(self, fullname, path=None, target=None):
         if fullname not in self.packages:
@@ -129,7 +164,9 @@ def load_module(module, module_name, suffix):
             and obj.__name__ in getattr(module, '__all__', [obj.__name__])))


-def load_plugins(name, suffix):
+def load_plugins(plugin_type: PluginType):
+    destination, plugin_destination = _plugin_type_lookup[plugin_type]
+    name, suffix = plugin_type.value
     classes = {}

     for finder, module_name, _ in iter_modules(name):
@@ -154,17 +191,46 @@ def load_plugins(name, suffix):
     # Compat: old plugin system using __init__.py
     # Note: plugins imported this way do not show up in directories()
     # nor are considered part of the yt_dlp_plugins namespace package
-    with contextlib.suppress(FileNotFoundError):
-        spec = importlib.util.spec_from_file_location(
-            name, Path(get_executable_path(), COMPAT_PACKAGE_NAME, name, '__init__.py'))
-        plugins = importlib.util.module_from_spec(spec)
-        sys.modules[spec.name] = plugins
-        spec.loader.exec_module(plugins)
-        classes.update(load_module(plugins, spec.name, suffix))
+    if ... in plugin_dirs.get((..., )):
+        with contextlib.suppress(FileNotFoundError):
+            spec = importlib.util.spec_from_file_location(
+                name, Path(get_executable_path(), COMPAT_PACKAGE_NAME, name, '__init__.py'))
+            plugins = importlib.util.module_from_spec(spec)
+            sys.modules[spec.name] = plugins
+            spec.loader.exec_module(plugins)
+            classes.update(load_module(plugins, spec.name, suffix))
+
+    # __init_subclass__ was removed so we manually add overrides
+    for name, klass in classes.items():
+        plugin_name = getattr(klass, '_plugin_name', None)
+        if not plugin_name:
+            continue
+
+        # FIXME: Most likely something wrong here
+        mro = inspect.getmro(klass)
+        super_class = klass.__wrapped__ = mro[mro.index(klass) + 1]
+        klass.PLUGIN_NAME, klass.ie_key = plugin_name, super_class.ie_key
+        klass.IE_NAME = f'{super_class.IE_NAME}+{plugin_name}'
+        while getattr(super_class, '__wrapped__', None):
+            super_class = super_class.__wrapped__
+            setattr(sys.modules[super_class.__module__], super_class.__name__, klass)
+
+        plugin_overrides.get()[super_class].append(klass)
+
+    # Add the classes into the global plugin lookup
+    plugin_destination.set(classes)
+    # We want to prepend to the main lookup
+    current = destination.get()
+    result = merge_dicts(classes, current)
+    destination.set(result)

     return classes


+def load_all_plugin_types():
+    for plugin_type in PluginType:
+        load_plugins(plugin_type)
+
+
 sys.meta_path.insert(0, PluginFinder(f'{PACKAGE_NAME}.extractor', f'{PACKAGE_NAME}.postprocessor'))

-__all__ = ['directories', 'load_plugins', 'PACKAGE_NAME', 'COMPAT_PACKAGE_NAME']
+__all__ = ['directories', 'load_plugins', 'load_all_plugin_types', 'PACKAGE_NAME', 'COMPAT_PACKAGE_NAME']
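Reviewer note: a sketch of the new entry points defined above, as calling code would use them (module paths assume the yt_dlp package layout; the contents of the lookups depend on which plugins are installed, and the plugin name below is hypothetical):

    from yt_dlp.globals import plugin_ies, plugin_pps
    from yt_dlp.plugins import PluginType, load_all_plugin_types, load_plugins

    load_all_plugin_types()                           # what _real_main() now calls before building YoutubeDL
    ie_plugins = load_plugins(PluginType.EXTRACTORS)  # or load a single plugin type explicitly

    print(plugin_ies.get())   # e.g. {'ExamplePluginIE': <class ...>}
    print(plugin_pps.get())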

yt_dlp/postprocessor/__init__.py

@@ -33,15 +33,31 @@
 from .sponskrub import SponSkrubPP
 from .sponsorblock import SponsorBlockPP
 from .xattrpp import XAttrMetadataPP
-from ..plugins import load_plugins
+from ..globals import plugin_pps, postprocessors
+from ..plugins import PACKAGE_NAME
+from ..utils import deprecation_warning

-_PLUGIN_CLASSES = load_plugins('postprocessor', 'PP')

+def __getattr__(name):
+    lookup = plugin_pps.get()
+    if name in lookup:
+        deprecation_warning(
+            f'Importing a plugin Post-Processor from {__name__} is deprecated. '
+            f'Please import {PACKAGE_NAME}.postprocessor.{name} instead.')
+        return lookup[name]
+
+    raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
+

 def get_postprocessor(key):
-    return globals()[key + 'PP']
+    return postprocessors.get()[key + 'PP']


-globals().update(_PLUGIN_CLASSES)
-__all__ = [name for name in globals().keys() if name.endswith('PP')]
-__all__.extend(('PostProcessor', 'FFmpegPostProcessor'))
+_default_pps = {
+    name: value
+    for name, value in globals().items()
+    if name.endswith('PP') or name in ('PostProcessor', 'FFmpegPostProcessor')
+}
+postprocessors.set(_default_pps)
+
+__all__ = list(_default_pps.values())
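Reviewer note: post-processor lookups now go through the postprocessors ContextVar, and old-style imports of plugin PPs keep working via `__getattr__` but warn. A small sketch (the plugin class name is hypothetical):

    from yt_dlp.postprocessor import get_postprocessor

    pp_cls = get_postprocessor('EmbedThumbnail')   # resolves 'EmbedThumbnailPP' from the registry

    # Old-style plugin import still resolves through __getattr__, now with a deprecation warning:
    # from yt_dlp.postprocessor import ExamplePluginPP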

yt_dlp/utils.py

@@ -56,6 +56,7 @@
     compat_shlex_quote,
 )
 from .dependencies import brotli, certifi, websockets, xattr
+from .globals import IN_CLI
 from .socks import ProxyType, sockssocket
@@ -2053,8 +2054,7 @@ def write_string(s, out=None, encoding=None):

 def deprecation_warning(msg, *, printer=None, stacklevel=0, **kwargs):
-    from . import _IN_CLI
-    if _IN_CLI:
+    if IN_CLI.get():
         if msg in deprecation_warning._cache:
             return
         deprecation_warning._cache.add(msg)