diff --git a/django/apps/registry.py b/django/apps/registry.py index 464d69a89d..234a830fb9 100644 --- a/django/apps/registry.py +++ b/django/apps/registry.py @@ -42,6 +42,8 @@ class Apps: # Whether the registry is populated. self.apps_ready = self.models_ready = self.ready = False + # For the autoreloader. + self.ready_event = threading.Event() # Lock for thread-safe population. self._lock = threading.RLock() @@ -120,6 +122,7 @@ class Apps: app_config.ready() self.ready = True + self.ready_event.set() def check_apps_ready(self): """Raise an exception if all apps haven't been imported yet.""" diff --git a/django/core/management/commands/runserver.py b/django/core/management/commands/runserver.py index 69a94adbec..862da12c64 100644 --- a/django/core/management/commands/runserver.py +++ b/django/core/management/commands/runserver.py @@ -99,7 +99,7 @@ class Command(BaseCommand): use_reloader = options['use_reloader'] if use_reloader: - autoreload.main(self.inner_run, None, options) + autoreload.run_with_reloader(self.inner_run, **options) else: self.inner_run(None, **options) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index b2671d0819..a20aa0bd4a 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -264,9 +264,11 @@ class StateApps(Apps): app_configs = [AppConfigStub(label) for label in sorted([*real_apps, *app_labels])] super().__init__(app_configs) - # The lock gets in the way of copying as implemented in clone(), which - # is called whenever Django duplicates a StateApps before updating it. + # These locks get in the way of copying as implemented in clone(), + # which is called whenever Django duplicates a StateApps before + # updating it. self._lock = None + self.ready_event = None self.render_multiple([*models.values(), *self.real_models]) diff --git a/django/utils/autoreload.py b/django/utils/autoreload.py index abee72e485..708205917a 100644 --- a/django/utils/autoreload.py +++ b/django/utils/autoreload.py @@ -1,224 +1,52 @@ -# Autoreloading launcher. -# Borrowed from Peter Hunt and the CherryPy project (https://cherrypy.org/). -# Some taken from Ian Bicking's Paste (http://pythonpaste.org/). -# -# Portions copyright (c) 2004, CherryPy Team (team@cherrypy.org) -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * Neither the name of the CherryPy Team nor the names of its contributors -# may be used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - +import functools +import itertools +import logging import os +import pathlib import signal import subprocess import sys +import threading import time import traceback - -import _thread +from collections import defaultdict +from pathlib import Path +from types import ModuleType +from zipimport import zipimporter from django.apps import apps -from django.conf import settings from django.core.signals import request_finished +from django.dispatch import Signal +from django.utils.functional import cached_property +from django.utils.version import get_version_tuple -# This import does nothing, but it's necessary to avoid some race conditions -# in the threading module. See https://code.djangoproject.com/ticket/2330 . -try: - import threading # NOQA -except ImportError: - pass +autoreload_started = Signal() +file_changed = Signal(providing_args=['file_path', 'kind']) + +DJANGO_AUTORELOAD_ENV = 'RUN_MAIN' + +logger = logging.getLogger('django.utils.autoreload') + +# If an error is raised while importing a file, it's not placed in sys.modules. +# This means that any future modifications aren't caught. Keep a list of these +# file paths to allow watching them in the future. +_error_files = [] +_exception = None try: import termios except ImportError: termios = None -USE_INOTIFY = False + try: - # Test whether inotify is enabled and likely to work - import pyinotify - - fd = pyinotify.INotifyWrapper.create().inotify_init() - if fd >= 0: - USE_INOTIFY = True - os.close(fd) + import pywatchman except ImportError: - pass - -RUN_RELOADER = True - -FILE_MODIFIED = 1 -I18N_MODIFIED = 2 - -_mtimes = {} -_win = (sys.platform == "win32") - -_exception = None -_error_files = [] -_cached_modules = set() -_cached_filenames = [] - - -def gen_filenames(only_new=False): - """ - Return a list of filenames referenced in sys.modules and translation files. - """ - # N.B. ``list(...)`` is needed, because this runs in parallel with - # application code which might be mutating ``sys.modules``, and this will - # fail with RuntimeError: cannot mutate dictionary while iterating - global _cached_modules, _cached_filenames - module_values = set(sys.modules.values()) - _cached_filenames = clean_files(_cached_filenames) - if _cached_modules == module_values: - # No changes in module list, short-circuit the function - if only_new: - return [] - else: - return _cached_filenames + clean_files(_error_files) - - new_modules = module_values - _cached_modules - new_filenames = clean_files( - [filename.__file__ for filename in new_modules - if hasattr(filename, '__file__')]) - - if not _cached_filenames and settings.USE_I18N: - # Add the names of the .mo files that can be generated - # by compilemessages management command to the list of files watched. 
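A note on observing the new reloader (the LOGGING snippet below is an assumed settings-module sketch, not part of the patch): the module now logs through the 'django.utils.autoreload' logger declared above, so its debug output can be surfaced with ordinary Django logging configuration:

    # settings.py (sketch): route the reloader's debug output to the console.
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'handlers': {'console': {'class': 'logging.StreamHandler'}},
        'loggers': {
            'django.utils.autoreload': {'handlers': ['console'], 'level': 'DEBUG'},
        },
    }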
- basedirs = [os.path.join(os.path.dirname(os.path.dirname(__file__)), - 'conf', 'locale'), - 'locale'] - for app_config in reversed(list(apps.get_app_configs())): - basedirs.append(os.path.join(app_config.path, 'locale')) - basedirs.extend(settings.LOCALE_PATHS) - basedirs = [os.path.abspath(basedir) for basedir in basedirs - if os.path.isdir(basedir)] - for basedir in basedirs: - for dirpath, dirnames, locale_filenames in os.walk(basedir): - for filename in locale_filenames: - if filename.endswith('.mo'): - new_filenames.append(os.path.join(dirpath, filename)) - - _cached_modules = _cached_modules.union(new_modules) - _cached_filenames += new_filenames - if only_new: - return new_filenames + clean_files(_error_files) - else: - return _cached_filenames + clean_files(_error_files) - - -def clean_files(filelist): - filenames = [] - for filename in filelist: - if not filename: - continue - if filename.endswith(".pyc") or filename.endswith(".pyo"): - filename = filename[:-1] - if filename.endswith("$py.class"): - filename = filename[:-9] + ".py" - if os.path.exists(filename): - filenames.append(filename) - return filenames - - -def reset_translations(): - import gettext - from django.utils.translation import trans_real - gettext._translations = {} - trans_real._translations = {} - trans_real._default = None - trans_real._active = threading.local() - - -def inotify_code_changed(): - """ - Check for changed code using inotify. After being called - it blocks until a change event has been fired. - """ - class EventHandler(pyinotify.ProcessEvent): - modified_code = None - - def process_default(self, event): - if event.path.endswith('.mo'): - EventHandler.modified_code = I18N_MODIFIED - else: - EventHandler.modified_code = FILE_MODIFIED - - wm = pyinotify.WatchManager() - notifier = pyinotify.Notifier(wm, EventHandler()) - - def update_watch(sender=None, **kwargs): - if sender and getattr(sender, 'handles_files', False): - # No need to update watches when request serves files. - # (sender is supposed to be a django.core.handlers.BaseHandler subclass) - return - mask = ( - pyinotify.IN_MODIFY | - pyinotify.IN_DELETE | - pyinotify.IN_ATTRIB | - pyinotify.IN_MOVED_FROM | - pyinotify.IN_MOVED_TO | - pyinotify.IN_CREATE | - pyinotify.IN_DELETE_SELF | - pyinotify.IN_MOVE_SELF - ) - for path in gen_filenames(only_new=True): - wm.add_watch(path, mask) - - # New modules may get imported when a request is processed. - request_finished.connect(update_watch) - - # Block until an event happens. - update_watch() - notifier.check_events(timeout=None) - notifier.read_events() - notifier.process_events() - notifier.stop() - - # If we are here the code must have changed. 
- return EventHandler.modified_code - - -def code_changed(): - global _mtimes, _win - for filename in gen_filenames(): - stat = os.stat(filename) - mtime = stat.st_mtime - if _win: - mtime -= stat.st_ctime - if filename not in _mtimes: - _mtimes[filename] = mtime - continue - if mtime != _mtimes[filename]: - _mtimes = {} - try: - del _error_files[_error_files.index(filename)] - except ValueError: - pass - return I18N_MODIFIED if filename.endswith('.mo') else FILE_MODIFIED - return False + pywatchman = None def check_errors(fn): + @functools.wraps(fn) def wrapper(*args, **kwargs): global _exception try: @@ -245,7 +73,7 @@ def check_errors(fn): def raise_last_exception(): global _exception if _exception is not None: - raise _exception[1] + raise _exception[0](_exception[1]).with_traceback(_exception[2]) def ensure_echo_on(): @@ -264,60 +92,496 @@ def ensure_echo_on(): signal.signal(signal.SIGTTOU, old_handler) -def reloader_thread(): - ensure_echo_on() - if USE_INOTIFY: - fn = inotify_code_changed +def iter_all_python_module_files(): + # This is a hot path during reloading. Create a stable sorted list of + # modules based on the module name and pass it to iter_modules_and_files(). + # This ensures cached results are returned in the usual case that modules + # aren't loaded on the fly. + modules_view = sorted(list(sys.modules.items()), key=lambda i: i[0]) + modules = tuple(m[1] for m in modules_view) + return iter_modules_and_files(modules, frozenset(_error_files)) + + +@functools.lru_cache(maxsize=1) +def iter_modules_and_files(modules, extra_files): + """Iterate through all modules needed to be watched.""" + sys_file_paths = [] + for module in modules: + # During debugging (with PyDev) the 'typing.io' and 'typing.re' objects + # are added to sys.modules, however they are types not modules and so + # cause issues here. + if not isinstance(module, ModuleType) or module.__spec__ is None: + continue + spec = module.__spec__ + # Modules could be loaded from places without a concrete location. If + # this is the case, skip them. + if spec.has_location: + origin = spec.loader.archive if isinstance(spec.loader, zipimporter) else spec.origin + sys_file_paths.append(origin) + + results = set() + for filename in itertools.chain(sys_file_paths, extra_files): + if not filename: + continue + path = pathlib.Path(filename) + if not path.exists(): + # The module could have been removed, don't fail loudly if this + # is the case. + continue + results.add(path.resolve().absolute()) + return frozenset(results) + + +@functools.lru_cache(maxsize=1) +def common_roots(paths): + """ + Return a tuple of common roots that are shared between the given paths. + File system watchers operate on directories and aren't cheap to create. + Try to find the minimum set of directories to watch that encompass all of + the files that need to be watched. + """ + # Inspired from Werkzeug: + # https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py + # Create a sorted list of the path components, longest first. + path_parts = sorted([x.parts for x in paths], key=len, reverse=True) + tree = {} + for chunks in path_parts: + node = tree + # Add each part of the path to the tree. + for chunk in chunks: + node = node.setdefault(chunk, {}) + # Clear the last leaf in the tree. + node.clear() + + # Turn the tree into a list of Path instances. 
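A minimal illustration of common_roots() (the paths are invented; see also TestCommonRoots in the test changes further down): nested directories collapse into their shared parents, so the watcher only needs a handful of roots:

    from pathlib import Path
    from django.utils.autoreload import common_roots

    # /project/app and /project/app/migrations are both covered by /project,
    # so only two distinct roots survive.
    roots = common_roots(frozenset({
        Path('/project'),
        Path('/project/app'),
        Path('/project/app/migrations'),
        Path('/opt/venv/lib'),
    }))
    assert set(roots) == {Path('/project'), Path('/opt/venv/lib')}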
+ def _walk(node, path): + for prefix, child in node.items(): + yield from _walk(child, path + (prefix,)) + if not node: + yield Path(*path) + + return tuple(_walk(tree, ())) + + +def sys_path_directories(): + """ + Yield absolute directories from sys.path, ignoring entries that don't + exist. + """ + for path in sys.path: + path = Path(path) + if not path.exists(): + continue + path = path.resolve().absolute() + # If the path is a file (like a zip file), watch the parent directory. + if path.is_file(): + yield path.parent + else: + yield path + + +def get_child_arguments(): + """ + Return the executable. This contains a workaround for Windows if the + executable is reported to not have the .exe extension which can cause bugs + on reloading. + """ + import django.__main__ + + args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions] + if sys.argv[0] == django.__main__.__file__: + # The server was started with `python -m django runserver`. + args += ['-m', 'django'] + args += sys.argv[1:] else: - fn = code_changed - while RUN_RELOADER: - change = fn() - if change == FILE_MODIFIED: - sys.exit(3) # force reload - elif change == I18N_MODIFIED: - reset_translations() - time.sleep(1) + args += sys.argv + return args + + +def trigger_reload(filename): + logger.info('%s changed, reloading.', filename) + sys.exit(3) def restart_with_reloader(): - import django.__main__ + new_environ = {**os.environ, DJANGO_AUTORELOAD_ENV: 'true'} + args = get_child_arguments() while True: - args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions] - if sys.argv[0] == django.__main__.__file__: - # The server was started with `python -m django runserver`. - args += ['-m', 'django'] - args += sys.argv[1:] - else: - args += sys.argv - new_environ = {**os.environ, 'RUN_MAIN': 'true'} - exit_code = subprocess.call(args, env=new_environ) + exit_code = subprocess.call(args, env=new_environ, close_fds=False) if exit_code != 3: return exit_code -def python_reloader(main_func, args, kwargs): - if os.environ.get("RUN_MAIN") == "true": - _thread.start_new_thread(main_func, args, kwargs) +class BaseReloader: + def __init__(self): + self.extra_files = set() + self.directory_globs = defaultdict(set) + self._stop_condition = threading.Event() + + def watch_dir(self, path, glob): + path = Path(path) + if not path.is_absolute(): + raise ValueError('%s must be absolute.' % path) + logger.debug('Watching dir %s with glob %s.', path, glob) + self.directory_globs[path].add(glob) + + def watch_file(self, path): + path = Path(path) + if not path.is_absolute(): + raise ValueError('%s must be absolute.' % path) + logger.debug('Watching file %s.', path) + self.extra_files.add(path) + + def watched_files(self, include_globs=True): + """ + Yield all files that need to be watched, including module files and + files within globs. + """ + yield from iter_all_python_module_files() + yield from self.extra_files + if include_globs: + for directory, patterns in self.directory_globs.items(): + for pattern in patterns: + yield from directory.glob(pattern) + + def wait_for_apps_ready(self, app_reg, django_main_thread): + """ + Wait until Django reports that the apps have been loaded. If the given + thread has terminated before the apps are ready, then a SyntaxError or + other non-recoverable error has been raised. In that case, stop waiting + for the apps_ready event and continue processing. + + Return True if the thread is alive and the ready event has been + triggered, or False if the thread is terminated while waiting for the + event. 
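As a usage sketch of the watch API just defined (the receiver name, file, and directory here are invented for illustration): third-party code can listen for autoreload_started and register extra paths with the running reloader:

    from pathlib import Path

    from django.dispatch import receiver
    from django.utils.autoreload import autoreload_started

    @receiver(autoreload_started)
    def watch_extra_files(sender, **kwargs):
        # sender is the active reloader; the paths passed in must be absolute.
        sender.watch_file(Path('/etc/myproject/settings.ini'))
        sender.watch_dir(Path('/srv/myproject/conf'), '**/*.yaml')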
+ """ + while django_main_thread.is_alive(): + if app_reg.ready_event.wait(timeout=0.1): + return True + else: + logger.debug('Main Django thread has terminated before apps are ready.') + return False + + def run(self, django_main_thread): + logger.debug('Waiting for apps ready_event.') + self.wait_for_apps_ready(apps, django_main_thread) + from django.urls import get_resolver + # Prevent a race condition where URL modules aren't loaded when the + # reloader starts by accessing the urlconf_module property. + get_resolver().urlconf_module + logger.debug('Apps ready_event triggered. Sending autoreload_started signal.') + autoreload_started.send(sender=self) + self.run_loop() + + def run_loop(self): + ticker = self.tick() + while not self.should_stop: + try: + next(ticker) + except StopIteration: + break + self.stop() + + def tick(self): + """ + This generator is called in a loop from run_loop. It's important that + the method takes care of pausing or otherwise waiting for a period of + time. This split between run_loop() and tick() is to improve the + testability of the reloader implementations by decoupling the work they + do from the loop. + """ + raise NotImplementedError('subclasses must implement tick().') + + @classmethod + def check_availability(cls): + raise NotImplementedError('subclasses must implement check_availability().') + + def notify_file_changed(self, path): + results = file_changed.send(sender=self, file_path=path) + logger.debug('%s notified as changed. Signal results: %s.', path, results) + if not any(res[1] for res in results): + trigger_reload(path) + + # These are primarily used for testing. + @property + def should_stop(self): + return self._stop_condition.is_set() + + def stop(self): + self._stop_condition.set() + + +class StatReloader(BaseReloader): + SLEEP_TIME = 1 # Check for changes once per second. + + def tick(self): + state, previous_timestamp = {}, time.time() + while True: + state.update(self.loop_files(state, previous_timestamp)) + previous_timestamp = time.time() + time.sleep(self.SLEEP_TIME) + yield + + def loop_files(self, previous_times, previous_timestamp): + updated_times = {} + for path, mtime in self.snapshot_files(): + previous_time = previous_times.get(path) + # If there are overlapping globs, a file may be iterated twice. + if path in updated_times: + continue + # A new file has been detected. This could happen due to it being + # imported at runtime and only being polled now, or because the + # file was just created. Compare the file's mtime to the + # previous_timestamp and send a notification if it was created + # since the last poll. + is_newly_created = previous_time is None and mtime > previous_timestamp + is_changed = previous_time is not None and previous_time != mtime + if is_newly_created or is_changed: + logger.debug('File %s. is_changed: %s, is_new: %s', path, is_changed, is_newly_created) + logger.debug('File %s previous mtime: %s, current mtime: %s', path, previous_time, mtime) + self.notify_file_changed(path) + updated_times[path] = mtime + return updated_times + + def snapshot_files(self): + for file in self.watched_files(): + try: + mtime = file.stat().st_mtime + except OSError: + # This is thrown when the file does not exist. 
+ continue + yield file, mtime + + @classmethod + def check_availability(cls): + return True + + +class WatchmanUnavailable(RuntimeError): + pass + + +class WatchmanReloader(BaseReloader): + def __init__(self): + self.roots = defaultdict(set) + self.processed_request = threading.Event() + super().__init__() + + @cached_property + def client(self): + return pywatchman.client() + + def _watch_root(self, root): + # In practice this shouldn't occur, however, it's possible that a + # directory that doesn't exist yet is being watched. If it's outside of + # sys.path then this will end up a new root. How to handle this isn't + # clear: Not adding the root will likely break when subscribing to the + # changes, however, as this is currently an internal API, no files + # will be being watched outside of sys.path. Fixing this by checking + # inside watch_glob() and watch_dir() is expensive, instead this could + # could fall back to the StatReloader if this case is detected? For + # now, watching its parent, if possible, is sufficient. + if not root.exists(): + if not root.parent.exists(): + logger.warning('Unable to watch root dir %s as neither it or its parent exist.', root) + return + root = root.parent + result = self.client.query('watch-project', str(root.absolute())) + if 'warning' in result: + logger.warning('Watchman warning: %s', result['warning']) + logger.debug('Watchman watch-project result: %s', result) + return result['watch'], result.get('relative_path') + + @functools.lru_cache() + def _get_clock(self, root): + return self.client.query('clock', root)['clock'] + + def _subscribe(self, directory, name, expression): + root, rel_path = self._watch_root(directory) + query = { + 'expression': expression, + 'fields': ['name'], + 'since': self._get_clock(root), + 'dedup_results': True, + } + if rel_path: + query['relative_root'] = rel_path + logger.debug('Issuing watchman subscription %s, for root %s. Query: %s', name, root, query) + self.client.query('subscribe', root, name, query) + + def _subscribe_dir(self, directory, filenames): + if not directory.exists(): + if not directory.parent.exists(): + logger.warning('Unable to watch directory %s as neither it or its parent exist.', directory) + return + prefix = 'files-parent-%s' % directory.name + filenames = ['%s/%s' % (directory.name, filename) for filename in filenames] + directory = directory.parent + expression = ['name', filenames, 'wholename'] + else: + prefix = 'files' + expression = ['name', filenames] + self._subscribe(directory, '%s:%s' % (prefix, directory), expression) + + def _watch_glob(self, directory, patterns): + """ + Watch a directory with a specific glob. If the directory doesn't yet + exist, attempt to watch the parent directory and amend the patterns to + include this. It's important this method isn't called more than one per + directory when updating all subscriptions. Subsequent calls will + overwrite the named subscription, so it must include all possible glob + expressions. 
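For illustration only (the directory, glob, and the root/clock values are placeholders, and this assumes Watchman watches the directory directly so no relative_root is added): watching an existing /app/locale directory with the glob '**/*.mo' boils down to a subscription roughly like this being sent through pywatchman:

    root, clock = '/app/locale', 'c:1521680779:1234:1:1'  # as returned by Watchman
    query = {
        'expression': ['anyof', ['match', '**/*.mo', 'wholename']],
        'fields': ['name'],
        'since': clock,
        'dedup_results': True,
    }
    # client.query('subscribe', root, 'glob:/app/locale', query)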
+ """ + prefix = 'glob' + if not directory.exists(): + if not directory.parent.exists(): + logger.warning('Unable to watch directory %s as neither it or its parent exist.', directory) + return + prefix = 'glob-parent-%s' % directory.name + patterns = ['%s/%s' % (directory.name, pattern) for pattern in patterns] + directory = directory.parent + + expression = ['anyof'] + for pattern in patterns: + expression.append(['match', pattern, 'wholename']) + self._subscribe(directory, '%s:%s' % (prefix, directory), expression) + + def watched_roots(self, watched_files): + extra_directories = self.directory_globs.keys() + watched_file_dirs = [f.parent for f in watched_files] + sys_paths = list(sys_path_directories()) + return frozenset((*extra_directories, *watched_file_dirs, *sys_paths)) + + def _update_watches(self): + watched_files = list(self.watched_files(include_globs=False)) + found_roots = common_roots(self.watched_roots(watched_files)) + logger.debug('Watching %s files', len(watched_files)) + logger.debug('Found common roots: %s', found_roots) + # Setup initial roots for performance, shortest roots first. + for root in sorted(found_roots): + self._watch_root(root) + for directory, patterns in self.directory_globs.items(): + self._watch_glob(directory, patterns) + # Group sorted watched_files by their parent directory. + sorted_files = sorted(watched_files, key=lambda p: p.parent) + for directory, group in itertools.groupby(sorted_files, key=lambda p: p.parent): + # These paths need to be relative to the parent directory. + self._subscribe_dir(directory, [str(p.relative_to(directory)) for p in group]) + + def update_watches(self): try: - reloader_thread() - except KeyboardInterrupt: - pass - else: - try: - exit_code = restart_with_reloader() - if exit_code < 0: - os.kill(os.getpid(), -exit_code) + self._update_watches() + except Exception as ex: + # If the service is still available, raise the original exception. + if self.check_server_status(ex): + raise + + def _check_subscription(self, sub): + subscription = self.client.getSubscription(sub) + if not subscription: + return + logger.debug('Watchman subscription %s has results.', sub) + for result in subscription: + # When using watch-project, it's not simple to get the relative + # directory without storing some specific state. Store the full + # path to the directory in the subscription name, prefixed by its + # type (glob, files). + root_directory = Path(result['subscription'].split(':', 1)[1]) + logger.debug('Found root directory %s', root_directory) + for file in result.get('files', []): + self.notify_file_changed(root_directory / file) + + def request_processed(self, **kwargs): + logger.debug('Request processed. 
Setting update_watches event.') + self.processed_request.set() + + def tick(self): + request_finished.connect(self.request_processed) + self.update_watches() + while True: + if self.processed_request.is_set(): + self.update_watches() + self.processed_request.clear() + try: + self.client.receive() + except pywatchman.WatchmanError as ex: + self.check_server_status(ex) else: - sys.exit(exit_code) - except KeyboardInterrupt: - pass + for sub in list(self.client.subs.keys()): + self._check_subscription(sub) + yield + + def stop(self): + self.client.close() + super().stop() + + def check_server_status(self, inner_ex=None): + """Return True if the server is available.""" + try: + self.client.query('version') + except Exception: + raise WatchmanUnavailable(str(inner_ex)) from inner_ex + return True + + @classmethod + def check_availability(cls): + if not pywatchman: + raise WatchmanUnavailable('pywatchman not installed.') + client = pywatchman.client(timeout=0.01) + try: + result = client.capabilityCheck() + except Exception: + # The service is down? + raise WatchmanUnavailable('Cannot connect to the watchman service.') + version = get_version_tuple(result['version']) + # Watchman 4.9 includes multiple improvements to watching project + # directories as well as case insensitive filesystems. + logger.debug('Watchman version %s', version) + if version < (4, 9): + raise WatchmanUnavailable('Watchman 4.9 or later is required.') -def main(main_func, args=None, kwargs=None): - if args is None: - args = () - if kwargs is None: - kwargs = {} +def get_reloader(): + """Return the most suitable reloader for this environment.""" + try: + WatchmanReloader.check_availability() + except WatchmanUnavailable: + return StatReloader() + return WatchmanReloader() - wrapped_main_func = check_errors(main_func) - python_reloader(wrapped_main_func, args, kwargs) + +def start_django(reloader, main_func, *args, **kwargs): + ensure_echo_on() + + main_func = check_errors(main_func) + django_main_thread = threading.Thread(target=main_func, args=args, kwargs=kwargs) + django_main_thread.setDaemon(True) + django_main_thread.start() + + while not reloader.should_stop: + try: + reloader.run(django_main_thread) + except WatchmanUnavailable as ex: + # It's possible that the watchman service shuts down or otherwise + # becomes unavailable. In that case, use the StatReloader. + reloader = StatReloader() + logger.error('Error connecting to Watchman: %s', ex) + logger.info('Watching for file changes with %s', reloader.__class__.__name__) + + +def run_with_reloader(main_func, *args, **kwargs): + signal.signal(signal.SIGTERM, lambda *args: sys.exit(0)) + try: + if os.environ.get(DJANGO_AUTORELOAD_ENV) == 'true': + reloader = get_reloader() + logger.info('Watching for file changes with %s', reloader.__class__.__name__) + start_django(reloader, main_func, *args, **kwargs) + else: + try: + WatchmanReloader.check_availability() + except WatchmanUnavailable as e: + logger.info('Watchman unavailable: %s.', e) + exit_code = restart_with_reloader() + sys.exit(exit_code) + except KeyboardInterrupt: + pass diff --git a/django/utils/translation/__init__.py b/django/utils/translation/__init__.py index b1df722428..955a038109 100644 --- a/django/utils/translation/__init__.py +++ b/django/utils/translation/__init__.py @@ -4,6 +4,7 @@ Internationalization support. 
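As a usage sketch of the new entry point defined above (inner_run is a stand-in for whatever long-running callable a command wants reloaded; runserver passes self.inner_run and its options, as shown in the runserver.py hunk):

    from django.utils import autoreload

    def inner_run(*args, **options):
        ...  # stand-in for the long-running server loop

    # In the parent process this re-runs the current command with
    # DJANGO_AUTORELOAD_ENV set and restarts it whenever the child exits with
    # code 3; in the child it runs inner_run() in a daemon thread while the
    # selected reloader (Watchman or stat-based) watches for file changes.
    autoreload.run_with_reloader(inner_run)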
import re from contextlib import ContextDecorator +from django.utils.autoreload import autoreload_started, file_changed from django.utils.functional import lazy __all__ = [ @@ -52,6 +53,9 @@ class Trans: from django.conf import settings if settings.USE_I18N: from django.utils.translation import trans_real as trans + from django.utils.translation.reloader import watch_for_translation_changes, translation_file_changed + autoreload_started.connect(watch_for_translation_changes, dispatch_uid='translation_file_changed') + file_changed.connect(translation_file_changed, dispatch_uid='translation_file_changed') else: from django.utils.translation import trans_null as trans setattr(self, real_name, getattr(trans, real_name)) diff --git a/django/utils/translation/reloader.py b/django/utils/translation/reloader.py new file mode 100644 index 0000000000..8e2d320208 --- /dev/null +++ b/django/utils/translation/reloader.py @@ -0,0 +1,29 @@ +import threading +from pathlib import Path + +from django.apps import apps + + +def watch_for_translation_changes(sender, **kwargs): + """Register file watchers for .mo files in potential locale paths.""" + from django.conf import settings + + if settings.USE_I18N: + directories = [Path('locale')] + directories.extend(Path(config.path) / 'locale' for config in apps.get_app_configs()) + directories.extend(Path(p) for p in settings.LOCALE_PATHS) + for path in directories: + absolute_path = path.absolute() + sender.watch_dir(absolute_path, '**/*.mo') + + +def translation_file_changed(sender, file_path, **kwargs): + """Clear the internal translations cache if a .mo file is modified.""" + if file_path.suffix == '.mo': + import gettext + from django.utils.translation import trans_real + gettext._translations = {} + trans_real._translations = {} + trans_real._default = None + trans_real._active = threading.local() + return True diff --git a/docs/internals/contributing/writing-code/unit-tests.txt b/docs/internals/contributing/writing-code/unit-tests.txt index c3af1cd02f..3480e3f2ca 100644 --- a/docs/internals/contributing/writing-code/unit-tests.txt +++ b/docs/internals/contributing/writing-code/unit-tests.txt @@ -229,6 +229,7 @@ dependencies: * Pillow_ * PyYAML_ * pytz_ (required) +* pywatchman_ * setuptools_ * memcached_, plus a :ref:`supported Python binding ` * gettext_ (:ref:`gettext_on_windows`) @@ -258,6 +259,9 @@ and install the Geospatial libraries`. Each of these dependencies is optional. If you're missing any of them, the associated tests will be skipped. +To run some of the autoreload tests, you'll need to install the Watchman_ +service. + .. _argon2-cffi: https://pypi.org/project/argon2_cffi/ .. _bcrypt: https://pypi.org/project/bcrypt/ .. _docutils: https://pypi.org/project/docutils/ @@ -267,12 +271,14 @@ associated tests will be skipped. .. _Pillow: https://pypi.org/project/Pillow/ .. _PyYAML: https://pyyaml.org/wiki/PyYAML .. _pytz: https://pypi.org/project/pytz/ +.. _pywatchman: https://pypi.org/project/pywatchman/ .. _setuptools: https://pypi.org/project/setuptools/ .. _memcached: https://memcached.org/ .. _gettext: https://www.gnu.org/software/gettext/manual/gettext.html .. _selenium: https://pypi.org/project/selenium/ .. _sqlparse: https://pypi.org/project/sqlparse/ .. _pip requirements files: https://pip.pypa.io/en/latest/user_guide/#requirements-files +.. 
_Watchman: https://facebook.github.io/watchman/ Code coverage ------------- diff --git a/docs/ref/django-admin.txt b/docs/ref/django-admin.txt index e8371897e2..22a5c4dbac 100644 --- a/docs/ref/django-admin.txt +++ b/docs/ref/django-admin.txt @@ -879,13 +879,26 @@ needed. You don't need to restart the server for code changes to take effect. However, some actions like adding files don't trigger a restart, so you'll have to restart the server in these cases. -If you are using Linux and install `pyinotify`_, kernel signals will be used to -autoreload the server (rather than polling file modification timestamps each -second). This offers better scaling to large projects, reduction in response -time to code modification, more robust change detection, and battery usage -reduction. +If you're using Linux or MacOS and install both `pywatchman`_ and the +`Watchman`_ service, kernel signals will be used to autoreload the server +(rather than polling file modification timestamps each second). This offers +better performance on large projects, reduced response time after code changes, +more robust change detection, and a reduction in power usage. -.. _pyinotify: https://pypi.org/project/pyinotify/ +.. admonition:: Large directories with many files may cause performance issues + + When using Watchman with a project that includes large non-Python + directories like ``node_modules``, it's advisable to ignore this directory + for optimal performance. See the `watchman documentation`_ for information + on how to do this. + +.. _Watchman: https://facebook.github.io/watchman/ +.. _pywatchman: https://pypi.org/project/pywatchman/ +.. _watchman documentation: https://facebook.github.io/watchman/docs/config.html#ignore_dirs + +.. versionchanged:: 2.2 + + Watchman support replaced support for `pyinotify`. When you start the server, and each time you change Python code while the server is running, the system check framework will check your entire Django diff --git a/docs/releases/2.2.txt b/docs/releases/2.2.txt index 45d436ebe1..13f7617888 100644 --- a/docs/releases/2.2.txt +++ b/docs/releases/2.2.txt @@ -203,6 +203,10 @@ Management Commands comments in generated migration file(s). This option is also available for :djadmin:`squashmigrations`. +* :djadmin:`runserver` can now use `Watchman + `_ to improve the performance of + watching a large number of files for changes. + Migrations ~~~~~~~~~~ @@ -487,6 +491,8 @@ Miscellaneous :func:`~django.contrib.sitemaps.ping_google` function, set the new ``sitemap_uses_https`` argument to ``False``. +* :djadmin:`runserver` no longer supports `pyinotify` (replaced by Watchman). + .. _deprecated-features-2.2: Features deprecated in 2.2 diff --git a/tests/apps/tests.py b/tests/apps/tests.py index cd22a4d45c..566aec60c3 100644 --- a/tests/apps/tests.py +++ b/tests/apps/tests.py @@ -48,6 +48,9 @@ class AppsTests(SimpleTestCase): self.assertIs(apps.ready, True) # Non-master app registries are populated in __init__. 
self.assertIs(Apps().ready, True) + # The condition is set when apps are ready + self.assertIs(apps.ready_event.is_set(), True) + self.assertIs(Apps().ready_event.is_set(), True) def test_bad_app_config(self): """ diff --git a/tests/i18n/tests.py b/tests/i18n/tests.py index 7b54089cf2..2377c8992e 100644 --- a/tests/i18n/tests.py +++ b/tests/i18n/tests.py @@ -7,9 +7,12 @@ import re import tempfile from contextlib import contextmanager from importlib import import_module +from pathlib import Path from threading import local from unittest import mock +import _thread + from django import forms from django.apps import AppConfig from django.conf import settings @@ -33,6 +36,9 @@ from django.utils.translation import ( npgettext, npgettext_lazy, pgettext, to_language, to_locale, trans_null, trans_real, ugettext, ugettext_lazy, ungettext, ungettext_lazy, ) +from django.utils.translation.reloader import ( + translation_file_changed, watch_for_translation_changes, +) from .forms import CompanyForm, I18nForm, SelectDateForm from .models import Company, TestModel @@ -1790,3 +1796,65 @@ class NonDjangoLanguageTests(SimpleTestCase): def test_plural_non_django_language(self): self.assertEqual(get_language(), 'xyz') self.assertEqual(ngettext('year', 'years', 2), 'years') + + +@override_settings(USE_I18N=True) +class WatchForTranslationChangesTests(SimpleTestCase): + @override_settings(USE_I18N=False) + def test_i18n_disabled(self): + mocked_sender = mock.MagicMock() + watch_for_translation_changes(mocked_sender) + mocked_sender.watch_dir.assert_not_called() + + def test_i18n_enabled(self): + mocked_sender = mock.MagicMock() + watch_for_translation_changes(mocked_sender) + self.assertGreater(mocked_sender.watch_dir.call_count, 1) + + def test_i18n_locale_paths(self): + mocked_sender = mock.MagicMock() + with tempfile.TemporaryDirectory() as app_dir: + with self.settings(LOCALE_PATHS=[app_dir]): + watch_for_translation_changes(mocked_sender) + mocked_sender.watch_dir.assert_any_call(Path(app_dir), '**/*.mo') + + def test_i18n_app_dirs(self): + mocked_sender = mock.MagicMock() + with self.settings(INSTALLED_APPS=['tests.i18n.sampleproject']): + watch_for_translation_changes(mocked_sender) + project_dir = Path(__file__).parent / 'sampleproject' / 'locale' + mocked_sender.watch_dir.assert_any_call(project_dir, '**/*.mo') + + def test_i18n_local_locale(self): + mocked_sender = mock.MagicMock() + watch_for_translation_changes(mocked_sender) + locale_dir = Path(__file__).parent / 'locale' + mocked_sender.watch_dir.assert_any_call(locale_dir, '**/*.mo') + + +class TranslationFileChangedTests(SimpleTestCase): + def setUp(self): + self.gettext_translations = gettext_module._translations.copy() + self.trans_real_translations = trans_real._translations.copy() + + def tearDown(self): + gettext._translations = self.gettext_translations + trans_real._translations = self.trans_real_translations + + def test_ignores_non_mo_files(self): + gettext_module._translations = {'foo': 'bar'} + path = Path('test.py') + self.assertIsNone(translation_file_changed(None, path)) + self.assertEqual(gettext_module._translations, {'foo': 'bar'}) + + def test_resets_cache_with_mo_files(self): + gettext_module._translations = {'foo': 'bar'} + trans_real._translations = {'foo': 'bar'} + trans_real._default = 1 + trans_real._active = False + path = Path('test.mo') + self.assertIs(translation_file_changed(None, path), True) + self.assertEqual(gettext_module._translations, {}) + self.assertEqual(trans_real._translations, {}) + 
self.assertIsNone(trans_real._default) + self.assertIsInstance(trans_real._active, _thread._local) diff --git a/tests/requirements/py3.txt b/tests/requirements/py3.txt index cc84522ca0..3f0a01e164 100644 --- a/tests/requirements/py3.txt +++ b/tests/requirements/py3.txt @@ -9,6 +9,7 @@ Pillow != 5.4.0 pylibmc; sys.platform != 'win32' python-memcached >= 1.59 pytz +pywatchman; sys.platform != 'win32' PyYAML selenium sqlparse diff --git a/tests/utils_tests/locale/nl/LC_MESSAGES/django.mo b/tests/utils_tests/locale/nl/LC_MESSAGES/django.mo deleted file mode 100644 index 3ead8f2a31..0000000000 Binary files a/tests/utils_tests/locale/nl/LC_MESSAGES/django.mo and /dev/null differ diff --git a/tests/utils_tests/locale/nl/LC_MESSAGES/django.po b/tests/utils_tests/locale/nl/LC_MESSAGES/django.po deleted file mode 100644 index 6633f12b39..0000000000 --- a/tests/utils_tests/locale/nl/LC_MESSAGES/django.po +++ /dev/null @@ -1,17 +0,0 @@ -# SOME DESCRIPTIVE TITLE. -# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER -# This file is distributed under the same license as the PACKAGE package. -# FIRST AUTHOR , YEAR. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: PACKAGE VERSION\n" -"Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2007-09-15 19:15+0200\n" -"PO-Revision-Date: 2010-05-12 12:41-0300\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" diff --git a/tests/utils_tests/test_autoreload.py b/tests/utils_tests/test_autoreload.py index 486d62cd18..6aa272dd9a 100644 --- a/tests/utils_tests/test_autoreload.py +++ b/tests/utils_tests/test_autoreload.py @@ -1,257 +1,279 @@ -import gettext +import contextlib import os +import py_compile import shutil +import sys import tempfile +import threading +import time +import zipfile from importlib import import_module -from unittest import mock +from pathlib import Path +from unittest import mock, skip -import _thread - -from django import conf -from django.contrib import admin -from django.test import SimpleTestCase, override_settings +from django.apps.registry import Apps +from django.test import SimpleTestCase from django.test.utils import extend_sys_path from django.utils import autoreload -from django.utils.translation import trans_real - -LOCALE_PATH = os.path.join(os.path.dirname(__file__), 'locale') +from django.utils.autoreload import WatchmanUnavailable -class TestFilenameGenerator(SimpleTestCase): +class TestIterModulesAndFiles(SimpleTestCase): + def import_and_cleanup(self, name): + import_module(name) + self.addCleanup(lambda: sys.path_importer_cache.clear()) + self.addCleanup(lambda: sys.modules.pop(name, None)) def clear_autoreload_caches(self): - autoreload._cached_modules = set() - autoreload._cached_filenames = [] + autoreload.iter_modules_and_files.cache_clear() def assertFileFound(self, filename): + # Some temp directories are symlinks. Python resolves these fully while + # importing. 
+ resolved_filename = filename.resolve() self.clear_autoreload_caches() # Test uncached access - self.assertIn(filename, autoreload.gen_filenames()) + self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files())) # Test cached access - self.assertIn(filename, autoreload.gen_filenames()) + self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files())) + self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) def assertFileNotFound(self, filename): + resolved_filename = filename.resolve() self.clear_autoreload_caches() # Test uncached access - self.assertNotIn(filename, autoreload.gen_filenames()) + self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files())) # Test cached access - self.assertNotIn(filename, autoreload.gen_filenames()) + self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files())) + self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) - def assertFileFoundOnlyNew(self, filename): - self.clear_autoreload_caches() - # Test uncached access - self.assertIn(filename, autoreload.gen_filenames(only_new=True)) - # Test cached access - self.assertNotIn(filename, autoreload.gen_filenames(only_new=True)) - - def test_django_locales(self): - """ - gen_filenames() yields the built-in Django locale files. - """ - django_dir = os.path.join(os.path.dirname(conf.__file__), 'locale') - django_mo = os.path.join(django_dir, 'nl', 'LC_MESSAGES', 'django.mo') - self.assertFileFound(django_mo) - - @override_settings(LOCALE_PATHS=[LOCALE_PATH]) - def test_locale_paths_setting(self): - """ - gen_filenames also yields from LOCALE_PATHS locales. - """ - locale_paths_mo = os.path.join(LOCALE_PATH, 'nl', 'LC_MESSAGES', 'django.mo') - self.assertFileFound(locale_paths_mo) - - @override_settings(INSTALLED_APPS=[]) - def test_project_root_locale(self): - """ - gen_filenames() also yields from the current directory (project root). - """ - old_cwd = os.getcwd() - os.chdir(os.path.dirname(__file__)) - current_dir = os.path.join(os.path.dirname(__file__), 'locale') - current_dir_mo = os.path.join(current_dir, 'nl', 'LC_MESSAGES', 'django.mo') - try: - self.assertFileFound(current_dir_mo) - finally: - os.chdir(old_cwd) - - @override_settings(INSTALLED_APPS=['django.contrib.admin']) - def test_app_locales(self): - """ - gen_filenames() also yields from locale dirs in installed apps. - """ - admin_dir = os.path.join(os.path.dirname(admin.__file__), 'locale') - admin_mo = os.path.join(admin_dir, 'nl', 'LC_MESSAGES', 'django.mo') - self.assertFileFound(admin_mo) - - @override_settings(USE_I18N=False) - def test_no_i18n(self): - """ - If i18n machinery is disabled, there is no need for watching the - locale files. - """ - django_dir = os.path.join(os.path.dirname(conf.__file__), 'locale') - django_mo = os.path.join(django_dir, 'nl', 'LC_MESSAGES', 'django.mo') - self.assertFileNotFound(django_mo) - - def test_paths_are_native_strings(self): - for filename in autoreload.gen_filenames(): - self.assertIsInstance(filename, str) - - def test_only_new_files(self): - """ - When calling a second time gen_filenames with only_new = True, only - files from newly loaded modules should be given. 
- """ + def temporary_file(self, filename): dirname = tempfile.mkdtemp() - filename = os.path.join(dirname, 'test_only_new_module.py') self.addCleanup(shutil.rmtree, dirname) - with open(filename, 'w'): - pass + return Path(dirname) / filename - # Test uncached access - self.clear_autoreload_caches() - filenames = set(autoreload.gen_filenames(only_new=True)) - filenames_reference = set(autoreload.gen_filenames()) - self.assertEqual(filenames, filenames_reference) + def test_paths_are_pathlib_instances(self): + for filename in autoreload.iter_all_python_module_files(): + self.assertIsInstance(filename, Path) - # Test cached access: no changes - filenames = set(autoreload.gen_filenames(only_new=True)) - self.assertEqual(filenames, set()) - - # Test cached access: add a module - with extend_sys_path(dirname): - import_module('test_only_new_module') - filenames = set(autoreload.gen_filenames(only_new=True)) - self.assertEqual(filenames, {filename}) - - def test_deleted_removed(self): + def test_file_added(self): """ - When a file is deleted, gen_filenames() no longer returns it. + When a file is added, it's returned by iter_all_python_module_files(). """ - dirname = tempfile.mkdtemp() - filename = os.path.join(dirname, 'test_deleted_removed_module.py') - self.addCleanup(shutil.rmtree, dirname) - with open(filename, 'w'): - pass + filename = self.temporary_file('test_deleted_removed_module.py') + filename.touch() - with extend_sys_path(dirname): - import_module('test_deleted_removed_module') - self.assertFileFound(filename) + with extend_sys_path(str(filename.parent)): + self.import_and_cleanup('test_deleted_removed_module') - os.unlink(filename) - self.assertFileNotFound(filename) + self.assertFileFound(filename.absolute()) def test_check_errors(self): """ When a file containing an error is imported in a function wrapped by check_errors(), gen_filenames() returns it. """ - dirname = tempfile.mkdtemp() - filename = os.path.join(dirname, 'test_syntax_error.py') - self.addCleanup(shutil.rmtree, dirname) - with open(filename, 'w') as f: - f.write("Ceci n'est pas du Python.") + filename = self.temporary_file('test_syntax_error.py') + filename.write_text("Ceci n'est pas du Python.") - with extend_sys_path(dirname): + with extend_sys_path(str(filename.parent)): with self.assertRaises(SyntaxError): autoreload.check_errors(import_module)('test_syntax_error') self.assertFileFound(filename) - def test_check_errors_only_new(self): - """ - When a file containing an error is imported in a function wrapped by - check_errors(), gen_filenames(only_new=True) returns it. - """ - dirname = tempfile.mkdtemp() - filename = os.path.join(dirname, 'test_syntax_error.py') - self.addCleanup(shutil.rmtree, dirname) - with open(filename, 'w') as f: - f.write("Ceci n'est pas du Python.") - - with extend_sys_path(dirname): - with self.assertRaises(SyntaxError): - autoreload.check_errors(import_module)('test_syntax_error') - self.assertFileFoundOnlyNew(filename) - def test_check_errors_catches_all_exceptions(self): """ Since Python may raise arbitrary exceptions when importing code, check_errors() must catch Exception, not just some subclasses. 
""" - dirname = tempfile.mkdtemp() - filename = os.path.join(dirname, 'test_exception.py') - self.addCleanup(shutil.rmtree, dirname) - with open(filename, 'w') as f: - f.write("raise Exception") - - with extend_sys_path(dirname): + filename = self.temporary_file('test_exception.py') + filename.write_text('raise Exception') + with extend_sys_path(str(filename.parent)): with self.assertRaises(Exception): autoreload.check_errors(import_module)('test_exception') self.assertFileFound(filename) + def test_zip_reload(self): + """ + Modules imported from zipped files have their archive location included + in the result. + """ + zip_file = self.temporary_file('zip_import.zip') + with zipfile.ZipFile(str(zip_file), 'w', zipfile.ZIP_DEFLATED) as zipf: + zipf.writestr('test_zipped_file.py', '') -class CleanFilesTests(SimpleTestCase): - TEST_MAP = { - # description: (input_file_list, expected_returned_file_list) - 'falsies': ([None, False], []), - 'pycs': (['myfile.pyc'], ['myfile.py']), - 'pyos': (['myfile.pyo'], ['myfile.py']), - '$py.class': (['myclass$py.class'], ['myclass.py']), - 'combined': ( - [None, 'file1.pyo', 'file2.pyc', 'myclass$py.class'], - ['file1.py', 'file2.py', 'myclass.py'], + with extend_sys_path(str(zip_file)): + self.import_and_cleanup('test_zipped_file') + self.assertFileFound(zip_file) + + def test_bytecode_conversion_to_source(self): + """.pyc and .pyo files are included in the files list.""" + filename = self.temporary_file('test_compiled.py') + filename.touch() + compiled_file = Path(py_compile.compile(str(filename), str(filename.with_suffix('.pyc')))) + filename.unlink() + with extend_sys_path(str(compiled_file.parent)): + self.import_and_cleanup('test_compiled') + self.assertFileFound(compiled_file) + + +class TestCommonRoots(SimpleTestCase): + def test_common_roots(self): + paths = ( + Path('/first/second'), + Path('/first/second/third'), + Path('/first/'), + Path('/root/first/'), ) - } - - def _run_tests(self, mock_files_exist=True): - with mock.patch('django.utils.autoreload.os.path.exists', return_value=mock_files_exist): - for description, values in self.TEST_MAP.items(): - filenames, expected_returned_filenames = values - self.assertEqual( - autoreload.clean_files(filenames), - expected_returned_filenames if mock_files_exist else [], - msg='{} failed for input file list: {}; returned file list: {}'.format( - description, filenames, expected_returned_filenames - ), - ) - - def test_files_exist(self): - """ - If the file exists, any compiled files (pyc, pyo, $py.class) are - transformed as their source files. - """ - self._run_tests() - - def test_files_do_not_exist(self): - """ - If the files don't exist, they aren't in the returned file list. 
- """ - self._run_tests(mock_files_exist=False) + results = autoreload.common_roots(paths) + self.assertCountEqual(results, [Path('/first/'), Path('/root/first/')]) -class ResetTranslationsTests(SimpleTestCase): - +class TestSysPathDirectories(SimpleTestCase): def setUp(self): - self.gettext_translations = gettext._translations.copy() - self.trans_real_translations = trans_real._translations.copy() + self._directory = tempfile.TemporaryDirectory() + self.directory = Path(self._directory.name).resolve().absolute() + self.file = self.directory / 'test' + self.file.touch() def tearDown(self): - gettext._translations = self.gettext_translations - trans_real._translations = self.trans_real_translations + self._directory.cleanup() - def test_resets_gettext(self): - gettext._translations = {'foo': 'bar'} - autoreload.reset_translations() - self.assertEqual(gettext._translations, {}) + def test_sys_paths_with_directories(self): + with extend_sys_path(str(self.file)): + paths = list(autoreload.sys_path_directories()) + self.assertIn(self.file.parent, paths) - def test_resets_trans_real(self): - trans_real._translations = {'foo': 'bar'} - trans_real._default = 1 - trans_real._active = False - autoreload.reset_translations() - self.assertEqual(trans_real._translations, {}) - self.assertIsNone(trans_real._default) - self.assertIsInstance(trans_real._active, _thread._local) + def test_sys_paths_non_existing(self): + nonexistant_file = Path(self.directory.name) / 'does_not_exist' + with extend_sys_path(str(nonexistant_file)): + paths = list(autoreload.sys_path_directories()) + self.assertNotIn(nonexistant_file, paths) + self.assertNotIn(nonexistant_file.parent, paths) + + def test_sys_paths_absolute(self): + paths = list(autoreload.sys_path_directories()) + self.assertTrue(all(p.is_absolute() for p in paths)) + + def test_sys_paths_directories(self): + with extend_sys_path(str(self.directory)): + paths = list(autoreload.sys_path_directories()) + self.assertIn(self.directory, paths) + + +class GetReloaderTests(SimpleTestCase): + @mock.patch('django.utils.autoreload.WatchmanReloader') + def test_watchman_unavailable(self, mocked_watchman): + mocked_watchman.check_availability.side_effect = WatchmanUnavailable + self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader) + + @mock.patch.object(autoreload.WatchmanReloader, 'check_availability') + def test_watchman_available(self, mocked_available): + # If WatchmanUnavailable isn't raised, Watchman will be chosen. 
+ mocked_available.return_value = None + result = autoreload.get_reloader() + self.assertIsInstance(result, autoreload.WatchmanReloader) + + +class RunWithReloaderTests(SimpleTestCase): + @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'}) + @mock.patch('django.utils.autoreload.get_reloader') + def test_swallows_keyboard_interrupt(self, mocked_get_reloader): + mocked_get_reloader.side_effect = KeyboardInterrupt() + autoreload.run_with_reloader(lambda: None) # No exception + + @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'false'}) + @mock.patch('django.utils.autoreload.restart_with_reloader') + def test_calls_sys_exit(self, mocked_restart_reloader): + mocked_restart_reloader.return_value = 1 + with self.assertRaises(SystemExit) as exc: + autoreload.run_with_reloader(lambda: None) + self.assertEqual(exc.exception.code, 1) + + @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'}) + @mock.patch('django.utils.autoreload.start_django') + @mock.patch('django.utils.autoreload.get_reloader') + def test_calls_start_django(self, mocked_reloader, mocked_start_django): + mocked_reloader.return_value = mock.sentinel.RELOADER + autoreload.run_with_reloader(mock.sentinel.METHOD) + self.assertEqual(mocked_start_django.call_count, 1) + self.assertSequenceEqual( + mocked_start_django.call_args[0], + [mock.sentinel.RELOADER, mock.sentinel.METHOD] + ) + + +class StartDjangoTests(SimpleTestCase): + @mock.patch('django.utils.autoreload.StatReloader') + def test_watchman_becomes_unavailable(self, mocked_stat): + mocked_stat.should_stop.return_value = True + fake_reloader = mock.MagicMock() + fake_reloader.should_stop = False + fake_reloader.run.side_effect = autoreload.WatchmanUnavailable() + + autoreload.start_django(fake_reloader, lambda: None) + self.assertEqual(mocked_stat.call_count, 1) + + @mock.patch('django.utils.autoreload.ensure_echo_on') + def test_echo_on_called(self, mocked_echo): + fake_reloader = mock.MagicMock() + autoreload.start_django(fake_reloader, lambda: None) + self.assertEqual(mocked_echo.call_count, 1) + + @mock.patch('django.utils.autoreload.check_errors') + def test_check_errors_called(self, mocked_check_errors): + fake_method = mock.MagicMock(return_value=None) + fake_reloader = mock.MagicMock() + autoreload.start_django(fake_reloader, fake_method) + self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method]) + + @mock.patch('threading.Thread') + @mock.patch('django.utils.autoreload.check_errors') + def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread): + fake_reloader = mock.MagicMock() + fake_main_func = mock.MagicMock() + fake_thread = mock.MagicMock() + mocked_check_errors.return_value = fake_main_func + mocked_thread.return_value = fake_thread + autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123) + self.assertEqual(mocked_thread.call_count, 1) + self.assertEqual( + mocked_thread.call_args[1], + {'target': fake_main_func, 'args': (123,), 'kwargs': {'abc': 123}} + ) + self.assertSequenceEqual(fake_thread.setDaemon.call_args[0], [True]) + self.assertTrue(fake_thread.start.called) + + +class TestCheckErrors(SimpleTestCase): + def test_mutates_error_files(self): + fake_method = mock.MagicMock(side_effect=RuntimeError()) + wrapped = autoreload.check_errors(fake_method) + with mock.patch.object(autoreload, '_error_files') as mocked_error_files: + with self.assertRaises(RuntimeError): + wrapped() + self.assertEqual(mocked_error_files.append.call_count, 1) + + +class 
+    @mock.patch('django.utils.autoreload._exception', None)
+    def test_no_exception(self):
+        # Should raise no exception if _exception is None
+        autoreload.raise_last_exception()
+
+    def test_raises_exception(self):
+        class MyException(Exception):
+            pass
+
+        # Create an exception
+        try:
+            raise MyException('Test Message')
+        except MyException:
+            exc_info = sys.exc_info()
+
+        with mock.patch('django.utils.autoreload._exception', exc_info):
+            with self.assertRaises(MyException, msg='Test Message'):
+                autoreload.raise_last_exception()
 
 
 class RestartWithReloaderTests(SimpleTestCase):
@@ -286,3 +308,363 @@ class RestartWithReloaderTests(SimpleTestCase):
             autoreload.restart_with_reloader()
             self.assertEqual(mock_call.call_count, 1)
             self.assertEqual(mock_call.call_args[0][0], [self.executable, '-Wall', '-m', 'django'] + argv[1:])
+
+
+class ReloaderTests(SimpleTestCase):
+    RELOADER_CLS = None
+
+    def setUp(self):
+        self._tempdir = tempfile.TemporaryDirectory()
+        self.tempdir = Path(self._tempdir.name).resolve().absolute()
+        self.existing_file = self.ensure_file(self.tempdir / 'test.py')
+        self.nonexistent_file = (self.tempdir / 'does_not_exist.py').absolute()
+        self.reloader = self.RELOADER_CLS()
+
+    def tearDown(self):
+        self._tempdir.cleanup()
+        self.reloader.stop()
+
+    def ensure_file(self, path):
+        path.parent.mkdir(exist_ok=True, parents=True)
+        path.touch()
+        # On Linux and Windows, updating the mtime of a file using touch() will
+        # set a timestamp value that is in the past, as the time value for the
+        # last kernel tick is used rather than the correct absolute time.
+        # To make testing simpler, set the mtime to the time observed when this
+        # function is called.
+        self.set_mtime(path, time.time())
+        return path.absolute()
+
+    def set_mtime(self, fp, value):
+        os.utime(str(fp), (value, value))
+
+    def increment_mtime(self, fp, by=1):
+        current_time = time.time()
+        self.set_mtime(fp, current_time + by)
+
+    @contextlib.contextmanager
+    def tick_twice(self):
+        ticker = self.reloader.tick()
+        next(ticker)
+        yield
+        next(ticker)
+
+
+class IntegrationTests:
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_file(self, mocked_modules, notify_mock):
+        self.reloader.watch_file(self.existing_file)
+        with self.tick_twice():
+            self.increment_mtime(self.existing_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_nonexistent_file(self, mocked_modules, notify_mock):
+        self.reloader.watch_file(self.nonexistent_file)
+        with self.tick_twice():
+            self.ensure_file(self.nonexistent_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [self.nonexistent_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_nonexistent_file_in_non_existing_directory(self, mocked_modules, notify_mock):
+        non_existing_directory = self.tempdir / 'non_existing_dir'
+        nonexistent_file = non_existing_directory / 'test'
+        self.reloader.watch_file(nonexistent_file)
+        with self.tick_twice():
+            self.ensure_file(nonexistent_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [nonexistent_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_glob(self, mocked_modules, notify_mock):
+        non_py_file = self.ensure_file(self.tempdir / 'non_py_file')
+        self.reloader.watch_dir(self.tempdir, '*.py')
+        with self.tick_twice():
+            self.increment_mtime(non_py_file)
+            self.increment_mtime(self.existing_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_glob_non_existing_directory(self, mocked_modules, notify_mock):
+        non_existing_directory = self.tempdir / 'does_not_exist'
+        nonexistent_file = non_existing_directory / 'test.py'
+        self.reloader.watch_dir(non_existing_directory, '*.py')
+        with self.tick_twice():
+            self.ensure_file(nonexistent_file)
+            self.set_mtime(nonexistent_file, time.time())
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [nonexistent_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_multiple_globs(self, mocked_modules, notify_mock):
+        self.ensure_file(self.tempdir / 'x.test')
+        self.reloader.watch_dir(self.tempdir, '*.py')
+        self.reloader.watch_dir(self.tempdir, '*.test')
+        with self.tick_twice():
+            self.increment_mtime(self.existing_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_overlapping_globs(self, mocked_modules, notify_mock):
+        self.reloader.watch_dir(self.tempdir, '*.py')
+        self.reloader.watch_dir(self.tempdir, '*.p*')
+        with self.tick_twice():
+            self.increment_mtime(self.existing_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_glob_recursive(self, mocked_modules, notify_mock):
+        non_py_file = self.ensure_file(self.tempdir / 'dir' / 'non_py_file')
+        py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
+        self.reloader.watch_dir(self.tempdir, '**/*.py')
+        with self.tick_twice():
+            self.increment_mtime(non_py_file)
+            self.increment_mtime(py_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [py_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_multiple_recursive_globs(self, mocked_modules, notify_mock):
+        non_py_file = self.ensure_file(self.tempdir / 'dir' / 'test.txt')
+        py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
+        self.reloader.watch_dir(self.tempdir, '**/*.txt')
+        self.reloader.watch_dir(self.tempdir, '**/*.py')
+        with self.tick_twice():
+            self.increment_mtime(non_py_file)
+            self.increment_mtime(py_file)
+        self.assertEqual(notify_mock.call_count, 2)
+        self.assertCountEqual(notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_nested_glob_recursive(self, mocked_modules, notify_mock):
+        inner_py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
+        self.reloader.watch_dir(self.tempdir, '**/*.py')
+        self.reloader.watch_dir(inner_py_file.parent, '**/*.py')
+        with self.tick_twice():
+            self.increment_mtime(inner_py_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [inner_py_file])
+
+    @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
+    @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
+    def test_overlapping_glob_recursive(self, mocked_modules, notify_mock):
+        py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
+        self.reloader.watch_dir(self.tempdir, '**/*.p*')
+        self.reloader.watch_dir(self.tempdir, '**/*.py*')
+        with self.tick_twice():
+            self.increment_mtime(py_file)
+        self.assertEqual(notify_mock.call_count, 1)
+        self.assertCountEqual(notify_mock.call_args[0], [py_file])
+
+
+class BaseReloaderTests(ReloaderTests):
+    RELOADER_CLS = autoreload.BaseReloader
+
+    def test_watch_without_absolute(self):
+        with self.assertRaisesMessage(ValueError, 'test.py must be absolute.'):
+            self.reloader.watch_file('test.py')
+
+    def test_watch_with_single_file(self):
+        self.reloader.watch_file(self.existing_file)
+        watched_files = list(self.reloader.watched_files())
+        self.assertIn(self.existing_file, watched_files)
+
+    def test_watch_with_glob(self):
+        self.reloader.watch_dir(self.tempdir, '*.py')
+        watched_files = list(self.reloader.watched_files())
+        self.assertIn(self.existing_file, watched_files)
+
+    def test_watch_files_with_recursive_glob(self):
+        inner_file = self.ensure_file(self.tempdir / 'test' / 'test.py')
+        self.reloader.watch_dir(self.tempdir, '**/*.py')
+        watched_files = list(self.reloader.watched_files())
+        self.assertIn(self.existing_file, watched_files)
+        self.assertIn(inner_file, watched_files)
+
+    def test_run_loop_catches_stopiteration(self):
+        def mocked_tick():
+            yield
+
+        with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick:
+            self.reloader.run_loop()
+        self.assertEqual(tick.call_count, 1)
+
+    def test_run_loop_stop_and_return(self):
+        def mocked_tick(*args):
+            yield
+            self.reloader.stop()
+            return  # Raises StopIteration
+
+        with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick:
+            self.reloader.run_loop()
+
+        self.assertEqual(tick.call_count, 1)
+
+    def test_wait_for_apps_ready_checks_for_exception(self):
+        app_reg = Apps()
+        app_reg.ready_event.set()
+        # thread.is_alive() is False if it's not started.
+        dead_thread = threading.Thread()
+        self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread))
+
+    def test_wait_for_apps_ready_without_exception(self):
+        app_reg = Apps()
+        app_reg.ready_event.set()
+        thread = mock.MagicMock()
+        thread.is_alive.return_value = True
+        self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread))
+
+
+def skip_unless_watchman_available():
+    try:
+        autoreload.WatchmanReloader.check_availability()
+    except WatchmanUnavailable as e:
+        return skip('Watchman unavailable: %s' % e)
+    return lambda func: func
+
+
+@skip_unless_watchman_available()
+class WatchmanReloaderTests(ReloaderTests, IntegrationTests):
+    RELOADER_CLS = autoreload.WatchmanReloader
+
+    def test_watch_glob_ignores_non_existing_directories_two_levels(self):
+        with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
+            self.reloader._watch_glob(self.tempdir / 'does_not_exist' / 'more', ['*'])
+        self.assertFalse(mocked_subscribe.called)
+
+    def test_watch_glob_uses_existing_parent_directories(self):
+        with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
+            self.reloader._watch_glob(self.tempdir / 'does_not_exist', ['*'])
+        self.assertSequenceEqual(
+            mocked_subscribe.call_args[0],
+            [
+                self.tempdir, 'glob-parent-does_not_exist:%s' % self.tempdir,
+                ['anyof', ['match', 'does_not_exist/*', 'wholename']]
+            ]
+        )
+
+    def test_watch_glob_multiple_patterns(self):
+        with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
+            self.reloader._watch_glob(self.tempdir, ['*', '*.py'])
+        self.assertSequenceEqual(
+            mocked_subscribe.call_args[0],
+            [
+                self.tempdir, 'glob:%s' % self.tempdir,
+                ['anyof', ['match', '*', 'wholename'], ['match', '*.py', 'wholename']]
+            ]
+        )
+
+    def test_watched_roots_contains_files(self):
+        paths = self.reloader.watched_roots([self.existing_file])
+        self.assertIn(self.existing_file.parent, paths)
+
+    def test_watched_roots_contains_directory_globs(self):
+        self.reloader.watch_dir(self.tempdir, '*.py')
+        paths = self.reloader.watched_roots([])
+        self.assertIn(self.tempdir, paths)
+
+    def test_watched_roots_contains_sys_path(self):
+        with extend_sys_path(str(self.tempdir)):
+            paths = self.reloader.watched_roots([])
+        self.assertIn(self.tempdir, paths)
+
+    def test_check_server_status(self):
+        self.assertTrue(self.reloader.check_server_status())
+
+    def test_check_server_status_raises_error(self):
+        with mock.patch.object(self.reloader.client, 'query') as mocked_query:
+            mocked_query.side_effect = Exception()
+            with self.assertRaises(autoreload.WatchmanUnavailable):
+                self.reloader.check_server_status()
+
+    @mock.patch('pywatchman.client')
+    def test_check_availability(self, mocked_client):
+        mocked_client().capabilityCheck.side_effect = Exception()
+        with self.assertRaisesMessage(WatchmanUnavailable, 'Cannot connect to the watchman service'):
+            self.RELOADER_CLS.check_availability()
+
+    @mock.patch('pywatchman.client')
+    def test_check_availability_lower_version(self, mocked_client):
+        mocked_client().capabilityCheck.return_value = {'version': '4.8.10'}
+        with self.assertRaisesMessage(WatchmanUnavailable, 'Watchman 4.9 or later is required.'):
+            self.RELOADER_CLS.check_availability()
+
+    def test_pywatchman_not_available(self):
+        with mock.patch.object(autoreload, 'pywatchman') as mocked:
+            mocked.__bool__.return_value = False
+            with self.assertRaisesMessage(WatchmanUnavailable, 'pywatchman not installed.'):
+                self.RELOADER_CLS.check_availability()
+
+    def test_update_watches_raises_exceptions(self):
+        class TestException(Exception):
+            pass
+
+        with mock.patch.object(self.reloader, '_update_watches') as mocked_watches:
+            with mock.patch.object(self.reloader, 'check_server_status') as mocked_server_status:
+                mocked_watches.side_effect = TestException()
+                mocked_server_status.return_value = True
+                with self.assertRaises(TestException):
+                    self.reloader.update_watches()
+                self.assertIsInstance(mocked_server_status.call_args[0][0], TestException)
+
+
+class StatReloaderTests(ReloaderTests, IntegrationTests):
+    RELOADER_CLS = autoreload.StatReloader
+
+    def setUp(self):
+        super().setUp()
+        # Shorten the sleep time to speed up tests.
+        self.reloader.SLEEP_TIME = 0.01
+
+    def test_snapshot_files_ignores_missing_files(self):
+        with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistent_file]):
+            self.assertEqual(dict(self.reloader.snapshot_files()), {})
+
+    def test_snapshot_files_updates(self):
+        with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]):
+            snapshot1 = dict(self.reloader.snapshot_files())
+            self.assertIn(self.existing_file, snapshot1)
+            self.increment_mtime(self.existing_file)
+            snapshot2 = dict(self.reloader.snapshot_files())
+            self.assertNotEqual(snapshot1[self.existing_file], snapshot2[self.existing_file])
+
+    def test_does_not_fire_without_changes(self):
+        with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]), \
+                mock.patch.object(self.reloader, 'notify_file_changed') as notifier:
+            mtime = self.existing_file.stat().st_mtime
+            initial_snapshot = {self.existing_file: mtime}
+            second_snapshot = self.reloader.loop_files(initial_snapshot, time.time())
+            self.assertEqual(second_snapshot, {})
+            notifier.assert_not_called()
+
+    def test_fires_when_created(self):
+        with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistent_file]), \
+                mock.patch.object(self.reloader, 'notify_file_changed') as notifier:
+            self.nonexistent_file.touch()
+            mtime = self.nonexistent_file.stat().st_mtime
+            second_snapshot = self.reloader.loop_files({}, mtime - 1)
+            self.assertCountEqual(second_snapshot.keys(), [self.nonexistent_file])
+            notifier.assert_called_once_with(self.nonexistent_file)
+
+    def test_fires_with_changes(self):
+        with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]), \
+                mock.patch.object(self.reloader, 'notify_file_changed') as notifier:
+            initial_snapshot = {self.existing_file: 1}
+            second_snapshot = self.reloader.loop_files(initial_snapshot, time.time())
+            notifier.assert_called_once_with(self.existing_file)
+            self.assertCountEqual(second_snapshot.keys(), [self.existing_file])