diff --git a/README.md b/README.md
index db228aad..80997563 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@ A Rust HTTP server for Python applications.
 The main reasons behind Granian design are:
 
 - Have a single, correct HTTP implementation, supporting versions 1, 2 (and eventually 3)
-- Provide a single package for several platforms 
+- Provide a single package for several platforms
 - Avoid the usual Gunicorn + uvicorn + http-tools dependency composition on unix systems
 - Provide stable [performance](https://github.com/emmett-framework/granian/blob/master/benchmarks/README.md) when compared to existing alternatives
 
@@ -192,6 +192,19 @@ Options:
                                   changes (requires granian[reload] extra)
                                   [env var: GRANIAN_RELOAD; default:
                                   (disabled)]
+  --reload-paths PATH             Paths to watch for changes [env var:
+                                  GRANIAN_RELOAD_PATHS; default: (Working
+                                  directory)]
+  --reload-ignore-dirs TEXT       Names of directories to ignore changes for.
+                                  Extends the default list of directories to
+                                  ignore in watchfiles' default filter [env
+                                  var: GRANIAN_RELOAD_IGNORE_DIRS]
+  --reload-ignore-patterns TEXT   Path patterns (regex) to ignore changes for.
+                                  Extends the default list of patterns to
+                                  ignore in watchfiles' default filter [env
+                                  var: GRANIAN_RELOAD_IGNORE_PATTERNS]
+  --reload-ignore-paths PATH      Absolute paths to ignore changes for [env
+                                  var: GRANIAN_RELOAD_IGNORE_PATHS]
   --process-name TEXT             Set a custom name for processes (requires
                                   granian[pname] extra) [env var:
                                   GRANIAN_PROCESS_NAME]
diff --git a/granian/cli.py b/granian/cli.py
index f793c753..fb131f66 100644
--- a/granian/cli.py
+++ b/granian/cli.py
@@ -1,7 +1,7 @@
 import json
 import pathlib
 from enum import Enum
-from typing import Any, Callable, Optional, Type, TypeVar, Union
+from typing import Any, Callable, List, Optional, Type, TypeVar, Union
 
 import click
 
@@ -194,6 +194,35 @@ def option(*param_decls: str, cls: Optional[Type[click.Option]] = None, **attrs:
     default=False,
     help="Enable auto reload on application's files changes (requires granian[reload] extra)",
 )
+@option(
+    '--reload-paths',
+    type=click.Path(exists=True, file_okay=True, dir_okay=True, readable=True, path_type=pathlib.Path),
+    help='Paths to watch for changes',
+    show_default='Working directory',
+    multiple=True,
+)
+@option(
+    '--reload-ignore-dirs',
+    help=(
+        'Names of directories to ignore changes for. '
+        "Extends the default list of directories to ignore in watchfiles' default filter"
+    ),
+    multiple=True,
+)
+@option(
+    '--reload-ignore-patterns',
+    help=(
+        'Path patterns (regex) to ignore changes for. '
+        "Extends the default list of patterns to ignore in watchfiles' default filter"
+    ),
+    multiple=True,
+)
+@option(
+    '--reload-ignore-paths',
+    type=click.Path(exists=False, path_type=pathlib.Path),
+    help='Absolute paths to ignore changes for',
+    multiple=True,
+)
 @option(
     '--process-name',
     help='Set a custom name for processes (requires granian[pname] extra)',
@@ -242,6 +271,10 @@ def cli(
     respawn_failed_workers: bool,
     respawn_interval: float,
     reload: bool,
+    reload_paths: Optional[List[pathlib.Path]],
+    reload_ignore_dirs: Optional[List[str]],
+    reload_ignore_patterns: Optional[List[str]],
+    reload_ignore_paths: Optional[List[pathlib.Path]],
     process_name: Optional[str],
     pid_file: Optional[pathlib.Path],
 ) -> None:
@@ -294,6 +327,10 @@ def cli(
         respawn_failed_workers=respawn_failed_workers,
         respawn_interval=respawn_interval,
         reload=reload,
+        reload_paths=reload_paths,
+        reload_ignore_paths=reload_ignore_paths,
+        reload_ignore_dirs=reload_ignore_dirs,
+        reload_ignore_patterns=reload_ignore_patterns,
         process_name=process_name,
         pid_file=pid_file,
     )
diff --git a/granian/server.py b/granian/server.py
index dadde64a..69cf030b 100644
--- a/granian/server.py
+++ b/granian/server.py
@@ -12,7 +12,7 @@ import time
 
 from functools import partial
 from pathlib import Path
-from typing import Any, Callable, Dict, List, Optional, Tuple
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Type
 
 from ._futures import future_watcher_wrapper
 from ._granian import ASGIWorker, RSGIWorker, WSGIWorker
@@ -95,6 +95,11 @@ def __init__(
         respawn_failed_workers: bool = False,
         respawn_interval: float = 3.5,
         reload: bool = False,
+        reload_paths: Optional[Sequence[Path]] = None,
+        reload_ignore_dirs: Optional[Sequence[str]] = None,
+        reload_ignore_patterns: Optional[Sequence[str]] = None,
+        reload_ignore_paths: Optional[Sequence[Path]] = None,
+        reload_filter: Optional[Type[watchfiles.BaseFilter]] = None,
         process_name: Optional[str] = None,
         pid_file: Optional[Path] = None,
     ):
@@ -129,6 +134,11 @@ def __init__(
         self.respawn_failed_workers = respawn_failed_workers
         self.reload_on_changes = reload
         self.respawn_interval = respawn_interval
+        self.reload_paths = reload_paths or [Path.cwd()]
+        self.reload_ignore_paths = reload_ignore_paths or ()
+        self.reload_ignore_dirs = reload_ignore_dirs or ()
+        self.reload_ignore_patterns = reload_ignore_patterns or ()
+        self.reload_filter = reload_filter
         self.process_name = process_name
         self.pid_file = pid_file
 
@@ -580,13 +590,28 @@ def _serve_with_reloader(self, spawn_target, target_loader):
             logger.error('Using --reload requires the granian[reload] extra')
             sys.exit(1)
 
-        reload_path = Path.cwd()
+        # Use given or default filter rules
+        reload_filter = self.reload_filter or watchfiles.filters.DefaultFilter
+        # Extend `reload_filter` with explicit args
+        ignore_dirs = (*reload_filter.ignore_dirs, *self.reload_ignore_dirs)
+        ignore_entity_patterns = (
+            *reload_filter.ignore_entity_patterns,
+            *self.reload_ignore_patterns,
+        )
+        ignore_paths = (*reload_filter.ignore_paths, *self.reload_ignore_paths)
+        # Construct new filter
+        reload_filter = watchfiles.filters.DefaultFilter(
+            ignore_dirs=ignore_dirs, ignore_entity_patterns=ignore_entity_patterns, ignore_paths=ignore_paths
+        )
+
         sock = self.startup(spawn_target, target_loader)
 
         serve_loop = True
         while serve_loop:
             try:
-                for changes in watchfiles.watch(reload_path, stop_event=self.main_loop_interrupt):
+                for changes in watchfiles.watch(
+                    *self.reload_paths, watch_filter=reload_filter, stop_event=self.main_loop_interrupt
+                ):
                     logger.info('Changes detected, reloading workers..')
                     for change, file in changes:
                         logger.info(f'{change.raw_str().capitalize()}: {file}')
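Note on the reloader change (illustrative, not part of the patch): the new _serve_with_reloader logic starts from the given filter class (or watchfiles' DefaultFilter), extends its ignore rules with the user-supplied directories, patterns and paths, and watches the configured paths with the resulting filter. A minimal standalone sketch of that composition, assuming watchfiles is installed and using invented example values in place of the new CLI options:

    from pathlib import Path

    import watchfiles

    # Invented example values standing in for --reload-ignore-dirs,
    # --reload-ignore-patterns and --reload-ignore-paths
    extra_ignore_dirs = ('node_modules',)
    extra_ignore_patterns = (r'\.sqlite3$',)
    extra_ignore_paths = (Path('/srv/app/local_settings.py'),)

    # Extend the default filter rules, then build a fresh DefaultFilter,
    # mirroring what the patched _serve_with_reloader does
    base = watchfiles.filters.DefaultFilter
    reload_filter = watchfiles.filters.DefaultFilter(
        ignore_dirs=(*base.ignore_dirs, *extra_ignore_dirs),
        ignore_entity_patterns=(*base.ignore_entity_patterns, *extra_ignore_patterns),
        ignore_paths=(*base.ignore_paths, *extra_ignore_paths),
    )

    # Watch one or more paths; the server falls back to Path.cwd() when
    # no --reload-paths are given
    for changes in watchfiles.watch(Path.cwd(), watch_filter=reload_filter):
        for change, file in changes:
            print(change.raw_str(), file)

Changes matched by the extended rules are dropped by watchfiles before they reach the reload loop, so workers are only respawned for relevant files.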
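The same knobs are also exposed programmatically through the constructor touched in granian/server.py. A hedged usage sketch, assuming the class defined there is Granian (exported by the granian package) and that it is started with serve(); the app:app target is hypothetical and none of these names appear in this diff:

    from pathlib import Path

    from granian import Granian

    Granian(
        'app:app',                                 # hypothetical application target
        reload=True,
        reload_paths=[Path('src')],                # --reload-paths
        reload_ignore_dirs=['migrations'],         # --reload-ignore-dirs
        reload_ignore_patterns=[r'\.md$'],         # --reload-ignore-patterns
        reload_ignore_paths=[Path('docs/notes')],  # --reload-ignore-paths
    ).serve()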