Sanic Server WorkerManager refactor (#2499)

Co-authored-by: Néstor Pérez <25409753+prryplatypus@users.noreply.github.com>
This commit is contained in:
Adam Hopkins 2022-09-18 17:17:23 +03:00 committed by GitHub
parent d352a4155e
commit 4726cf1910
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
73 changed files with 3929 additions and 940 deletions

View File

@ -4,8 +4,8 @@ source = sanic
omit = omit =
site-packages site-packages
sanic/__main__.py sanic/__main__.py
sanic/server/legacy.py
sanic/compat.py sanic/compat.py
sanic/reloader_helpers.py
sanic/simple.py sanic/simple.py
sanic/utils.py sanic/utils.py
sanic/cli sanic/cli
@ -21,12 +21,4 @@ exclude_lines =
NOQA NOQA
pragma: no cover pragma: no cover
TYPE_CHECKING TYPE_CHECKING
omit =
site-packages
sanic/__main__.py
sanic/compat.py
sanic/reloader_helpers.py
sanic/simple.py
sanic/utils.py
sanic/cli
skip_empty = True skip_empty = True

View File

@ -15,7 +15,6 @@ codecov:
ignore: ignore:
- "sanic/__main__.py" - "sanic/__main__.py"
- "sanic/compat.py" - "sanic/compat.py"
- "sanic/reloader_helpers.py"
- "sanic/simple.py" - "sanic/simple.py"
- "sanic/utils.py" - "sanic/utils.py"
- "sanic/cli" - "sanic/cli"

View File

@ -1,6 +1,3 @@
import os
import socket
from sanic import Sanic, response from sanic import Sanic, response
@ -13,13 +10,4 @@ async def test(request):
if __name__ == "__main__": if __name__ == "__main__":
server_address = "./uds_socket" app.run(unix="./uds_socket")
# Make sure the socket does not already exist
try:
os.unlink(server_address)
except OSError:
if os.path.exists(server_address):
raise
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(server_address)
app.run(sock=sock)

View File

@ -3,7 +3,15 @@ from sanic.app import Sanic
from sanic.blueprints import Blueprint from sanic.blueprints import Blueprint
from sanic.constants import HTTPMethod from sanic.constants import HTTPMethod
from sanic.request import Request from sanic.request import Request
from sanic.response import HTTPResponse, html, json, text from sanic.response import (
HTTPResponse,
empty,
file,
html,
json,
redirect,
text,
)
from sanic.server.websockets.impl import WebsocketImplProtocol as Websocket from sanic.server.websockets.impl import WebsocketImplProtocol as Websocket
@ -15,7 +23,10 @@ __all__ = (
"HTTPResponse", "HTTPResponse",
"Request", "Request",
"Websocket", "Websocket",
"empty",
"file",
"html", "html",
"json", "json",
"redirect",
"text", "text",
) )

View File

@ -6,10 +6,10 @@ if OS_IS_WINDOWS:
enable_windows_color_support() enable_windows_color_support()
def main(): def main(args=None):
cli = SanicCLI() cli = SanicCLI()
cli.attach() cli.attach()
cli.run() cli.run(args)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -1 +1 @@
__version__ = "22.6.1" __version__ = "22.9.1"

View File

@ -19,6 +19,7 @@ from collections import defaultdict, deque
from contextlib import suppress from contextlib import suppress
from functools import partial from functools import partial
from inspect import isawaitable from inspect import isawaitable
from os import environ
from socket import socket from socket import socket
from traceback import format_exc from traceback import format_exc
from types import SimpleNamespace from types import SimpleNamespace
@ -70,7 +71,7 @@ from sanic.log import (
logger, logger,
) )
from sanic.mixins.listeners import ListenerEvent from sanic.mixins.listeners import ListenerEvent
from sanic.mixins.runner import RunnerMixin from sanic.mixins.startup import StartupMixin
from sanic.models.futures import ( from sanic.models.futures import (
FutureException, FutureException,
FutureListener, FutureListener,
@ -88,6 +89,9 @@ from sanic.router import Router
from sanic.server.websockets.impl import ConnectionClosed from sanic.server.websockets.impl import ConnectionClosed
from sanic.signals import Signal, SignalRouter from sanic.signals import Signal, SignalRouter
from sanic.touchup import TouchUp, TouchUpMeta from sanic.touchup import TouchUp, TouchUpMeta
from sanic.types.shared_ctx import SharedContext
from sanic.worker.inspector import Inspector
from sanic.worker.manager import WorkerManager
if TYPE_CHECKING: if TYPE_CHECKING:
@ -104,7 +108,7 @@ if OS_IS_WINDOWS: # no cov
filterwarnings("once", category=DeprecationWarning) filterwarnings("once", category=DeprecationWarning)
class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta): class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
""" """
The main application instance The main application instance
""" """
@ -128,6 +132,8 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
"_future_routes", "_future_routes",
"_future_signals", "_future_signals",
"_future_statics", "_future_statics",
"_inspector",
"_manager",
"_state", "_state",
"_task_registry", "_task_registry",
"_test_client", "_test_client",
@ -139,12 +145,14 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
"error_handler", "error_handler",
"go_fast", "go_fast",
"listeners", "listeners",
"multiplexer",
"named_request_middleware", "named_request_middleware",
"named_response_middleware", "named_response_middleware",
"request_class", "request_class",
"request_middleware", "request_middleware",
"response_middleware", "response_middleware",
"router", "router",
"shared_ctx",
"signal_router", "signal_router",
"sock", "sock",
"strict_slashes", "strict_slashes",
@ -171,9 +179,9 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
configure_logging: bool = True, configure_logging: bool = True,
dumps: Optional[Callable[..., AnyStr]] = None, dumps: Optional[Callable[..., AnyStr]] = None,
loads: Optional[Callable[..., Any]] = None, loads: Optional[Callable[..., Any]] = None,
inspector: bool = False,
) -> None: ) -> None:
super().__init__(name=name) super().__init__(name=name)
# logging # logging
if configure_logging: if configure_logging:
dict_config = log_config or LOGGING_CONFIG_DEFAULTS dict_config = log_config or LOGGING_CONFIG_DEFAULTS
@ -187,12 +195,16 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
# First setup config # First setup config
self.config: Config = config or Config(env_prefix=env_prefix) self.config: Config = config or Config(env_prefix=env_prefix)
if inspector:
self.config.INSPECTOR = inspector
# Then we can do the rest # Then we can do the rest
self._asgi_client: Any = None self._asgi_client: Any = None
self._blueprint_order: List[Blueprint] = [] self._blueprint_order: List[Blueprint] = []
self._delayed_tasks: List[str] = [] self._delayed_tasks: List[str] = []
self._future_registry: FutureRegistry = FutureRegistry() self._future_registry: FutureRegistry = FutureRegistry()
self._inspector: Optional[Inspector] = None
self._manager: Optional[WorkerManager] = None
self._state: ApplicationState = ApplicationState(app=self) self._state: ApplicationState = ApplicationState(app=self)
self._task_registry: Dict[str, Task] = {} self._task_registry: Dict[str, Task] = {}
self._test_client: Any = None self._test_client: Any = None
@ -210,6 +222,7 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
self.request_middleware: Deque[MiddlewareType] = deque() self.request_middleware: Deque[MiddlewareType] = deque()
self.response_middleware: Deque[MiddlewareType] = deque() self.response_middleware: Deque[MiddlewareType] = deque()
self.router: Router = router or Router() self.router: Router = router or Router()
self.shared_ctx: SharedContext = SharedContext()
self.signal_router: SignalRouter = signal_router or SignalRouter() self.signal_router: SignalRouter = signal_router or SignalRouter()
self.sock: Optional[socket] = None self.sock: Optional[socket] = None
self.strict_slashes: bool = strict_slashes self.strict_slashes: bool = strict_slashes
@ -243,7 +256,7 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
) )
try: try:
return get_running_loop() return get_running_loop()
except RuntimeError: except RuntimeError: # no cov
if sys.version_info > (3, 10): if sys.version_info > (3, 10):
return asyncio.get_event_loop_policy().get_event_loop() return asyncio.get_event_loop_policy().get_event_loop()
else: else:
@ -1353,6 +1366,7 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
@auto_reload.setter @auto_reload.setter
def auto_reload(self, value: bool): def auto_reload(self, value: bool):
self.config.AUTO_RELOAD = value self.config.AUTO_RELOAD = value
self.state.auto_reload = value
@property @property
def state(self) -> ApplicationState: # type: ignore def state(self) -> ApplicationState: # type: ignore
@ -1470,6 +1484,18 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
cls._app_registry[name] = app cls._app_registry[name] = app
@classmethod
def unregister_app(cls, app: "Sanic") -> None:
    """Remove a previously registered Sanic instance from the registry.

    :param app: the application instance to remove
    :raises SanicException: if ``app`` is not a ``Sanic`` instance
    """
    if not isinstance(app, cls):
        raise SanicException("Registered app must be an instance of Sanic")
    # Drop the entry if present; a missing name is a silent no-op,
    # matching the original membership-checked delete.
    cls._app_registry.pop(app.name, None)
@classmethod @classmethod
def get_app( def get_app(
cls, name: Optional[str] = None, *, force_create: bool = False cls, name: Optional[str] = None, *, force_create: bool = False
@ -1489,6 +1515,8 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
try: try:
return cls._app_registry[name] return cls._app_registry[name]
except KeyError: except KeyError:
if name == "__main__":
return cls.get_app("__mp_main__", force_create=force_create)
if force_create: if force_create:
return cls(name) return cls(name)
raise SanicException(f'Sanic app name "{name}" not found.') raise SanicException(f'Sanic app name "{name}" not found.')
@ -1562,6 +1590,9 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
self.state.is_started = True self.state.is_started = True
if hasattr(self, "multiplexer"):
self.multiplexer.ack()
async def _server_event( async def _server_event(
self, self,
concern: str, concern: str,
@ -1590,3 +1621,43 @@ class Sanic(BaseSanic, RunnerMixin, metaclass=TouchUpMeta):
"loop": loop, "loop": loop,
}, },
) )
# -------------------------------------------------------------------- #
# Process Management
# -------------------------------------------------------------------- #
def refresh(
    self,
    passthru: Optional[Dict[str, Any]] = None,
):
    """Re-resolve this app against the instance registered under its name.

    If a different instance is registered for ``self.name``, that
    registered instance is used from here on (copying over
    ``state.server_info`` when the registered one has none).

    :param passthru: optional attribute overrides; when a value is a
        dict, its items are set key-by-key onto the named attribute,
        otherwise the attribute itself is replaced.
    :return: the registered (possibly different) ``Sanic`` instance.
    """
    registered = self.__class__.get_app(self.name)
    if self is not registered:
        if not registered.state.server_info:
            registered.state.server_info = self.state.server_info
        # Continue operating on the registered instance, not the caller
        self = registered
    if passthru:
        for attr, info in passthru.items():
            if isinstance(info, dict):
                # Merge nested dict onto the existing attribute object
                for key, value in info.items():
                    setattr(getattr(self, attr), key, value)
            else:
                setattr(self, attr, info)
    if hasattr(self, "multiplexer"):
        # NOTE(review): presence of a multiplexer appears to indicate a
        # worker process; shared_ctx is locked here — confirm intent.
        self.shared_ctx.lock()
    return self
@property
def inspector(self):
    """The Inspector instance, accessible from the main process only.

    :raises SanicException: when running in a worker process
        (``SANIC_WORKER_PROCESS`` env var is set) or when no inspector
        has been created yet.
    """
    if environ.get("SANIC_WORKER_PROCESS") or not self._inspector:
        raise SanicException(
            "Can only access the inspector from the main process"
        )
    return self._inspector
@property
def manager(self):
    """The WorkerManager instance, accessible from the main process only.

    :raises SanicException: when running in a worker process
        (``SANIC_WORKER_PROCESS`` env var is set) or when no manager
        has been created yet.
    """
    in_worker = environ.get("SANIC_WORKER_PROCESS")
    if in_worker or not self._manager:
        raise SanicException(
            "Can only access the manager from the main process"
        )
    return self._manager

View File

@ -1,15 +1,24 @@
from enum import Enum, IntEnum, auto from enum import Enum, IntEnum, auto
class StrEnum(str, Enum): class StrEnum(str, Enum): # no cov
def _generate_next_value_(name: str, *args) -> str: # type: ignore def _generate_next_value_(name: str, *args) -> str: # type: ignore
return name.lower() return name.lower()
def __eq__(self, value: object) -> bool:
    # Compare against the stringified, upper-cased form of the other
    # operand via the str superclass comparison.
    # NOTE(review): generated member values are lower-cased
    # (_generate_next_value_), yet the comparand is upper-cased here —
    # confirm the intended case-insensitive matching actually holds.
    value = str(value).upper()
    return super().__eq__(value)
def __hash__(self) -> int:
    # Defining __eq__ suppresses the inherited __hash__, so restore
    # hashability explicitly, keyed on the underlying string value.
    return hash(self.value)
def __str__(self) -> str:
    # Render as the plain string value instead of "ClassName.MEMBER"
    return self.value
class Server(StrEnum): class Server(StrEnum):
SANIC = auto() SANIC = auto()
ASGI = auto() ASGI = auto()
GUNICORN = auto()
class Mode(StrEnum): class Mode(StrEnum):

View File

@ -22,7 +22,7 @@ def setup_ext(app: Sanic, *, fail: bool = False, **kwargs):
with suppress(ModuleNotFoundError): with suppress(ModuleNotFoundError):
sanic_ext = import_module("sanic_ext") sanic_ext = import_module("sanic_ext")
if not sanic_ext: if not sanic_ext: # no cov
if fail: if fail:
raise RuntimeError( raise RuntimeError(
"Sanic Extensions is not installed. You can add it to your " "Sanic Extensions is not installed. You can add it to your "

View File

@ -80,20 +80,23 @@ class MOTDTTY(MOTD):
) )
self.display_length = self.key_width + self.value_width + 2 self.display_length = self.key_width + self.value_width + 2
def display(self): def display(self, version=True, action="Goin' Fast", out=None):
version = f"Sanic v{__version__}".center(self.centering_length) if not out:
out = logger.info
header = "Sanic"
if version:
header += f" v{__version__}"
header = header.center(self.centering_length)
running = ( running = (
f"Goin' Fast @ {self.serve_location}" f"{action} @ {self.serve_location}" if self.serve_location else ""
if self.serve_location
else ""
).center(self.centering_length) ).center(self.centering_length)
length = len(version) + 2 - self.logo_line_length length = len(header) + 2 - self.logo_line_length
first_filler = "" * (self.logo_line_length - 1) first_filler = "" * (self.logo_line_length - 1)
second_filler = "" * length second_filler = "" * length
display_filler = "" * (self.display_length + 2) display_filler = "" * (self.display_length + 2)
lines = [ lines = [
f"\n{first_filler}{second_filler}", f"\n{first_filler}{second_filler}",
f"{version}", f"{header}",
f"{running}", f"{running}",
f"{first_filler}{second_filler}", f"{first_filler}{second_filler}",
] ]
@ -107,7 +110,7 @@ class MOTDTTY(MOTD):
self._render_fill(lines) self._render_fill(lines)
lines.append(f"{first_filler}{second_filler}\n") lines.append(f"{first_filler}{second_filler}\n")
logger.info(indent("\n".join(lines), " ")) out(indent("\n".join(lines), " "))
def _render_data(self, lines, data, start): def _render_data(self, lines, data, start):
offset = 0 offset = 0

View File

@ -1,10 +1,10 @@
import logging
import os import os
import shutil import shutil
import sys import sys
from argparse import ArgumentParser, RawTextHelpFormatter from argparse import ArgumentParser, RawTextHelpFormatter
from importlib import import_module from functools import partial
from pathlib import Path
from textwrap import indent from textwrap import indent
from typing import Any, List, Union from typing import Any, List, Union
@ -12,7 +12,8 @@ from sanic.app import Sanic
from sanic.application.logo import get_logo from sanic.application.logo import get_logo
from sanic.cli.arguments import Group from sanic.cli.arguments import Group
from sanic.log import error_logger from sanic.log import error_logger
from sanic.simple import create_simple_server from sanic.worker.inspector import inspect
from sanic.worker.loader import AppLoader
class SanicArgumentParser(ArgumentParser): class SanicArgumentParser(ArgumentParser):
@ -66,13 +67,17 @@ Or, a path to a directory to run as a simple HTTP server:
instance.attach() instance.attach()
self.groups.append(instance) self.groups.append(instance)
def run(self): def run(self, parse_args=None):
legacy_version = False
if not parse_args:
# This is to provide backwards compat -v to display version # This is to provide backwards compat -v to display version
legacy_version = len(sys.argv) == 2 and sys.argv[-1] == "-v" legacy_version = len(sys.argv) == 2 and sys.argv[-1] == "-v"
parse_args = ["--version"] if legacy_version else None parse_args = ["--version"] if legacy_version else None
elif parse_args == ["-v"]:
parse_args = ["--version"]
if not parse_args: if not legacy_version:
parsed, unknown = self.parser.parse_known_args() parsed, unknown = self.parser.parse_known_args(args=parse_args)
if unknown and parsed.factory: if unknown and parsed.factory:
for arg in unknown: for arg in unknown:
if arg.startswith("--"): if arg.startswith("--"):
@ -80,20 +85,47 @@ Or, a path to a directory to run as a simple HTTP server:
self.args = self.parser.parse_args(args=parse_args) self.args = self.parser.parse_args(args=parse_args)
self._precheck() self._precheck()
app_loader = AppLoader(
self.args.module,
self.args.factory,
self.args.simple,
self.args,
)
try: try:
app = self._get_app() app = self._get_app(app_loader)
kwargs = self._build_run_kwargs() kwargs = self._build_run_kwargs()
except ValueError: except ValueError as e:
error_logger.exception("Failed to run app") error_logger.exception(f"Failed to run app: {e}")
else:
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
os.environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"
else: else:
for http_version in self.args.http: for http_version in self.args.http:
app.prepare(**kwargs, version=http_version) app.prepare(**kwargs, version=http_version)
Sanic.serve() if self.args.inspect or self.args.inspect_raw or self.args.trigger:
action = self.args.trigger or (
"raw" if self.args.inspect_raw else "pretty"
)
inspect(
app.config.INSPECTOR_HOST,
app.config.INSPECTOR_PORT,
action,
)
del os.environ["SANIC_IGNORE_PRODUCTION_WARNING"]
return
if self.args.single:
serve = Sanic.serve_single
elif self.args.legacy:
serve = Sanic.serve_legacy
else:
serve = partial(Sanic.serve, app_loader=app_loader)
serve(app)
def _precheck(self): def _precheck(self):
# # Custom TLS mismatch handling for better diagnostics # Custom TLS mismatch handling for better diagnostics
if self.main_process and ( if self.main_process and (
# one of cert/key missing # one of cert/key missing
bool(self.args.cert) != bool(self.args.key) bool(self.args.cert) != bool(self.args.key)
@ -113,58 +145,14 @@ Or, a path to a directory to run as a simple HTTP server:
) )
error_logger.error(message) error_logger.error(message)
sys.exit(1) sys.exit(1)
if self.args.inspect or self.args.inspect_raw:
logging.disable(logging.CRITICAL)
def _get_app(self): def _get_app(self, app_loader: AppLoader):
try: try:
module_path = os.path.abspath(os.getcwd()) app = app_loader.load()
if module_path not in sys.path:
sys.path.append(module_path)
if self.args.simple:
path = Path(self.args.module)
app = create_simple_server(path)
else:
delimiter = ":" if ":" in self.args.module else "."
module_name, app_name = self.args.module.rsplit(delimiter, 1)
if module_name == "" and os.path.isdir(self.args.module):
raise ValueError(
"App not found.\n"
" Please use --simple if you are passing a "
"directory to sanic.\n"
f" eg. sanic {self.args.module} --simple"
)
if app_name.endswith("()"):
self.args.factory = True
app_name = app_name[:-2]
module = import_module(module_name)
app = getattr(module, app_name, None)
if self.args.factory:
try:
app = app(self.args)
except TypeError:
app = app()
app_type_name = type(app).__name__
if not isinstance(app, Sanic):
if callable(app):
solution = f"sanic {self.args.module} --factory"
raise ValueError(
"Module is not a Sanic app, it is a "
f"{app_type_name}\n"
" If this callable returns a "
f"Sanic instance try: \n{solution}"
)
raise ValueError(
f"Module is not a Sanic app, it is a {app_type_name}\n"
f" Perhaps you meant {self.args.module}:app?"
)
except ImportError as e: except ImportError as e:
if module_name.startswith(e.name): if app_loader.module_name.startswith(e.name): # type: ignore
error_logger.error( error_logger.error(
f"No module named {e.name} found.\n" f"No module named {e.name} found.\n"
" Example File: project/sanic_server.py -> app\n" " Example File: project/sanic_server.py -> app\n"
@ -190,6 +178,7 @@ Or, a path to a directory to run as a simple HTTP server:
elif len(ssl) == 1 and ssl[0] is not None: elif len(ssl) == 1 and ssl[0] is not None:
# Use only one cert, no TLSSelector. # Use only one cert, no TLSSelector.
ssl = ssl[0] ssl = ssl[0]
kwargs = { kwargs = {
"access_log": self.args.access_log, "access_log": self.args.access_log,
"coffee": self.args.coffee, "coffee": self.args.coffee,
@ -204,6 +193,8 @@ Or, a path to a directory to run as a simple HTTP server:
"verbosity": self.args.verbosity or 0, "verbosity": self.args.verbosity or 0,
"workers": self.args.workers, "workers": self.args.workers,
"auto_tls": self.args.auto_tls, "auto_tls": self.args.auto_tls,
"single_process": self.args.single,
"legacy": self.args.legacy,
} }
for maybe_arg in ("auto_reload", "dev"): for maybe_arg in ("auto_reload", "dev"):

View File

@ -30,7 +30,7 @@ class Group:
instance = cls(parser, cls.name) instance = cls(parser, cls.name)
return instance return instance
def add_bool_arguments(self, *args, **kwargs): def add_bool_arguments(self, *args, nullable=False, **kwargs):
group = self.container.add_mutually_exclusive_group() group = self.container.add_mutually_exclusive_group()
kwargs["help"] = kwargs["help"].capitalize() kwargs["help"] = kwargs["help"].capitalize()
group.add_argument(*args, action="store_true", **kwargs) group.add_argument(*args, action="store_true", **kwargs)
@ -38,6 +38,9 @@ class Group:
group.add_argument( group.add_argument(
"--no-" + args[0][2:], *args[1:], action="store_false", **kwargs "--no-" + args[0][2:], *args[1:], action="store_false", **kwargs
) )
if nullable:
params = {args[0][2:].replace("-", "_"): None}
group.set_defaults(**params)
def prepare(self, args) -> None: def prepare(self, args) -> None:
... ...
@ -67,7 +70,8 @@ class ApplicationGroup(Group):
name = "Application" name = "Application"
def attach(self): def attach(self):
self.container.add_argument( group = self.container.add_mutually_exclusive_group()
group.add_argument(
"--factory", "--factory",
action="store_true", action="store_true",
help=( help=(
@ -75,7 +79,7 @@ class ApplicationGroup(Group):
"i.e. a () -> <Sanic app> callable" "i.e. a () -> <Sanic app> callable"
), ),
) )
self.container.add_argument( group.add_argument(
"-s", "-s",
"--simple", "--simple",
dest="simple", dest="simple",
@ -85,6 +89,32 @@ class ApplicationGroup(Group):
"a directory\n(module arg should be a path)" "a directory\n(module arg should be a path)"
), ),
) )
group.add_argument(
"--inspect",
dest="inspect",
action="store_true",
help=("Inspect the state of a running instance, human readable"),
)
group.add_argument(
"--inspect-raw",
dest="inspect_raw",
action="store_true",
help=("Inspect the state of a running instance, JSON output"),
)
group.add_argument(
"--trigger-reload",
dest="trigger",
action="store_const",
const="reload",
help=("Trigger worker processes to reload"),
)
group.add_argument(
"--trigger-shutdown",
dest="trigger",
action="store_const",
const="shutdown",
help=("Trigger all processes to shutdown"),
)
class HTTPVersionGroup(Group): class HTTPVersionGroup(Group):
@ -207,8 +237,22 @@ class WorkerGroup(Group):
action="store_true", action="store_true",
help="Set the number of workers to max allowed", help="Set the number of workers to max allowed",
) )
group.add_argument(
"--single-process",
dest="single",
action="store_true",
help="Do not use multiprocessing, run server in a single process",
)
self.container.add_argument(
"--legacy",
action="store_true",
help="Use the legacy server manager",
)
self.add_bool_arguments( self.add_bool_arguments(
"--access-logs", dest="access_log", help="display access logs" "--access-logs",
dest="access_log",
help="display access logs",
default=None,
) )

View File

@ -18,13 +18,16 @@ SANIC_PREFIX = "SANIC_"
DEFAULT_CONFIG = { DEFAULT_CONFIG = {
"_FALLBACK_ERROR_FORMAT": _default, "_FALLBACK_ERROR_FORMAT": _default,
"ACCESS_LOG": True, "ACCESS_LOG": False,
"AUTO_EXTEND": True, "AUTO_EXTEND": True,
"AUTO_RELOAD": False, "AUTO_RELOAD": False,
"EVENT_AUTOREGISTER": False, "EVENT_AUTOREGISTER": False,
"FORWARDED_FOR_HEADER": "X-Forwarded-For", "FORWARDED_FOR_HEADER": "X-Forwarded-For",
"FORWARDED_SECRET": None, "FORWARDED_SECRET": None,
"GRACEFUL_SHUTDOWN_TIMEOUT": 15.0, # 15 sec "GRACEFUL_SHUTDOWN_TIMEOUT": 15.0, # 15 sec
"INSPECTOR": False,
"INSPECTOR_HOST": "localhost",
"INSPECTOR_PORT": 6457,
"KEEP_ALIVE_TIMEOUT": 5, # 5 seconds "KEEP_ALIVE_TIMEOUT": 5, # 5 seconds
"KEEP_ALIVE": True, "KEEP_ALIVE": True,
"LOCAL_CERT_CREATOR": LocalCertCreator.AUTO, "LOCAL_CERT_CREATOR": LocalCertCreator.AUTO,
@ -72,6 +75,9 @@ class Config(dict, metaclass=DescriptorMeta):
FORWARDED_FOR_HEADER: str FORWARDED_FOR_HEADER: str
FORWARDED_SECRET: Optional[str] FORWARDED_SECRET: Optional[str]
GRACEFUL_SHUTDOWN_TIMEOUT: float GRACEFUL_SHUTDOWN_TIMEOUT: float
INSPECTOR: bool
INSPECTOR_HOST: str
INSPECTOR_PORT: int
KEEP_ALIVE_TIMEOUT: int KEEP_ALIVE_TIMEOUT: int
KEEP_ALIVE: bool KEEP_ALIVE: bool
LOCAL_CERT_CREATOR: Union[str, LocalCertCreator] LOCAL_CERT_CREATOR: Union[str, LocalCertCreator]

View File

@ -22,7 +22,7 @@ from sanic.exceptions import PayloadTooLarge, SanicException, ServerError
from sanic.helpers import has_message_body from sanic.helpers import has_message_body
from sanic.http.constants import Stage from sanic.http.constants import Stage
from sanic.http.stream import Stream from sanic.http.stream import Stream
from sanic.http.tls.context import CertSelector, CertSimple, SanicSSLContext from sanic.http.tls.context import CertSelector, SanicSSLContext
from sanic.log import Colors, logger from sanic.log import Colors, logger
from sanic.models.protocol_types import TransportProtocol from sanic.models.protocol_types import TransportProtocol
from sanic.models.server_types import ConnInfo from sanic.models.server_types import ConnInfo
@ -389,8 +389,8 @@ def get_config(
"should be able to use mkcert instead. For more information, see: " "should be able to use mkcert instead. For more information, see: "
"https://github.com/aiortc/aioquic/issues/295." "https://github.com/aiortc/aioquic/issues/295."
) )
if not isinstance(ssl, CertSimple): if not isinstance(ssl, SanicSSLContext):
raise SanicException("SSLContext is not CertSimple") raise SanicException("SSLContext is not SanicSSLContext")
config = QuicConfiguration( config = QuicConfiguration(
alpn_protocols=H3_ALPN + H0_ALPN + ["siduck"], alpn_protocols=H3_ALPN + H0_ALPN + ["siduck"],

View File

@ -240,7 +240,12 @@ class MkcertCreator(CertCreator):
self.cert_path.unlink() self.cert_path.unlink()
self.tmpdir.rmdir() self.tmpdir.rmdir()
return CertSimple(self.cert_path, self.key_path) context = CertSimple(self.cert_path, self.key_path)
context.sanic["creator"] = "mkcert"
context.sanic["localhost"] = localhost
SanicSSLContext.create_from_ssl_context(context)
return context
class TrustmeCreator(CertCreator): class TrustmeCreator(CertCreator):
@ -259,20 +264,23 @@ class TrustmeCreator(CertCreator):
) )
def generate_cert(self, localhost: str) -> ssl.SSLContext: def generate_cert(self, localhost: str) -> ssl.SSLContext:
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) context = SanicSSLContext.create_from_ssl_context(
sanic_context = SanicSSLContext.create_from_ssl_context(context) ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
sanic_context.sanic = { )
context.sanic = {
"cert": self.cert_path.absolute(), "cert": self.cert_path.absolute(),
"key": self.key_path.absolute(), "key": self.key_path.absolute(),
} }
ca = trustme.CA() ca = trustme.CA()
server_cert = ca.issue_cert(localhost) server_cert = ca.issue_cert(localhost)
server_cert.configure_cert(sanic_context) server_cert.configure_cert(context)
ca.configure_trust(context) ca.configure_trust(context)
ca.cert_pem.write_to_path(str(self.cert_path.absolute())) ca.cert_pem.write_to_path(str(self.cert_path.absolute()))
server_cert.private_key_and_cert_chain_pem.write_to_path( server_cert.private_key_and_cert_chain_pem.write_to_path(
str(self.key_path.absolute()) str(self.key_path.absolute())
) )
context.sanic["creator"] = "trustme"
context.sanic["localhost"] = localhost
return context return context

View File

@ -64,10 +64,11 @@ Default logging configuration
class Colors(str, Enum): # no cov class Colors(str, Enum): # no cov
END = "\033[0m" END = "\033[0m"
BLUE = "\033[01;34m" BOLD = "\033[1m"
GREEN = "\033[01;32m" BLUE = "\033[34m"
PURPLE = "\033[01;35m" GREEN = "\033[32m"
RED = "\033[01;31m" PURPLE = "\033[35m"
RED = "\033[31m"
SANIC = "\033[38;2;255;13;104m" SANIC = "\033[38;2;255;13;104m"
YELLOW = "\033[01;33m" YELLOW = "\033[01;33m"

View File

@ -17,9 +17,12 @@ class ListenerEvent(str, Enum):
BEFORE_SERVER_STOP = "server.shutdown.before" BEFORE_SERVER_STOP = "server.shutdown.before"
AFTER_SERVER_STOP = "server.shutdown.after" AFTER_SERVER_STOP = "server.shutdown.after"
MAIN_PROCESS_START = auto() MAIN_PROCESS_START = auto()
MAIN_PROCESS_READY = auto()
MAIN_PROCESS_STOP = auto() MAIN_PROCESS_STOP = auto()
RELOAD_PROCESS_START = auto() RELOAD_PROCESS_START = auto()
RELOAD_PROCESS_STOP = auto() RELOAD_PROCESS_STOP = auto()
BEFORE_RELOAD_TRIGGER = auto()
AFTER_RELOAD_TRIGGER = auto()
class ListenerMixin(metaclass=SanicMeta): class ListenerMixin(metaclass=SanicMeta):
@ -98,6 +101,11 @@ class ListenerMixin(metaclass=SanicMeta):
) -> ListenerType[Sanic]: ) -> ListenerType[Sanic]:
return self.listener(listener, "main_process_start") return self.listener(listener, "main_process_start")
def main_process_ready(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
return self.listener(listener, "main_process_ready")
def main_process_stop( def main_process_stop(
self, listener: ListenerType[Sanic] self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]: ) -> ListenerType[Sanic]:
@ -113,6 +121,16 @@ class ListenerMixin(metaclass=SanicMeta):
) -> ListenerType[Sanic]: ) -> ListenerType[Sanic]:
return self.listener(listener, "reload_process_stop") return self.listener(listener, "reload_process_stop")
def before_reload_trigger(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
return self.listener(listener, "before_reload_trigger")
def after_reload_trigger(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
return self.listener(listener, "after_reload_trigger")
def before_server_start( def before_server_start(
self, listener: ListenerType[Sanic] self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]: ) -> ListenerType[Sanic]:

View File

@ -16,12 +16,15 @@ from asyncio import (
from contextlib import suppress from contextlib import suppress
from functools import partial from functools import partial
from importlib import import_module from importlib import import_module
from multiprocessing import Manager, Pipe, get_context
from multiprocessing.context import BaseContext
from pathlib import Path from pathlib import Path
from socket import socket from socket import socket
from ssl import SSLContext from ssl import SSLContext
from typing import ( from typing import (
TYPE_CHECKING, TYPE_CHECKING,
Any, Any,
Callable,
Dict, Dict,
List, List,
Optional, Optional,
@ -32,7 +35,6 @@ from typing import (
cast, cast,
) )
from sanic import reloader_helpers
from sanic.application.logo import get_logo from sanic.application.logo import get_logo
from sanic.application.motd import MOTD from sanic.application.motd import MOTD
from sanic.application.state import ApplicationServerInfo, Mode, ServerStage from sanic.application.state import ApplicationServerInfo, Mode, ServerStage
@ -41,15 +43,25 @@ from sanic.compat import OS_IS_WINDOWS, is_atty
from sanic.helpers import _default from sanic.helpers import _default
from sanic.http.constants import HTTP from sanic.http.constants import HTTP
from sanic.http.tls import get_ssl_context, process_to_context from sanic.http.tls import get_ssl_context, process_to_context
from sanic.http.tls.context import SanicSSLContext
from sanic.log import Colors, deprecation, error_logger, logger from sanic.log import Colors, deprecation, error_logger, logger
from sanic.models.handler_types import ListenerType from sanic.models.handler_types import ListenerType
from sanic.server import Signal as ServerSignal from sanic.server import Signal as ServerSignal
from sanic.server import try_use_uvloop from sanic.server import try_use_uvloop
from sanic.server.async_server import AsyncioServer from sanic.server.async_server import AsyncioServer
from sanic.server.events import trigger_events from sanic.server.events import trigger_events
from sanic.server.legacy import watchdog
from sanic.server.loop import try_windows_loop
from sanic.server.protocols.http_protocol import HttpProtocol from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.protocols.websocket_protocol import WebSocketProtocol from sanic.server.protocols.websocket_protocol import WebSocketProtocol
from sanic.server.runners import serve, serve_multiple, serve_single from sanic.server.runners import serve, serve_multiple, serve_single
from sanic.server.socket import configure_socket, remove_unix_socket
from sanic.worker.inspector import Inspector
from sanic.worker.loader import AppLoader
from sanic.worker.manager import WorkerManager
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.reloader import Reloader
from sanic.worker.serve import worker_serve
if TYPE_CHECKING: if TYPE_CHECKING:
@ -59,20 +71,35 @@ if TYPE_CHECKING:
SANIC_PACKAGES = ("sanic-routing", "sanic-testing", "sanic-ext") SANIC_PACKAGES = ("sanic-routing", "sanic-testing", "sanic-ext")
if sys.version_info < (3, 8): if sys.version_info < (3, 8): # no cov
HTTPVersion = Union[HTTP, int] HTTPVersion = Union[HTTP, int]
else: else: # no cov
from typing import Literal from typing import Literal
HTTPVersion = Union[HTTP, Literal[1], Literal[3]] HTTPVersion = Union[HTTP, Literal[1], Literal[3]]
class RunnerMixin(metaclass=SanicMeta): class StartupMixin(metaclass=SanicMeta):
_app_registry: Dict[str, Sanic] _app_registry: Dict[str, Sanic]
config: Config config: Config
listeners: Dict[str, List[ListenerType[Any]]] listeners: Dict[str, List[ListenerType[Any]]]
state: ApplicationState state: ApplicationState
websocket_enabled: bool websocket_enabled: bool
multiplexer: WorkerMultiplexer
def setup_loop(self):
if not self.asgi:
if self.config.USE_UVLOOP is True or (
self.config.USE_UVLOOP is _default and not OS_IS_WINDOWS
):
try_use_uvloop()
elif OS_IS_WINDOWS:
try_windows_loop()
@property
def m(self) -> WorkerMultiplexer:
"""Interface for interacting with the worker processes"""
return self.multiplexer
def make_coffee(self, *args, **kwargs): def make_coffee(self, *args, **kwargs):
self.state.coffee = True self.state.coffee = True
@ -103,6 +130,8 @@ class RunnerMixin(metaclass=SanicMeta):
verbosity: int = 0, verbosity: int = 0,
motd_display: Optional[Dict[str, str]] = None, motd_display: Optional[Dict[str, str]] = None,
auto_tls: bool = False, auto_tls: bool = False,
single_process: bool = False,
legacy: bool = False,
) -> None: ) -> None:
""" """
Run the HTTP Server and listen until keyboard interrupt or term Run the HTTP Server and listen until keyboard interrupt or term
@ -163,9 +192,17 @@ class RunnerMixin(metaclass=SanicMeta):
verbosity=verbosity, verbosity=verbosity,
motd_display=motd_display, motd_display=motd_display,
auto_tls=auto_tls, auto_tls=auto_tls,
single_process=single_process,
legacy=legacy,
) )
self.__class__.serve(primary=self) # type: ignore if single_process:
serve = self.__class__.serve_single
elif legacy:
serve = self.__class__.serve_legacy
else:
serve = self.__class__.serve
serve(primary=self) # type: ignore
def prepare( def prepare(
self, self,
@ -193,6 +230,8 @@ class RunnerMixin(metaclass=SanicMeta):
motd_display: Optional[Dict[str, str]] = None, motd_display: Optional[Dict[str, str]] = None,
coffee: bool = False, coffee: bool = False,
auto_tls: bool = False, auto_tls: bool = False,
single_process: bool = False,
legacy: bool = False,
) -> None: ) -> None:
if version == 3 and self.state.server_info: if version == 3 and self.state.server_info:
raise RuntimeError( raise RuntimeError(
@ -205,6 +244,9 @@ class RunnerMixin(metaclass=SanicMeta):
debug = True debug = True
auto_reload = True auto_reload = True
if debug and access_log is None:
access_log = True
self.state.verbosity = verbosity self.state.verbosity = verbosity
if not self.state.auto_reload: if not self.state.auto_reload:
self.state.auto_reload = bool(auto_reload) self.state.auto_reload = bool(auto_reload)
@ -212,6 +254,21 @@ class RunnerMixin(metaclass=SanicMeta):
if fast and workers != 1: if fast and workers != 1:
raise RuntimeError("You cannot use both fast=True and workers=X") raise RuntimeError("You cannot use both fast=True and workers=X")
if single_process and (fast or (workers > 1) or auto_reload):
raise RuntimeError(
"Single process cannot be run with multiple workers "
"or auto-reload"
)
if single_process and legacy:
raise RuntimeError("Cannot run single process and legacy mode")
if register_sys_signals is False and not (single_process or legacy):
raise RuntimeError(
"Cannot run Sanic.serve with register_sys_signals=False. "
"Use either Sanic.serve_single or Sanic.serve_legacy."
)
if motd_display: if motd_display:
self.config.MOTD_DISPLAY.update(motd_display) self.config.MOTD_DISPLAY.update(motd_display)
@ -235,12 +292,6 @@ class RunnerMixin(metaclass=SanicMeta):
"#asynchronous-support" "#asynchronous-support"
) )
if (
self.__class__.should_auto_reload()
and os.environ.get("SANIC_SERVER_RUNNING") != "true"
): # no cov
return
if sock is None: if sock is None:
host, port = self.get_address(host, port, version, auto_tls) host, port = self.get_address(host, port, version, auto_tls)
@ -287,10 +338,10 @@ class RunnerMixin(metaclass=SanicMeta):
ApplicationServerInfo(settings=server_settings) ApplicationServerInfo(settings=server_settings)
) )
if self.config.USE_UVLOOP is True or ( # if self.config.USE_UVLOOP is True or (
self.config.USE_UVLOOP is _default and not OS_IS_WINDOWS # self.config.USE_UVLOOP is _default and not OS_IS_WINDOWS
): # ):
try_use_uvloop() # try_use_uvloop()
async def create_server( async def create_server(
self, self,
@ -399,18 +450,23 @@ class RunnerMixin(metaclass=SanicMeta):
asyncio_server_kwargs=asyncio_server_kwargs, **server_settings asyncio_server_kwargs=asyncio_server_kwargs, **server_settings
) )
def stop(self): def stop(self, terminate: bool = True, unregister: bool = False):
""" """
This kills the Sanic This kills the Sanic
""" """
if terminate and hasattr(self, "multiplexer"):
self.multiplexer.terminate()
if self.state.stage is not ServerStage.STOPPED: if self.state.stage is not ServerStage.STOPPED:
self.shutdown_tasks(timeout=0) self.shutdown_tasks(timeout=0) # type: ignore
for task in all_tasks(): for task in all_tasks():
with suppress(AttributeError): with suppress(AttributeError):
if task.get_name() == "RunServer": if task.get_name() == "RunServer":
task.cancel() task.cancel()
get_event_loop().stop() get_event_loop().stop()
if unregister:
self.__class__.unregister_app(self) # type: ignore
def _helper( def _helper(
self, self,
host: Optional[str] = None, host: Optional[str] = None,
@ -472,7 +528,11 @@ class RunnerMixin(metaclass=SanicMeta):
self.motd(server_settings=server_settings) self.motd(server_settings=server_settings)
if is_atty() and not self.state.is_debug: if (
is_atty()
and not self.state.is_debug
and not os.environ.get("SANIC_IGNORE_PRODUCTION_WARNING")
):
error_logger.warning( error_logger.warning(
f"{Colors.YELLOW}Sanic is running in PRODUCTION mode. " f"{Colors.YELLOW}Sanic is running in PRODUCTION mode. "
"Consider using '--debug' or '--dev' while actively " "Consider using '--debug' or '--dev' while actively "
@ -501,6 +561,13 @@ class RunnerMixin(metaclass=SanicMeta):
serve_location: str = "", serve_location: str = "",
server_settings: Optional[Dict[str, Any]] = None, server_settings: Optional[Dict[str, Any]] = None,
): ):
if (
os.environ.get("SANIC_WORKER_NAME")
or os.environ.get("SANIC_MOTD_OUTPUT")
or os.environ.get("SANIC_WORKER_PROCESS")
or os.environ.get("SANIC_SERVER_RUNNING")
):
return
if serve_location: if serve_location:
deprecation( deprecation(
"Specifying a serve_location in the MOTD is deprecated and " "Specifying a serve_location in the MOTD is deprecated and "
@ -510,6 +577,14 @@ class RunnerMixin(metaclass=SanicMeta):
else: else:
serve_location = self.get_server_location(server_settings) serve_location = self.get_server_location(server_settings)
if self.config.MOTD: if self.config.MOTD:
logo = get_logo(coffee=self.state.coffee)
display, extra = self.get_motd_data(server_settings)
MOTD.output(logo, serve_location, display, extra)
def get_motd_data(
self, server_settings: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
mode = [f"{self.state.mode},"] mode = [f"{self.state.mode},"]
if self.state.fast: if self.state.fast:
mode.append("goin' fast") mode.append("goin' fast")
@ -560,7 +635,7 @@ class RunnerMixin(metaclass=SanicMeta):
packages.append( packages.append(
f"{package_name}=={module.__version__}" # type: ignore f"{package_name}=={module.__version__}" # type: ignore
) )
except ImportError: except ImportError: # no cov
... ...
if packages: if packages:
@ -569,9 +644,7 @@ class RunnerMixin(metaclass=SanicMeta):
if self.config.MOTD_DISPLAY: if self.config.MOTD_DISPLAY:
extra.update(self.config.MOTD_DISPLAY) extra.update(self.config.MOTD_DISPLAY)
logo = get_logo(coffee=self.state.coffee) return display, extra
MOTD.output(logo, serve_location, display, extra)
@property @property
def serve_location(self) -> str: def serve_location(self) -> str:
@ -591,24 +664,20 @@ class RunnerMixin(metaclass=SanicMeta):
if not server_settings: if not server_settings:
return serve_location return serve_location
if server_settings["ssl"] is not None: host = server_settings["host"]
port = server_settings["port"]
if server_settings.get("ssl") is not None:
proto = "https" proto = "https"
if server_settings["unix"]: if server_settings.get("unix"):
serve_location = f'{server_settings["unix"]} {proto}://...' serve_location = f'{server_settings["unix"]} {proto}://...'
elif server_settings["sock"]: elif server_settings.get("sock"):
serve_location = ( host, port, *_ = server_settings["sock"].getsockname()
f'{server_settings["sock"].getsockname()} {proto}://...'
) if not serve_location and host and port:
elif server_settings["host"] and server_settings["port"]:
# colon(:) is legal for a host only in an ipv6 address # colon(:) is legal for a host only in an ipv6 address
display_host = ( display_host = f"[{host}]" if ":" in host else host
f'[{server_settings["host"]}]' serve_location = f"{proto}://{display_host}:{port}"
if ":" in server_settings["host"]
else server_settings["host"]
)
serve_location = (
f'{proto}://{display_host}:{server_settings["port"]}'
)
return serve_location return serve_location
@ -628,7 +697,252 @@ class RunnerMixin(metaclass=SanicMeta):
return any(app.state.auto_reload for app in cls._app_registry.values()) return any(app.state.auto_reload for app in cls._app_registry.values())
@classmethod @classmethod
def serve(cls, primary: Optional[Sanic] = None) -> None: def _get_context(cls) -> BaseContext:
method = (
"spawn"
if "linux" not in sys.platform or cls.should_auto_reload()
else "fork"
)
return get_context(method)
@classmethod
def serve(
cls,
primary: Optional[Sanic] = None,
*,
app_loader: Optional[AppLoader] = None,
factory: Optional[Callable[[], Sanic]] = None,
) -> None:
os.environ["SANIC_MOTD_OUTPUT"] = "true"
apps = list(cls._app_registry.values())
if factory:
primary = factory()
else:
if not primary:
if app_loader:
primary = app_loader.load()
if not primary:
try:
primary = apps[0]
except IndexError:
raise RuntimeError(
"Did not find any applications."
) from None
# This exists primarily for unit testing
if not primary.state.server_info: # no cov
for app in apps:
app.state.server_info.clear()
return
try:
primary_server_info = primary.state.server_info[0]
except IndexError:
raise RuntimeError(
f"No server information found for {primary.name}. Perhaps you "
"need to run app.prepare(...)?\n"
"See ____ for more information."
) from None
socks = []
sync_manager = Manager()
try:
main_start = primary_server_info.settings.pop("main_start", None)
main_stop = primary_server_info.settings.pop("main_stop", None)
app = primary_server_info.settings.pop("app")
app.setup_loop()
loop = new_event_loop()
trigger_events(main_start, loop, primary)
socks = [
sock
for sock in [
configure_socket(server_info.settings)
for app in apps
for server_info in app.state.server_info
]
if sock
]
primary_server_info.settings["run_multiple"] = True
monitor_sub, monitor_pub = Pipe(True)
worker_state: Dict[str, Any] = sync_manager.dict()
kwargs: Dict[str, Any] = {
**primary_server_info.settings,
"monitor_publisher": monitor_pub,
"worker_state": worker_state,
}
if not app_loader:
if factory:
app_loader = AppLoader(factory=factory)
else:
app_loader = AppLoader(
factory=partial(cls.get_app, app.name) # type: ignore
)
kwargs["app_name"] = app.name
kwargs["app_loader"] = app_loader
kwargs["server_info"] = {}
kwargs["passthru"] = {
"auto_reload": app.auto_reload,
"state": {
"verbosity": app.state.verbosity,
"mode": app.state.mode,
},
"config": {
"ACCESS_LOG": app.config.ACCESS_LOG,
"NOISY_EXCEPTIONS": app.config.NOISY_EXCEPTIONS,
},
"shared_ctx": app.shared_ctx.__dict__,
}
for app in apps:
kwargs["server_info"][app.name] = []
for server_info in app.state.server_info:
server_info.settings = {
k: v
for k, v in server_info.settings.items()
if k not in ("main_start", "main_stop", "app", "ssl")
}
kwargs["server_info"][app.name].append(server_info)
ssl = kwargs.get("ssl")
if isinstance(ssl, SanicSSLContext):
kwargs["ssl"] = kwargs["ssl"].sanic
manager = WorkerManager(
primary.state.workers,
worker_serve,
kwargs,
cls._get_context(),
(monitor_pub, monitor_sub),
worker_state,
)
if cls.should_auto_reload():
reload_dirs: Set[Path] = primary.state.reload_dirs.union(
*(app.state.reload_dirs for app in apps)
)
reloader = Reloader(monitor_pub, 1.0, reload_dirs, app_loader)
manager.manage("Reloader", reloader, {}, transient=False)
inspector = None
if primary.config.INSPECTOR:
display, extra = primary.get_motd_data()
packages = [
pkg.strip() for pkg in display["packages"].split(",")
]
module = import_module("sanic")
sanic_version = f"sanic=={module.__version__}" # type: ignore
app_info = {
**display,
"packages": [sanic_version, *packages],
"extra": extra,
}
inspector = Inspector(
monitor_pub,
app_info,
worker_state,
primary.config.INSPECTOR_HOST,
primary.config.INSPECTOR_PORT,
)
manager.manage("Inspector", inspector, {}, transient=False)
primary._inspector = inspector
primary._manager = manager
ready = primary.listeners["main_process_ready"]
trigger_events(ready, loop, primary)
manager.run()
except BaseException:
kwargs = primary_server_info.settings
error_logger.exception(
"Experienced exception while trying to serve"
)
raise
finally:
logger.info("Server Stopped")
for app in apps:
app.state.server_info.clear()
app.router.reset()
app.signal_router.reset()
sync_manager.shutdown()
for sock in socks:
sock.close()
socks = []
trigger_events(main_stop, loop, primary)
loop.close()
cls._cleanup_env_vars()
cls._cleanup_apps()
unix = kwargs.get("unix")
if unix:
remove_unix_socket(unix)
@classmethod
def serve_single(cls, primary: Optional[Sanic] = None) -> None:
os.environ["SANIC_MOTD_OUTPUT"] = "true"
apps = list(cls._app_registry.values())
if not primary:
try:
primary = apps[0]
except IndexError:
raise RuntimeError("Did not find any applications.")
# This exists primarily for unit testing
if not primary.state.server_info: # no cov
for app in apps:
app.state.server_info.clear()
return
primary_server_info = primary.state.server_info[0]
primary.before_server_start(partial(primary._start_servers, apps=apps))
kwargs = {
k: v
for k, v in primary_server_info.settings.items()
if k
not in (
"main_start",
"main_stop",
"app",
)
}
kwargs["app_name"] = primary.name
kwargs["app_loader"] = None
sock = configure_socket(kwargs)
kwargs["server_info"] = {}
kwargs["server_info"][primary.name] = []
for server_info in primary.state.server_info:
server_info.settings = {
k: v
for k, v in server_info.settings.items()
if k not in ("main_start", "main_stop", "app")
}
kwargs["server_info"][primary.name].append(server_info)
try:
worker_serve(monitor_publisher=None, **kwargs)
except BaseException:
error_logger.exception(
"Experienced exception while trying to serve"
)
raise
finally:
logger.info("Server Stopped")
for app in apps:
app.state.server_info.clear()
app.router.reset()
app.signal_router.reset()
if sock:
sock.close()
cls._cleanup_env_vars()
cls._cleanup_apps()
@classmethod
def serve_legacy(cls, primary: Optional[Sanic] = None) -> None:
apps = list(cls._app_registry.values()) apps = list(cls._app_registry.values())
if not primary: if not primary:
@ -649,7 +963,7 @@ class RunnerMixin(metaclass=SanicMeta):
reload_dirs: Set[Path] = primary.state.reload_dirs.union( reload_dirs: Set[Path] = primary.state.reload_dirs.union(
*(app.state.reload_dirs for app in apps) *(app.state.reload_dirs for app in apps)
) )
reloader_helpers.watchdog(1.0, reload_dirs) watchdog(1.0, reload_dirs)
trigger_events(reloader_stop, loop, primary) trigger_events(reloader_stop, loop, primary)
return return
@ -662,11 +976,17 @@ class RunnerMixin(metaclass=SanicMeta):
primary_server_info = primary.state.server_info[0] primary_server_info = primary.state.server_info[0]
primary.before_server_start(partial(primary._start_servers, apps=apps)) primary.before_server_start(partial(primary._start_servers, apps=apps))
deprecation(
f"{Colors.YELLOW}Running {Colors.SANIC}Sanic {Colors.YELLOW}w/ "
f"LEGACY manager.{Colors.END} Support for will be dropped in "
"version 23.3.",
23.3,
)
try: try:
primary_server_info.stage = ServerStage.SERVING primary_server_info.stage = ServerStage.SERVING
if primary.state.workers > 1 and os.name != "posix": # no cov if primary.state.workers > 1 and os.name != "posix": # no cov
logger.warn( logger.warning(
f"Multiprocessing is currently not supported on {os.name}," f"Multiprocessing is currently not supported on {os.name},"
" using workers=1 instead" " using workers=1 instead"
) )
@ -687,10 +1007,9 @@ class RunnerMixin(metaclass=SanicMeta):
finally: finally:
primary_server_info.stage = ServerStage.STOPPED primary_server_info.stage = ServerStage.STOPPED
logger.info("Server Stopped") logger.info("Server Stopped")
for app in apps:
app.state.server_info.clear() cls._cleanup_env_vars()
app.router.reset() cls._cleanup_apps()
app.signal_router.reset()
async def _start_servers( async def _start_servers(
self, self,
@ -728,7 +1047,7 @@ class RunnerMixin(metaclass=SanicMeta):
*server_info.settings.pop("main_start", []), *server_info.settings.pop("main_start", []),
*server_info.settings.pop("main_stop", []), *server_info.settings.pop("main_stop", []),
] ]
if handlers: if handlers: # no cov
error_logger.warning( error_logger.warning(
f"Sanic found {len(handlers)} listener(s) on " f"Sanic found {len(handlers)} listener(s) on "
"secondary applications attached to the main " "secondary applications attached to the main "
@ -741,12 +1060,15 @@ class RunnerMixin(metaclass=SanicMeta):
if not server_info.settings["loop"]: if not server_info.settings["loop"]:
server_info.settings["loop"] = get_running_loop() server_info.settings["loop"] = get_running_loop()
try: serve_args: Dict[str, Any] = {
server_info.server = await serve(
**server_info.settings, **server_info.settings,
run_async=True, "run_async": True,
reuse_port=bool(primary.state.workers - 1), "reuse_port": bool(primary.state.workers - 1),
) }
if "app" not in serve_args:
serve_args["app"] = app
try:
server_info.server = await serve(**serve_args)
except OSError as e: # no cov except OSError as e: # no cov
first_message = ( first_message = (
"An OSError was detected on startup. " "An OSError was detected on startup. "
@ -772,9 +1094,9 @@ class RunnerMixin(metaclass=SanicMeta):
async def _run_server( async def _run_server(
self, self,
app: RunnerMixin, app: StartupMixin,
server_info: ApplicationServerInfo, server_info: ApplicationServerInfo,
) -> None: ) -> None: # no cov
try: try:
# We should never get to this point without a server # We should never get to this point without a server
@ -798,3 +1120,26 @@ class RunnerMixin(metaclass=SanicMeta):
finally: finally:
server_info.stage = ServerStage.STOPPED server_info.stage = ServerStage.STOPPED
server_info.server = None server_info.server = None
@staticmethod
def _cleanup_env_vars():
variables = (
"SANIC_RELOADER_PROCESS",
"SANIC_IGNORE_PRODUCTION_WARNING",
"SANIC_WORKER_NAME",
"SANIC_MOTD_OUTPUT",
"SANIC_WORKER_PROCESS",
"SANIC_SERVER_RUNNING",
)
for var in variables:
try:
del os.environ[var]
except KeyError:
...
@classmethod
def _cleanup_apps(cls):
for app in cls._app_registry.values():
app.state.server_info.clear()
app.router.reset()
app.signal_router.reset()

View File

@ -9,7 +9,6 @@ from time import sleep
def _iter_module_files(): def _iter_module_files():
"""This iterates over all relevant Python files. """This iterates over all relevant Python files.
It goes through all It goes through all
loaded files from modules, all files in folders of already loaded modules loaded files from modules, all files in folders of already loaded modules
as well as all files reachable through a package. as well as all files reachable through a package.
@ -52,7 +51,7 @@ def restart_with_reloader(changed=None):
this one. this one.
""" """
reloaded = ",".join(changed) if changed else "" reloaded = ",".join(changed) if changed else ""
return subprocess.Popen( return subprocess.Popen( # nosec B603
_get_args_for_reloading(), _get_args_for_reloading(),
env={ env={
**os.environ, **os.environ,
@ -79,7 +78,6 @@ def _check_file(filename, mtimes):
def watchdog(sleep_interval, reload_dirs): def watchdog(sleep_interval, reload_dirs):
"""Watch project files, restart worker process if a change happened. """Watch project files, restart worker process if a change happened.
:param sleep_interval: interval in second. :param sleep_interval: interval in second.
:return: Nothing :return: Nothing
""" """

View File

@ -1,4 +1,5 @@
import asyncio import asyncio
import sys
from distutils.util import strtobool from distutils.util import strtobool
from os import getenv from os import getenv
@ -47,3 +48,19 @@ def try_use_uvloop() -> None:
if not isinstance(asyncio.get_event_loop_policy(), uvloop.EventLoopPolicy): if not isinstance(asyncio.get_event_loop_policy(), uvloop.EventLoopPolicy):
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
def try_windows_loop():
if not OS_IS_WINDOWS:
error_logger.warning(
"You are trying to use an event loop policy that is not "
"compatible with your system. You can simply let Sanic handle "
"selecting the best loop for you. Sanic will now continue to run "
"using the default event loop."
)
return
if sys.version_info >= (3, 8) and not isinstance(
asyncio.get_event_loop_policy(), asyncio.WindowsSelectorEventLoopPolicy
):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

View File

@ -265,7 +265,6 @@ class HttpProtocol(HttpProtocolMixin, SanicProtocol, metaclass=TouchUpMeta):
error_logger.exception("protocol.connect_made") error_logger.exception("protocol.connect_made")
def data_received(self, data: bytes): def data_received(self, data: bytes):
try: try:
self._time = current_time() self._time = current_time()
if not data: if not data:

View File

@ -129,7 +129,7 @@ def _setup_system_signals(
run_multiple: bool, run_multiple: bool,
register_sys_signals: bool, register_sys_signals: bool,
loop: asyncio.AbstractEventLoop, loop: asyncio.AbstractEventLoop,
) -> None: ) -> None: # no cov
# Ignore SIGINT when run_multiple # Ignore SIGINT when run_multiple
if run_multiple: if run_multiple:
signal_func(SIGINT, SIG_IGN) signal_func(SIGINT, SIG_IGN)
@ -141,7 +141,9 @@ def _setup_system_signals(
ctrlc_workaround_for_windows(app) ctrlc_workaround_for_windows(app)
else: else:
for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]: for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]:
loop.add_signal_handler(_signal, app.stop) loop.add_signal_handler(
_signal, partial(app.stop, terminate=False)
)
def _run_server_forever(loop, before_stop, after_stop, cleanup, unix): def _run_server_forever(loop, before_stop, after_stop, cleanup, unix):
@ -161,6 +163,7 @@ def _run_server_forever(loop, before_stop, after_stop, cleanup, unix):
loop.run_until_complete(after_stop()) loop.run_until_complete(after_stop())
remove_unix_socket(unix) remove_unix_socket(unix)
loop.close()
def _serve_http_1( def _serve_http_1(
@ -197,8 +200,12 @@ def _serve_http_1(
asyncio_server_kwargs = ( asyncio_server_kwargs = (
asyncio_server_kwargs if asyncio_server_kwargs else {} asyncio_server_kwargs if asyncio_server_kwargs else {}
) )
if OS_IS_WINDOWS:
pid = os.getpid()
sock = sock.share(pid)
sock = socket.fromshare(sock)
# UNIX sockets are always bound by us (to preserve semantics between modes) # UNIX sockets are always bound by us (to preserve semantics between modes)
if unix: elif unix:
sock = bind_unix_socket(unix, backlog=backlog) sock = bind_unix_socket(unix, backlog=backlog)
server_coroutine = loop.create_server( server_coroutine = loop.create_server(
server, server,

View File

@ -6,7 +6,10 @@ import socket
import stat import stat
from ipaddress import ip_address from ipaddress import ip_address
from typing import Optional from typing import Any, Dict, Optional
from sanic.exceptions import ServerError
from sanic.http.constants import HTTP
def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket: def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket:
@ -16,6 +19,8 @@ def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket:
:param backlog: Maximum number of connections to queue :param backlog: Maximum number of connections to queue
:return: socket.socket object :return: socket.socket object
""" """
location = (host, port)
# socket.share, socket.fromshare
try: # IP address: family must be specified for IPv6 at least try: # IP address: family must be specified for IPv6 at least
ip = ip_address(host) ip = ip_address(host)
host = str(ip) host = str(ip)
@ -25,8 +30,9 @@ def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket:
except ValueError: # Hostname, may become AF_INET or AF_INET6 except ValueError: # Hostname, may become AF_INET or AF_INET6
sock = socket.socket() sock = socket.socket()
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port)) sock.bind(location)
sock.listen(backlog) sock.listen(backlog)
sock.set_inheritable(True)
return sock return sock
@ -36,7 +42,7 @@ def bind_unix_socket(path: str, *, mode=0o666, backlog=100) -> socket.socket:
:param backlog: Maximum number of connections to queue :param backlog: Maximum number of connections to queue
:return: socket.socket object :return: socket.socket object
""" """
"""Open or atomically replace existing socket with zero downtime."""
# Sanitise and pre-verify socket path # Sanitise and pre-verify socket path
path = os.path.abspath(path) path = os.path.abspath(path)
folder = os.path.dirname(path) folder = os.path.dirname(path)
@ -85,3 +91,37 @@ def remove_unix_socket(path: Optional[str]) -> None:
os.unlink(path) os.unlink(path)
except FileNotFoundError: except FileNotFoundError:
pass pass
def configure_socket(
server_settings: Dict[str, Any]
) -> Optional[socket.SocketType]:
# Create a listening socket or use the one in settings
if server_settings.get("version") is HTTP.VERSION_3:
return None
sock = server_settings.get("sock")
unix = server_settings["unix"]
backlog = server_settings["backlog"]
if unix:
sock = bind_unix_socket(unix, backlog=backlog)
server_settings["unix"] = unix
if sock is None:
try:
sock = bind_socket(
server_settings["host"],
server_settings["port"],
backlog=backlog,
)
except OSError as e: # no cov
raise ServerError(
f"Sanic server could not start: {e}.\n"
"This may have happened if you are running Sanic in the "
"global scope and not inside of a "
'`if __name__ == "__main__"` block. See more information: '
"____."
) from e
sock.set_inheritable(True)
server_settings["sock"] = sock
server_settings["host"] = None
server_settings["port"] = None
return sock

View File

@ -154,13 +154,13 @@ class SignalRouter(BaseRouter):
try: try:
for signal in signals: for signal in signals:
params.pop("__trigger__", None) params.pop("__trigger__", None)
requirements = getattr(
signal.handler, "__requirements__", None
)
if ( if (
(condition is None and signal.ctx.exclusive is False) (condition is None and signal.ctx.exclusive is False)
or ( or (condition is None and not requirements)
condition is None or (condition == requirements)
and not signal.handler.__requirements__
)
or (condition == signal.handler.__requirements__)
) and (signal.ctx.trigger or event == signal.ctx.definition): ) and (signal.ctx.trigger or event == signal.ctx.definition):
maybe_coroutine = signal.handler(**params) maybe_coroutine = signal.handler(**params)
if isawaitable(maybe_coroutine): if isawaitable(maybe_coroutine):
@ -191,7 +191,7 @@ class SignalRouter(BaseRouter):
fail_not_found=fail_not_found and inline, fail_not_found=fail_not_found and inline,
reverse=reverse, reverse=reverse,
) )
logger.debug(f"Dispatching signal: {event}") logger.debug(f"Dispatching signal: {event}", extra={"verbosity": 1})
if inline: if inline:
return await dispatch return await dispatch

55
sanic/types/shared_ctx.py Normal file
View File

@ -0,0 +1,55 @@
import os
from types import SimpleNamespace
from typing import Any, Iterable
from sanic.log import Colors, error_logger
class SharedContext(SimpleNamespace):
SAFE = ("_lock",)
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
self._lock = False
def __setattr__(self, name: str, value: Any) -> None:
if self.is_locked:
raise RuntimeError(
f"Cannot set {name} on locked SharedContext object"
)
if not os.environ.get("SANIC_WORKER_NAME"):
to_check: Iterable[Any]
if not isinstance(value, (tuple, frozenset)):
to_check = [value]
else:
to_check = value
for item in to_check:
self._check(name, item)
super().__setattr__(name, value)
def _check(self, name: str, value: Any) -> None:
if name in self.SAFE:
return
try:
module = value.__module__
except AttributeError:
module = ""
if not any(
module.startswith(prefix)
for prefix in ("multiprocessing", "ctypes")
):
error_logger.warning(
f"{Colors.YELLOW}Unsafe object {Colors.PURPLE}{name} "
f"{Colors.YELLOW}with type {Colors.PURPLE}{type(value)} "
f"{Colors.YELLOW}was added to shared_ctx. It may not "
"not function as intended. Consider using the regular "
f"ctx. For more information, please see ____.{Colors.END}"
)
@property
def is_locked(self) -> bool:
return getattr(self, "_lock", False)
def lock(self) -> None:
self._lock = True

0
sanic/worker/__init__.py Normal file
View File

141
sanic/worker/inspector.py Normal file
View File

@ -0,0 +1,141 @@
import sys
from datetime import datetime
from multiprocessing.connection import Connection
from signal import SIGINT, SIGTERM
from signal import signal as signal_func
from socket import AF_INET, SOCK_STREAM, socket, timeout
from textwrap import indent
from typing import Any, Dict
from sanic.application.logo import get_logo
from sanic.application.motd import MOTDTTY
from sanic.log import Colors, error_logger, logger
from sanic.server.socket import configure_socket
try: # no cov
from ujson import dumps, loads
except ModuleNotFoundError: # no cov
from json import dumps, loads # type: ignore
class Inspector:
    """TCP control server that runs in the main Sanic process.

    Listens on ``(host, port)`` and accepts small text commands from the
    ``sanic inspect`` CLI: ``reload`` and ``shutdown`` are forwarded to the
    worker manager through ``_publisher``; any other payload is answered
    with a JSON dump of the application info and worker state.
    """

    def __init__(
        self,
        publisher: Connection,
        app_info: Dict[str, Any],
        worker_state: Dict[str, Any],
        host: str,
        port: int,
    ):
        # Pipe connected to the WorkerManager's monitor loop.
        self._publisher = publisher
        self.run = True
        self.app_info = app_info
        self.worker_state = worker_state
        self.host = host
        self.port = port

    def __call__(self) -> None:
        """Serve the inspector socket until stopped via SIGINT/SIGTERM."""
        sock = configure_socket(
            {"host": self.host, "port": self.port, "unix": None, "backlog": 1}
        )
        assert sock
        signal_func(SIGINT, self.stop)
        signal_func(SIGTERM, self.stop)
        logger.info(f"Inspector started on: {sock.getsockname()}")
        # Short accept timeout so the loop can notice self.run flipping.
        sock.settimeout(0.5)
        try:
            while self.run:
                try:
                    conn, _ = sock.accept()
                except timeout:
                    continue
                else:
                    # Commands are tiny; 64 bytes covers any action name.
                    action = conn.recv(64)
                    if action == b"reload":
                        conn.send(b"\n")
                        self.reload()
                    elif action == b"shutdown":
                        conn.send(b"\n")
                        self.shutdown()
                    else:
                        # Unknown/absent action: reply with the full state.
                        data = dumps(self.state_to_json())
                        conn.send(data.encode())
                    conn.close()
        finally:
            logger.debug("Inspector closing")
            sock.close()

    def stop(self, *_):
        # Signal handler: breaks the accept loop on the next timeout.
        self.run = False

    def state_to_json(self):
        """Return a JSON-serializable snapshot of app info + worker state."""
        output = {"info": self.app_info}
        output["workers"] = self._make_safe(dict(self.worker_state))
        return output

    def reload(self):
        # Empty suffix after ":" means no changed-file payload.
        message = "__ALL_PROCESSES__:"
        self._publisher.send(message)

    def shutdown(self):
        message = "__TERMINATE__"
        self._publisher.send(message)

    def _make_safe(self, obj: Dict[str, Any]) -> Dict[str, Any]:
        # Recursively convert datetime values to ISO strings so that
        # dumps() in state_to_json() cannot fail on them.
        for key, value in obj.items():
            if isinstance(value, dict):
                obj[key] = self._make_safe(value)
            elif isinstance(value, datetime):
                obj[key] = value.isoformat()
        return obj
def inspect(host: str, port: int, action: str):
    """CLI-side client for the :class:`Inspector` server.

    Connects to the inspector at ``(host, port)``, sends ``action`` as the
    raw command, and renders the response:

    - ``raw``: print the server's JSON reply verbatim
    - ``pretty``: render an MOTD-style summary plus per-worker details
    - anything else (e.g. ``reload``/``shutdown``): the server replies
      with a bare newline acknowledgment, which is not printed

    Exits the process with status 1 if the inspector is unreachable.
    """
    out = sys.stdout.write
    with socket(AF_INET, SOCK_STREAM) as sock:
        try:
            sock.connect((host, port))
        except ConnectionRefusedError:
            error_logger.error(
                f"{Colors.RED}Could not connect to inspector at: "
                f"{Colors.YELLOW}{(host, port)}{Colors.END}\n"
                "Either the application is not running, or it did not start "
                "an inspector instance."
            )
            sock.close()
            sys.exit(1)
        sock.sendall(action.encode())
        data = sock.recv(4096)
        if action == "raw":
            out(data.decode())
        elif action == "pretty":
            loaded = loads(data)
            display = loaded.pop("info")
            extra = display.pop("extra", {})
            display["packages"] = ", ".join(display["packages"])
            MOTDTTY(get_logo(), f"{host}:{port}", display, extra).display(
                version=False,
                action="Inspecting",
                out=out,
            )
            # One indented section per worker, key/value pairs colorized.
            for name, info in loaded["workers"].items():
                info = "\n".join(
                    f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
                    for key, value in info.items()
                )
                out(
                    "\n"
                    + indent(
                        "\n".join(
                            [
                                f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
                                info,
                            ]
                        ),
                        "  ",
                    )
                    + "\n"
                )

126
sanic/worker/loader.py Normal file
View File

@ -0,0 +1,126 @@
from __future__ import annotations
import os
import sys
from importlib import import_module
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Optional,
Type,
Union,
cast,
)
from sanic.http.tls.creators import CertCreator, MkcertCreator, TrustmeCreator
if TYPE_CHECKING:
from sanic import Sanic as SanicApp
class AppLoader:
    """Resolve and instantiate a Sanic application from CLI-style input.

    ``module_input`` may be ``"module:attr"``, ``"module.attr"``, or a
    path (with ``as_simple``); a trailing ``()`` on the attribute marks
    the target as a factory callable.
    """

    def __init__(
        self,
        module_input: str = "",
        as_factory: bool = False,
        as_simple: bool = False,
        args: Any = None,
        factory: Optional[Callable[[], SanicApp]] = None,
    ) -> None:
        self.module_input = module_input
        self.module_name = ""
        self.app_name = ""
        self.as_factory = as_factory
        self.as_simple = as_simple
        self.args = args
        self.factory = factory
        self.cwd = os.getcwd()

        if module_input:
            # ":" wins over "." when both are present.
            sep = ":" if ":" in module_input else "."
            if sep in module_input:
                self.module_name, self.app_name = module_input.rsplit(sep, 1)
                if self.app_name.endswith("()"):
                    # "module:create_app()" => call it as a factory.
                    self.as_factory = True
                    self.app_name = self.app_name[:-2]

    def load(self) -> SanicApp:
        """Import (or build) and return the Sanic application instance."""
        module_path = os.path.abspath(self.cwd)
        if module_path not in sys.path:
            sys.path.append(module_path)

        if self.factory:
            return self.factory()

        from sanic.app import Sanic
        from sanic.simple import create_simple_server

        if self.as_simple:
            return create_simple_server(Path(self.module_input))

        if self.module_name == "" and os.path.isdir(self.module_input):
            raise ValueError(
                "App not found.\n"
                " Please use --simple if you are passing a "
                "directory to sanic.\n"
                f" eg. sanic {self.module_input} --simple"
            )

        module = import_module(self.module_name)
        app = getattr(module, self.app_name, None)
        if self.as_factory:
            # Prefer passing parsed CLI args; fall back to no-arg factory.
            try:
                app = app(self.args)
            except TypeError:
                app = app()

        app_type_name = type(app).__name__
        if (
            not isinstance(app, Sanic)
            and self.args
            and hasattr(self.args, "module")
        ):
            if callable(app):
                solution = f"sanic {self.args.module} --factory"
                raise ValueError(
                    "Module is not a Sanic app, it is a "
                    f"{app_type_name}\n"
                    " If this callable returns a "
                    f"Sanic instance try: \n{solution}"
                )
            raise ValueError(
                f"Module is not a Sanic app, it is a {app_type_name}\n"
                f" Perhaps you meant {self.args.module}:app?"
            )
        return app
class CertLoader:
    """Create TLS certificates in-process from an ssl config dict.

    ``ssl_data`` must name a supported ``creator`` ("mkcert" or
    "trustme") and provide ``key``, ``cert``, and ``localhost`` entries.
    """

    _creator_class: Type[CertCreator]

    def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):
        creator_name = ssl_data.get("creator")
        if creator_name == "mkcert":
            self._creator_class = MkcertCreator
        elif creator_name == "trustme":
            self._creator_class = TrustmeCreator
        else:
            raise RuntimeError(f"Unknown certificate creator: {creator_name}")

        self._key = ssl_data["key"]
        self._cert = ssl_data["cert"]
        self._localhost = cast(str, ssl_data["localhost"])

    def load(self, app: SanicApp):
        """Generate and return a certificate for the given app."""
        creator = self._creator_class(app, self._key, self._cert)
        return creator.generate_cert(self._localhost)

181
sanic/worker/manager.py Normal file
View File

@ -0,0 +1,181 @@
import os
import sys
from signal import SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from time import sleep
from typing import List, Optional
from sanic.compat import OS_IS_WINDOWS
from sanic.log import error_logger, logger
from sanic.worker.process import ProcessState, Worker, WorkerProcess
if not OS_IS_WINDOWS:
from signal import SIGKILL
else:
SIGKILL = SIGINT
class WorkerManager:
    """Supervisor for all Sanic worker processes, run in the main process.

    Responsibilities:
    - spawn the requested number of server processes (plus any other
      processes registered through ``manage``)
    - relay restart/terminate messages received over the monitor pipe
    - translate SIGINT/SIGTERM into an orderly shutdown
    """

    # Number of 0.1s ack polls (~5 seconds) before giving up on workers.
    THRESHOLD = 50

    def __init__(
        self,
        number: int,
        serve,
        server_settings,
        context,
        monitor_pubsub,
        worker_state,
    ):
        self.num_server = number
        self.context = context
        # Transient workers are restarted on reload; durable ones are not.
        self.transient: List[Worker] = []
        self.durable: List[Worker] = []
        self.monitor_publisher, self.monitor_subscriber = monitor_pubsub
        self.worker_state = worker_state
        self.worker_state["Sanic-Main"] = {"pid": self.pid}
        self.terminated = False
        if number == 0:
            raise RuntimeError("Cannot serve with no workers")
        for i in range(number):
            self.manage(
                f"{WorkerProcess.SERVER_LABEL}-{i}",
                serve,
                server_settings,
                transient=True,
            )
        signal_func(SIGINT, self.shutdown_signal)
        signal_func(SIGTERM, self.shutdown_signal)

    def manage(self, ident, func, kwargs, transient=False):
        """Register a new worker (and its first process) for supervision."""
        container = self.transient if transient else self.durable
        container.append(
            Worker(ident, func, kwargs, self.context, self.worker_state)
        )

    def run(self):
        """Main-process lifecycle: start workers, monitor, then clean up."""
        self.start()
        self.monitor()
        self.join()
        self.terminate()
        # self.kill()

    def start(self):
        for process in self.processes:
            process.start()

    def join(self):
        # Recurse until a full pass finds nothing left to join, since
        # restarts may add processes while earlier ones are being joined.
        logger.debug("Joining processes", extra={"verbosity": 1})
        joined = set()
        for process in self.processes:
            logger.debug(
                f"Found {process.pid} - {process.state.name}",
                extra={"verbosity": 1},
            )
            if process.state < ProcessState.JOINED:
                logger.debug(f"Joining {process.pid}", extra={"verbosity": 1})
                joined.add(process.pid)
                process.join()
        if joined:
            self.join()

    def terminate(self):
        # Idempotent: only the first call actually terminates processes.
        if not self.terminated:
            for process in self.processes:
                process.terminate()
            self.terminated = True

    def restart(self, process_names: Optional[List[str]] = None, **kwargs):
        """Restart transient processes (all of them when no names given)."""
        for process in self.transient_processes:
            if not process_names or process.name in process_names:
                process.restart(**kwargs)

    def monitor(self):
        """Block, relaying messages from the monitor pipe until shutdown.

        Message protocol (strings sent over the pipe):
        - falsy/None: stop monitoring
        - ``"__TERMINATE__"``: shut down all processes
        - ``"<name>[,<name>...][:<files>]"``: restart the named processes
          (``__ALL_PROCESSES__`` means every transient process), optionally
          carrying a comma-joined list of changed files.
        """
        self.wait_for_ack()
        while True:
            try:
                if self.monitor_subscriber.poll(0.1):
                    message = self.monitor_subscriber.recv()
                    logger.debug(
                        f"Monitor message: {message}", extra={"verbosity": 2}
                    )
                    if not message:
                        break
                    elif message == "__TERMINATE__":
                        self.shutdown()
                        break
                    split_message = message.split(":", 1)
                    processes = split_message[0]
                    reloaded_files = (
                        split_message[1] if len(split_message) > 1 else None
                    )
                    process_names = [
                        name.strip() for name in processes.split(",")
                    ]
                    if "__ALL_PROCESSES__" in process_names:
                        process_names = None
                    self.restart(
                        process_names=process_names,
                        reloaded_files=reloaded_files,
                    )
            except InterruptedError:
                # A signal interrupted the poll: on Windows treat it as a
                # stop request; elsewhere the signal handlers own shutdown.
                if not OS_IS_WINDOWS:
                    raise
                break

    def wait_for_ack(self):  # no cov
        # Poll until every server worker reports ACKED; kill everything
        # and exit non-zero if they do not all ack within ~5 seconds.
        misses = 0
        while not self._all_workers_ack():
            sleep(0.1)
            misses += 1
            if misses > self.THRESHOLD:
                error_logger.error("Not all workers are ack. Shutting down.")
                self.kill()
                sys.exit(1)

    @property
    def workers(self):
        return self.transient + self.durable

    @property
    def processes(self):
        # Flattened view over every process of every worker.
        for worker in self.workers:
            for process in worker.processes:
                yield process

    @property
    def transient_processes(self):
        for worker in self.transient:
            for process in worker.processes:
                yield process

    def kill(self):
        # Last resort: SIGKILL (SIGINT on Windows) every process.
        for process in self.processes:
            os.kill(process.pid, SIGKILL)

    def shutdown_signal(self, signal, frame):
        logger.info("Received signal %s. Shutting down.", Signals(signal).name)
        # A falsy message tells monitor() to stop looping.
        self.monitor_publisher.send(None)
        self.shutdown()

    def shutdown(self):
        for process in self.processes:
            if process.is_alive():
                process.terminate()

    @property
    def pid(self):
        return os.getpid()

    def _all_workers_ack(self):
        # Only entries flagged as servers participate in the ack handshake.
        acked = [
            worker_state.get("state") == ProcessState.ACKED.name
            for worker_state in self.worker_state.values()
            if worker_state.get("server")
        ]
        return all(acked) and len(acked) == self.num_server

View File

@ -0,0 +1,48 @@
from multiprocessing.connection import Connection
from os import environ, getpid
from typing import Any, Dict
from sanic.worker.process import ProcessState
from sanic.worker.state import WorkerState
class WorkerMultiplexer:
    """Per-worker handle for talking back to the main process.

    Attached to each app inside a worker; exposes the shared worker state
    and sends restart/terminate commands over the monitor pipe.
    """

    def __init__(
        self,
        monitor_publisher: Connection,
        worker_state: Dict[str, Any],
    ):
        self._monitor_publisher = monitor_publisher
        self._state = WorkerState(worker_state, self.name)

    def ack(self):
        """Mark this worker as ACKED in the shared state."""
        # Reassign the whole entry so the managed dict sees the change.
        entry = dict(self._state._state[self.name])
        entry["state"] = ProcessState.ACKED.name
        self._state._state[self.name] = entry

    def restart(self, name: str = ""):
        """Request a restart of *name* (defaults to this worker)."""
        self._monitor_publisher.send(name or self.name)

    reload = restart  # no cov

    def terminate(self):
        """Request a full shutdown of all workers."""
        self._monitor_publisher.send("__TERMINATE__")

    @property
    def pid(self) -> int:
        return getpid()

    @property
    def name(self) -> str:
        # Set by WorkerProcess.start() in the parent before forking.
        return environ.get("SANIC_WORKER_NAME", "")

    @property
    def state(self):
        return self._state

    @property
    def workers(self) -> Dict[str, Any]:
        return self.state.full()

161
sanic/worker/process.py Normal file
View File

@ -0,0 +1,161 @@
import os
from datetime import datetime, timezone
from enum import IntEnum, auto
from multiprocessing.context import BaseContext
from signal import SIGINT
from typing import Any, Dict, Set
from sanic.log import Colors, logger
def get_now():
    """Return the current timezone-aware UTC timestamp."""
    return datetime.now(tz=timezone.utc)
class ProcessState(IntEnum):
    """Lifecycle stages of a worker process.

    IntEnum so states are ordered: transitions normally only move to a
    higher value (enforced by ``WorkerProcess.set_state``).
    """

    IDLE = auto()
    STARTED = auto()
    ACKED = auto()
    JOINED = auto()
    TERMINATED = auto()
class WorkerProcess:
    """Wrapper around a single OS process with a simple state machine.

    The shared ``worker_state`` dict is always updated by *reassigning*
    the per-process entry (not mutating it in place) so that changes
    propagate through the multiprocessing manager proxy.
    """

    # Name substring that marks a process as a server (ack-participating).
    SERVER_LABEL = "Server"

    def __init__(self, factory, name, target, kwargs, worker_state):
        self.state = ProcessState.IDLE
        self.factory = factory  # e.g. a multiprocessing context's Process
        self.name = name
        self.target = target
        self.kwargs = kwargs
        self.worker_state = worker_state
        if self.name not in self.worker_state:
            self.worker_state[self.name] = {
                "server": self.SERVER_LABEL in self.name
            }
        self.spawn()

    def set_state(self, state: ProcessState, force=False):
        # States may not move backwards unless explicitly forced.
        # NOTE(review): placeholder exception message ("...") — consider a
        # descriptive error for invalid state regressions.
        if not force and state < self.state:
            raise Exception("...")
        self.state = state
        self.worker_state[self.name] = {
            **self.worker_state[self.name],
            "state": self.state.name,
        }

    def start(self):
        # Expose the worker's name to the child via the environment; the
        # child reads SANIC_WORKER_NAME to know it is a worker process.
        os.environ["SANIC_WORKER_NAME"] = self.name
        logger.debug(
            f"{Colors.BLUE}Starting a process: {Colors.BOLD}"
            f"{Colors.SANIC}%s{Colors.END}",
            self.name,
        )
        self.set_state(ProcessState.STARTED)
        self._process.start()
        # Record pid/start info only on the first start; restarts bump
        # the counters in restart() instead.
        if not self.worker_state[self.name].get("starts"):
            self.worker_state[self.name] = {
                **self.worker_state[self.name],
                "pid": self.pid,
                "start_at": get_now(),
                "starts": 1,
            }
        del os.environ["SANIC_WORKER_NAME"]

    def join(self):
        self.set_state(ProcessState.JOINED)
        self._process.join()

    def terminate(self):
        if self.state is not ProcessState.TERMINATED:
            logger.debug(
                f"{Colors.BLUE}Terminating a process: "
                f"{Colors.BOLD}{Colors.SANIC}"
                f"%s {Colors.BLUE}[%s]{Colors.END}",
                self.name,
                self.pid,
            )
            self.set_state(ProcessState.TERMINATED, force=True)
            try:
                # Graceful stop via SIGINT instead of a hard terminate().
                # self._process.terminate()
                os.kill(self.pid, SIGINT)
                del self.worker_state[self.name]
            except (KeyError, AttributeError, ProcessLookupError):
                # Process or its state entry may already be gone.
                ...

    def restart(self, **kwargs):
        """Terminate the current process and spawn/start a fresh one.

        Keyword arguments are folded into the server settings as
        uppercase config overrides for the new process.
        """
        logger.debug(
            f"{Colors.BLUE}Restarting a process: {Colors.BOLD}{Colors.SANIC}"
            f"%s {Colors.BLUE}[%s]{Colors.END}",
            self.name,
            self.pid,
        )
        self._process.terminate()
        # Back to IDLE (forced, since state may not regress) so spawn()
        # will accept creating a replacement process.
        self.set_state(ProcessState.IDLE, force=True)
        self.kwargs.update(
            {"config": {k.upper(): v for k, v in kwargs.items()}}
        )
        try:
            self.spawn()
            self.start()
        except AttributeError:
            raise RuntimeError("Restart failed")

        self.worker_state[self.name] = {
            **self.worker_state[self.name],
            "pid": self.pid,
            "starts": self.worker_state[self.name]["starts"] + 1,
            "restart_at": get_now(),
        }

    def is_alive(self):
        try:
            return self._process.is_alive()
        except AssertionError:
            # multiprocessing asserts when the process was never started.
            return False

    def spawn(self):
        if self.state is not ProcessState.IDLE:
            raise Exception("Cannot spawn a worker process until it is idle.")
        self._process = self.factory(
            name=self.name,
            target=self.target,
            kwargs=self.kwargs,
            daemon=True,
        )

    @property
    def pid(self):
        return self._process.pid
class Worker:
    """A logical worker: one or more identical processes under one ident."""

    def __init__(
        self,
        ident: str,
        serve,
        server_settings,
        context: BaseContext,
        worker_state: Dict[str, Any],
    ):
        self.ident = ident
        self.context = context
        self.serve = serve
        self.server_settings = server_settings
        self.worker_state = worker_state
        self.processes: Set[WorkerProcess] = set()
        # Every worker begins life with a single process.
        self.create_process()

    def create_process(self) -> WorkerProcess:
        """Create (but do not start) another process for this worker."""
        # Suffix the name with the current process count so each process
        # within the worker gets a unique identifier.
        process_name = f"Sanic-{self.ident}-{len(self.processes)}"
        process = WorkerProcess(
            factory=self.context.Process,
            name=process_name,
            target=self.serve,
            # Copy so per-process mutation cannot leak between processes.
            kwargs=dict(self.server_settings),
            worker_state=self.worker_state,
        )
        self.processes.add(process)
        return process

119
sanic/worker/reloader.py Normal file
View File

@ -0,0 +1,119 @@
from __future__ import annotations
import os
import sys
from asyncio import new_event_loop
from itertools import chain
from multiprocessing.connection import Connection
from pathlib import Path
from signal import SIGINT, SIGTERM
from signal import signal as signal_func
from typing import Dict, Set
from sanic.server.events import trigger_events
from sanic.worker.loader import AppLoader
class Reloader:
    """File-watcher process that triggers worker restarts on changes.

    Polls the mtimes of all loaded Python modules plus any extra reload
    directories; when a change is detected the set of changed paths is
    published to the WorkerManager, which restarts transient workers.
    """

    def __init__(
        self,
        publisher: Connection,
        interval: float,
        reload_dirs: Set[Path],
        app_loader: AppLoader,
    ):
        self._publisher = publisher  # pipe to the WorkerManager monitor
        self.interval = interval
        self.reload_dirs = reload_dirs
        self.run = True
        self.app_loader = app_loader

    def __call__(self) -> None:
        """Run the watch loop until stopped by SIGINT/SIGTERM."""
        app = self.app_loader.load()
        signal_func(SIGINT, self.stop)
        signal_func(SIGTERM, self.stop)
        mtimes: Dict[str, float] = {}

        reloader_start = app.listeners.get("reload_process_start")
        reloader_stop = app.listeners.get("reload_process_stop")
        before_trigger = app.listeners.get("before_reload_trigger")
        after_trigger = app.listeners.get("after_reload_trigger")
        loop = new_event_loop()
        if reloader_start:
            trigger_events(reloader_start, loop, app)

        # NOTE(review): self.interval is stored but no delay between scans
        # is visible in this loop — confirm whether a sleep was intended.
        while self.run:
            changed = set()
            for filename in self.files():
                try:
                    if self.check_file(filename, mtimes):
                        path = (
                            filename
                            if isinstance(filename, str)
                            else filename.resolve()
                        )
                        changed.add(str(path))
                except OSError:
                    # File may have vanished between listing and stat.
                    continue
            if changed:
                if before_trigger:
                    trigger_events(before_trigger, loop, app)
                # ``changed`` is always truthy here, so the "unknown"
                # fallback below is effectively unreachable.
                self.reload(",".join(changed) if changed else "unknown")
                if after_trigger:
                    trigger_events(after_trigger, loop, app)
        else:
            # while/else: runs once the loop exits normally (via stop()).
            if reloader_stop:
                trigger_events(reloader_stop, loop, app)

    def stop(self, *_):
        # Signal handler: ends the watch loop on its next iteration.
        self.run = False

    def reload(self, reloaded_files):
        # Restart every process, carrying the changed-file list as payload.
        message = f"__ALL_PROCESSES__:{reloaded_files}"
        self._publisher.send(message)

    def files(self):
        """Yield every watched path: loaded modules plus reload_dirs."""
        return chain(
            self.python_files(),
            *(d.glob("**/*") for d in self.reload_dirs),
        )

    def python_files(self):  # no cov
        """Iterate over all relevant Python source files.

        Goes through every loaded module, walking up from its ``__file__``
        until an existing file is found, and maps compiled artifacts
        (``.pyc``/``.pyo``) back to their source files.
        """
        # The list call is necessary on Python 3 in case the module
        # dictionary modifies during iteration.
        for module in list(sys.modules.values()):
            if module is None:
                continue
            filename = getattr(module, "__file__", None)
            if filename:
                old = None
                while not os.path.isfile(filename):
                    old = filename
                    filename = os.path.dirname(filename)
                    if filename == old:
                        break
                else:
                    if filename[-4:] in (".pyc", ".pyo"):
                        filename = filename[:-1]
                    yield filename

    @staticmethod
    def check_file(filename, mtimes) -> bool:
        """Return True when *filename*'s mtime is newer than last recorded.

        The first sighting of a file only records a baseline and does not
        trigger a reload. ``mtimes`` is updated in place.
        """
        need_reload = False
        mtime = os.stat(filename).st_mtime
        old_time = mtimes.get(filename)
        if old_time is None:
            mtimes[filename] = mtime
        elif mtime > old_time:
            mtimes[filename] = mtime
            need_reload = True
        return need_reload

124
sanic/worker/serve.py Normal file
View File

@ -0,0 +1,124 @@
import asyncio
import os
import socket
from functools import partial
from multiprocessing.connection import Connection
from ssl import SSLContext
from typing import Any, Dict, List, Optional, Type, Union
from sanic.application.constants import ServerStage
from sanic.application.state import ApplicationServerInfo
from sanic.http.constants import HTTP
from sanic.models.server_types import Signal
from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.runners import _serve_http_1, _serve_http_3
from sanic.worker.loader import AppLoader, CertLoader
from sanic.worker.multiplexer import WorkerMultiplexer
def worker_serve(
    host,
    port,
    app_name: str,
    monitor_publisher: Optional[Connection],
    app_loader: AppLoader,
    worker_state: Optional[Dict[str, Any]] = None,
    server_info: Optional[Dict[str, List[ApplicationServerInfo]]] = None,
    ssl: Optional[
        Union[SSLContext, Dict[str, Union[str, os.PathLike]]]
    ] = None,
    sock: Optional[socket.socket] = None,
    unix: Optional[str] = None,
    reuse_port: bool = False,
    loop=None,
    protocol: Type[asyncio.Protocol] = HttpProtocol,
    backlog: int = 100,
    register_sys_signals: bool = True,
    run_multiple: bool = False,
    run_async: bool = False,
    connections=None,
    signal=Signal(),  # NOTE(review): shared mutable default — verify intent
    state=None,
    asyncio_server_kwargs=None,
    version=HTTP.VERSION_1,
    config=None,
    passthru: Optional[Dict[str, Any]] = None,
):
    """Entry point executed inside each worker process.

    Loads (or looks up) the app, rebuilds its event loop and server info,
    wires up the worker multiplexer when running under the WorkerManager,
    and finally hands off to the HTTP/1 or HTTP/3 server runner.
    """
    from sanic import Sanic

    if app_loader:
        app = app_loader.load()
    else:
        app = Sanic.get_app(app_name)

    # Re-apply state/config passed through from the main process.
    app.refresh(passthru)
    app.setup_loop()

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    # Hydrate server info if needed
    if server_info:
        for app_name, server_info_objects in server_info.items():
            a = Sanic.get_app(app_name)
            if not a.state.server_info:
                a.state.server_info = []
            for info in server_info_objects:
                if not info.settings.get("app"):
                    info.settings["app"] = a
                a.state.server_info.append(info)

    if isinstance(ssl, dict):
        # Generate/load certificates in-process and attach the resulting
        # context to every server of the primary app.
        cert_loader = CertLoader(ssl)
        ssl = cert_loader.load(app)
        for info in app.state.server_info:
            info.settings["ssl"] = ssl

    # When in a worker process, do some init
    if os.environ.get("SANIC_WORKER_NAME"):
        # Hydrate apps with any passed server info
        if monitor_publisher is None:
            raise RuntimeError("No restart publisher found in worker process")
        if worker_state is None:
            raise RuntimeError("No worker state found in worker process")

        # Run secondary servers
        apps = list(Sanic._app_registry.values())
        app.before_server_start(partial(app._start_servers, apps=apps))
        for a in apps:
            # Each app gets a handle to talk back to the main process.
            a.multiplexer = WorkerMultiplexer(monitor_publisher, worker_state)

    if app.debug:
        loop.set_debug(app.debug)

    app.asgi = False
    if app.state.server_info:
        primary_server_info = app.state.server_info[0]
        primary_server_info.stage = ServerStage.SERVING
    if config:
        app.update_config(config)

    if version is HTTP.VERSION_3:
        return _serve_http_3(host, port, app, loop, ssl)
    return _serve_http_1(
        host,
        port,
        app,
        ssl,
        sock,
        unix,
        reuse_port,
        loop,
        protocol,
        backlog,
        register_sys_signals,
        run_multiple,
        run_async,
        connections,
        signal,
        state,
        asyncio_server_kwargs,
    )

85
sanic/worker/state.py Normal file
View File

@ -0,0 +1,85 @@
from collections.abc import Mapping
from typing import Any, Dict, ItemsView, Iterator, KeysView, List
from typing import Mapping as MappingType
from typing import ValuesView
dict
class WorkerState(Mapping):
    """Mapping view over one worker's slice of the shared state dict.

    All writes reassign the whole per-worker entry so that mutations
    propagate through a multiprocessing managed dict. Keys managed by the
    framework itself (``RESTRICTED``) cannot be set or deleted by users.
    """

    RESTRICTED = (
        "health",
        "pid",
        "requests",
        "restart_at",
        "server",
        "start_at",
        "starts",
        "state",
    )

    def __init__(self, state: Dict[str, Any], current: str) -> None:
        self._name = current
        self._state = state

    def __getitem__(self, key: str) -> Any:
        return self._state[self._name][key]

    def __setitem__(self, key: str, value: Any) -> None:
        if key in self.RESTRICTED:
            self._write_error([key])
        updated = dict(self._state[self._name])
        updated[key] = value
        self._state[self._name] = updated

    def __delitem__(self, key: str) -> None:
        if key in self.RESTRICTED:
            self._write_error([key])
        remaining = dict(self._state[self._name])
        # Deleting an absent key is a silent no-op.
        remaining.pop(key, None)
        self._state[self._name] = remaining

    def __iter__(self) -> Iterator[Any]:
        yield from self._state[self._name]

    def __len__(self) -> int:
        return len(self._state[self._name])

    def __repr__(self) -> str:
        return repr(self._state[self._name])

    def __eq__(self, other: object) -> bool:
        return self._state[self._name] == other

    def keys(self) -> KeysView[str]:
        return self._state[self._name].keys()

    def values(self) -> ValuesView[Any]:
        return self._state[self._name].values()

    def items(self) -> ItemsView[str, Any]:
        return self._state[self._name].items()

    def update(self, mapping: MappingType[str, Any]) -> None:
        restricted = [k for k in mapping.keys() if k in self.RESTRICTED]
        if restricted:
            self._write_error(restricted)
        merged = dict(self._state[self._name])
        merged.update(mapping)
        self._state[self._name] = merged

    def pop(self) -> None:
        # Removal of the whole entry is owned by the framework.
        raise NotImplementedError

    def full(self) -> Dict[str, Any]:
        """Return a snapshot of the state of *all* workers."""
        return dict(self._state)

    def _write_error(self, keys: List[str]) -> None:
        raise LookupError(
            f"Cannot set restricted key{'s' if len(keys) > 1 else ''} on "
            f"WorkerState: {', '.join(keys)}"
        )

View File

@ -94,14 +94,11 @@ requirements = [
] ]
tests_require = [ tests_require = [
"sanic-testing>=22.3.0", "sanic-testing>=22.9.0b1",
"pytest==6.2.5", "pytest",
"coverage==5.3", "coverage",
"gunicorn==20.0.4",
"pytest-cov",
"beautifulsoup4", "beautifulsoup4",
"pytest-sanic", "pytest-sanic",
"pytest-sugar",
"pytest-benchmark", "pytest-benchmark",
"chardet==3.*", "chardet==3.*",
"flake8", "flake8",

View File

@ -126,7 +126,7 @@ def sanic_router(app):
except RouteExists: except RouteExists:
pass pass
router.finalize() router.finalize()
return router, added_router return router, tuple(added_router)
return _setup return _setup

View File

@ -1,6 +1,8 @@
import json import json
from sanic import Sanic, text from sanic import Sanic, text
from sanic.application.constants import Mode
from sanic.config import Config
from sanic.log import LOGGING_CONFIG_DEFAULTS, logger from sanic.log import LOGGING_CONFIG_DEFAULTS, logger
@ -16,7 +18,7 @@ async def handler(request):
return text(request.ip) return text(request.ip)
@app.before_server_start @app.main_process_start
async def app_info_dump(app: Sanic, _): async def app_info_dump(app: Sanic, _):
app_data = { app_data = {
"access_log": app.config.ACCESS_LOG, "access_log": app.config.ACCESS_LOG,
@ -27,6 +29,13 @@ async def app_info_dump(app: Sanic, _):
logger.info(json.dumps(app_data)) logger.info(json.dumps(app_data))
@app.main_process_stop
async def app_cleanup(app: Sanic, _):
app.state.auto_reload = False
app.state.mode = Mode.PRODUCTION
app.config = Config()
@app.after_server_start @app.after_server_start
async def shutdown(app: Sanic, _): async def shutdown(app: Sanic, _):
app.stop() app.stop()
@ -38,8 +47,8 @@ def create_app():
def create_app_with_args(args): def create_app_with_args(args):
try: try:
print(f"foo={args.foo}") logger.info(f"foo={args.foo}")
except AttributeError: except AttributeError:
print(f"module={args.module}") logger.info(f"module={args.module}")
return app return app

View File

@ -35,6 +35,7 @@ def test_server_starts_http3(app: Sanic, version, caplog):
"cert": localhost_dir / "fullchain.pem", "cert": localhost_dir / "fullchain.pem",
"key": localhost_dir / "privkey.pem", "key": localhost_dir / "privkey.pem",
}, },
single_process=True,
) )
assert ev.is_set() assert ev.is_set()
@ -69,7 +70,7 @@ def test_server_starts_http1_and_http3(app: Sanic, caplog):
}, },
) )
with caplog.at_level(logging.INFO): with caplog.at_level(logging.INFO):
Sanic.serve() Sanic.serve_single()
assert ( assert (
"sanic.root", "sanic.root",

View File

@ -4,6 +4,7 @@ import re
from collections import Counter from collections import Counter
from inspect import isawaitable from inspect import isawaitable
from os import environ
from unittest.mock import Mock, patch from unittest.mock import Mock, patch
import pytest import pytest
@ -15,6 +16,7 @@ from sanic.compat import OS_IS_WINDOWS
from sanic.config import Config from sanic.config import Config
from sanic.exceptions import SanicException from sanic.exceptions import SanicException
from sanic.helpers import _default from sanic.helpers import _default
from sanic.log import LOGGING_CONFIG_DEFAULTS
from sanic.response import text from sanic.response import text
@ -23,7 +25,7 @@ def clear_app_registry():
Sanic._app_registry = {} Sanic._app_registry = {}
def test_app_loop_running(app): def test_app_loop_running(app: Sanic):
@app.get("/test") @app.get("/test")
async def handler(request): async def handler(request):
assert isinstance(app.loop, asyncio.AbstractEventLoop) assert isinstance(app.loop, asyncio.AbstractEventLoop)
@ -33,7 +35,7 @@ def test_app_loop_running(app):
assert response.text == "pass" assert response.text == "pass"
def test_create_asyncio_server(app): def test_create_asyncio_server(app: Sanic):
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server(return_asyncio_server=True) asyncio_srv_coro = app.create_server(return_asyncio_server=True)
assert isawaitable(asyncio_srv_coro) assert isawaitable(asyncio_srv_coro)
@ -41,7 +43,7 @@ def test_create_asyncio_server(app):
assert srv.is_serving() is True assert srv.is_serving() is True
def test_asyncio_server_no_start_serving(app): def test_asyncio_server_no_start_serving(app: Sanic):
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server( asyncio_srv_coro = app.create_server(
port=43123, port=43123,
@ -52,7 +54,7 @@ def test_asyncio_server_no_start_serving(app):
assert srv.is_serving() is False assert srv.is_serving() is False
def test_asyncio_server_start_serving(app): def test_asyncio_server_start_serving(app: Sanic):
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server( asyncio_srv_coro = app.create_server(
port=43124, port=43124,
@ -69,7 +71,7 @@ def test_asyncio_server_start_serving(app):
# Looks like we can't easily test `serve_forever()` # Looks like we can't easily test `serve_forever()`
def test_create_server_main(app, caplog): def test_create_server_main(app: Sanic, caplog):
app.listener("main_process_start")(lambda *_: ...) app.listener("main_process_start")(lambda *_: ...)
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
with caplog.at_level(logging.INFO): with caplog.at_level(logging.INFO):
@ -83,7 +85,7 @@ def test_create_server_main(app, caplog):
) in caplog.record_tuples ) in caplog.record_tuples
def test_create_server_no_startup(app): def test_create_server_no_startup(app: Sanic):
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server( asyncio_srv_coro = app.create_server(
port=43124, port=43124,
@ -98,7 +100,7 @@ def test_create_server_no_startup(app):
loop.run_until_complete(srv.start_serving()) loop.run_until_complete(srv.start_serving())
def test_create_server_main_convenience(app, caplog): def test_create_server_main_convenience(app: Sanic, caplog):
app.main_process_start(lambda *_: ...) app.main_process_start(lambda *_: ...)
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
with caplog.at_level(logging.INFO): with caplog.at_level(logging.INFO):
@ -112,7 +114,7 @@ def test_create_server_main_convenience(app, caplog):
) in caplog.record_tuples ) in caplog.record_tuples
def test_app_loop_not_running(app): def test_app_loop_not_running(app: Sanic):
with pytest.raises(SanicException) as excinfo: with pytest.raises(SanicException) as excinfo:
app.loop app.loop
@ -122,7 +124,7 @@ def test_app_loop_not_running(app):
) )
def test_app_run_raise_type_error(app): def test_app_run_raise_type_error(app: Sanic):
with pytest.raises(TypeError) as excinfo: with pytest.raises(TypeError) as excinfo:
app.run(loop="loop") app.run(loop="loop")
@ -135,7 +137,7 @@ def test_app_run_raise_type_error(app):
) )
def test_app_route_raise_value_error(app): def test_app_route_raise_value_error(app: Sanic):
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@ -149,11 +151,10 @@ def test_app_route_raise_value_error(app):
) )
def test_app_handle_request_handler_is_none(app, monkeypatch): def test_app_handle_request_handler_is_none(app: Sanic, monkeypatch):
def mockreturn(*args, **kwargs): def mockreturn(*args, **kwargs):
return Mock(), None, {} return Mock(), None, {}
# Not sure how to make app.router.get() return None, so use mock here.
monkeypatch.setattr(app.router, "get", mockreturn) monkeypatch.setattr(app.router, "get", mockreturn)
@app.get("/test") @app.get("/test")
@ -170,7 +171,7 @@ def test_app_handle_request_handler_is_none(app, monkeypatch):
@pytest.mark.parametrize("websocket_enabled", [True, False]) @pytest.mark.parametrize("websocket_enabled", [True, False])
@pytest.mark.parametrize("enable", [True, False]) @pytest.mark.parametrize("enable", [True, False])
def test_app_enable_websocket(app, websocket_enabled, enable): def test_app_enable_websocket(app: Sanic, websocket_enabled, enable):
app.websocket_enabled = websocket_enabled app.websocket_enabled = websocket_enabled
app.enable_websocket(enable=enable) app.enable_websocket(enable=enable)
@ -180,11 +181,11 @@ def test_app_enable_websocket(app, websocket_enabled, enable):
async def handler(request, ws): async def handler(request, ws):
await ws.send("test") await ws.send("test")
assert app.websocket_enabled == True assert app.websocket_enabled is True
@patch("sanic.mixins.runner.WebSocketProtocol") @patch("sanic.mixins.startup.WebSocketProtocol")
def test_app_websocket_parameters(websocket_protocol_mock, app): def test_app_websocket_parameters(websocket_protocol_mock, app: Sanic):
app.config.WEBSOCKET_MAX_SIZE = 44 app.config.WEBSOCKET_MAX_SIZE = 44
app.config.WEBSOCKET_PING_TIMEOUT = 48 app.config.WEBSOCKET_PING_TIMEOUT = 48
app.config.WEBSOCKET_PING_INTERVAL = 50 app.config.WEBSOCKET_PING_INTERVAL = 50
@ -194,9 +195,10 @@ def test_app_websocket_parameters(websocket_protocol_mock, app):
await ws.send("test") await ws.send("test")
try: try:
# This will fail because WebSocketProtocol is mocked and only the call kwargs matter # This will fail because WebSocketProtocol is mocked and only the
# call kwargs matter
app.test_client.get("/ws") app.test_client.get("/ws")
except: except Exception:
pass pass
websocket_protocol_call_args = websocket_protocol_mock.call_args websocket_protocol_call_args = websocket_protocol_mock.call_args
@ -212,11 +214,10 @@ def test_app_websocket_parameters(websocket_protocol_mock, app):
) )
def test_handle_request_with_nested_exception(app, monkeypatch): def test_handle_request_with_nested_exception(app: Sanic, monkeypatch):
err_msg = "Mock Exception" err_msg = "Mock Exception"
# Not sure how to raise an exception in app.error_handler.response(), use mock here
def mock_error_handler_response(*args, **kwargs): def mock_error_handler_response(*args, **kwargs):
raise Exception(err_msg) raise Exception(err_msg)
@ -233,11 +234,10 @@ def test_handle_request_with_nested_exception(app, monkeypatch):
assert response.text == "An error occurred while handling an error" assert response.text == "An error occurred while handling an error"
def test_handle_request_with_nested_exception_debug(app, monkeypatch): def test_handle_request_with_nested_exception_debug(app: Sanic, monkeypatch):
err_msg = "Mock Exception" err_msg = "Mock Exception"
# Not sure how to raise an exception in app.error_handler.response(), use mock here
def mock_error_handler_response(*args, **kwargs): def mock_error_handler_response(*args, **kwargs):
raise Exception(err_msg) raise Exception(err_msg)
@ -252,13 +252,14 @@ def test_handle_request_with_nested_exception_debug(app, monkeypatch):
request, response = app.test_client.get("/", debug=True) request, response = app.test_client.get("/", debug=True)
assert response.status == 500 assert response.status == 500
assert response.text.startswith( assert response.text.startswith(
f"Error while handling error: {err_msg}\nStack: Traceback (most recent call last):\n" f"Error while handling error: {err_msg}\n"
"Stack: Traceback (most recent call last):\n"
) )
def test_handle_request_with_nested_sanic_exception(app, monkeypatch, caplog): def test_handle_request_with_nested_sanic_exception(
app: Sanic, monkeypatch, caplog
# Not sure how to raise an exception in app.error_handler.response(), use mock here ):
def mock_error_handler_response(*args, **kwargs): def mock_error_handler_response(*args, **kwargs):
raise SanicException("Mock SanicException") raise SanicException("Mock SanicException")
@ -301,8 +302,12 @@ def test_app_has_test_mode_sync():
def test_app_registry(): def test_app_registry():
assert len(Sanic._app_registry) == 0
instance = Sanic("test") instance = Sanic("test")
assert len(Sanic._app_registry) == 1
assert Sanic._app_registry["test"] is instance assert Sanic._app_registry["test"] is instance
Sanic.unregister_app(instance)
assert len(Sanic._app_registry) == 0
def test_app_registry_wrong_type(): def test_app_registry_wrong_type():
@ -371,7 +376,7 @@ def test_get_app_default_ambiguous():
Sanic.get_app() Sanic.get_app()
def test_app_set_attribute_warning(app): def test_app_set_attribute_warning(app: Sanic):
message = ( message = (
"Setting variables on Sanic instances is not allowed. You should " "Setting variables on Sanic instances is not allowed. You should "
"change your Sanic instance to use instance.ctx.foo instead." "change your Sanic instance to use instance.ctx.foo instead."
@ -380,7 +385,7 @@ def test_app_set_attribute_warning(app):
app.foo = 1 app.foo = 1
def test_app_set_context(app): def test_app_set_context(app: Sanic):
app.ctx.foo = 1 app.ctx.foo = 1
retrieved = Sanic.get_app(app.name) retrieved = Sanic.get_app(app.name)
@ -426,13 +431,13 @@ def test_custom_context():
@pytest.mark.parametrize("use", (False, True)) @pytest.mark.parametrize("use", (False, True))
def test_uvloop_config(app, monkeypatch, use): def test_uvloop_config(app: Sanic, monkeypatch, use):
@app.get("/test") @app.get("/test")
def handler(request): def handler(request):
return text("ok") return text("ok")
try_use_uvloop = Mock() try_use_uvloop = Mock()
monkeypatch.setattr(sanic.mixins.runner, "try_use_uvloop", try_use_uvloop) monkeypatch.setattr(sanic.mixins.startup, "try_use_uvloop", try_use_uvloop)
# Default config # Default config
app.test_client.get("/test") app.test_client.get("/test")
@ -458,7 +463,7 @@ def test_uvloop_cannot_never_called_with_create_server(caplog, monkeypatch):
apps[2].config.USE_UVLOOP = True apps[2].config.USE_UVLOOP = True
try_use_uvloop = Mock() try_use_uvloop = Mock()
monkeypatch.setattr(sanic.mixins.runner, "try_use_uvloop", try_use_uvloop) monkeypatch.setattr(sanic.mixins.startup, "try_use_uvloop", try_use_uvloop)
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
@ -517,12 +522,133 @@ def test_multiple_uvloop_configs_display_warning(caplog):
assert counter[(logging.WARNING, message)] == 2 assert counter[(logging.WARNING, message)] == 2
def test_cannot_run_fast_and_workers(app): def test_cannot_run_fast_and_workers(app: Sanic):
message = "You cannot use both fast=True and workers=X" message = "You cannot use both fast=True and workers=X"
with pytest.raises(RuntimeError, match=message): with pytest.raises(RuntimeError, match=message):
app.run(fast=True, workers=4) app.run(fast=True, workers=4)
def test_no_workers(app): def test_no_workers(app: Sanic):
with pytest.raises(RuntimeError, match="Cannot serve with no workers"): with pytest.raises(RuntimeError, match="Cannot serve with no workers"):
app.run(workers=0) app.run(workers=0)
@pytest.mark.parametrize(
"extra",
(
{"fast": True},
{"workers": 2},
{"auto_reload": True},
),
)
def test_cannot_run_single_process_and_workers_or_auto_reload(
app: Sanic, extra
):
message = (
"Single process cannot be run with multiple workers or auto-reload"
)
with pytest.raises(RuntimeError, match=message):
app.run(single_process=True, **extra)
def test_cannot_run_single_process_and_legacy(app: Sanic):
message = "Cannot run single process and legacy mode"
with pytest.raises(RuntimeError, match=message):
app.run(single_process=True, legacy=True)
def test_cannot_run_without_sys_signals_with_workers(app: Sanic):
message = (
"Cannot run Sanic.serve with register_sys_signals=False. "
"Use either Sanic.serve_single or Sanic.serve_legacy."
)
with pytest.raises(RuntimeError, match=message):
app.run(register_sys_signals=False, single_process=False, legacy=False)
def test_default_configure_logging():
with patch("sanic.app.logging") as mock:
Sanic("Test")
mock.config.dictConfig.assert_called_with(LOGGING_CONFIG_DEFAULTS)
def test_custom_configure_logging():
with patch("sanic.app.logging") as mock:
Sanic("Test", log_config={"foo": "bar"})
mock.config.dictConfig.assert_called_with({"foo": "bar"})
def test_disable_configure_logging():
with patch("sanic.app.logging") as mock:
Sanic("Test", configure_logging=False)
mock.config.dictConfig.assert_not_called()
@pytest.mark.parametrize("inspector", (True, False))
def test_inspector(inspector):
app = Sanic("Test", inspector=inspector)
assert app.config.INSPECTOR is inspector
def test_build_endpoint_name():
app = Sanic("Test")
name = app._build_endpoint_name("foo", "bar")
assert name == "Test.foo.bar"
def test_manager_in_main_process_only(app: Sanic):
message = "Can only access the manager from the main process"
with pytest.raises(SanicException, match=message):
app.manager
app._manager = 1
environ["SANIC_WORKER_PROCESS"] = "ok"
with pytest.raises(SanicException, match=message):
app.manager
del environ["SANIC_WORKER_PROCESS"]
assert app.manager == 1
def test_inspector_in_main_process_only(app: Sanic):
message = "Can only access the inspector from the main process"
with pytest.raises(SanicException, match=message):
app.inspector
app._inspector = 1
environ["SANIC_WORKER_PROCESS"] = "ok"
with pytest.raises(SanicException, match=message):
app.inspector
del environ["SANIC_WORKER_PROCESS"]
assert app.inspector == 1
def test_stop_trigger_terminate(app: Sanic):
app.multiplexer = Mock()
app.stop()
app.multiplexer.terminate.assert_called_once()
app.multiplexer.reset_mock()
assert len(Sanic._app_registry) == 1
Sanic._app_registry.clear()
app.stop(terminate=True)
app.multiplexer.terminate.assert_called_once()
app.multiplexer.reset_mock()
assert len(Sanic._app_registry) == 0
Sanic._app_registry.clear()
app.stop(unregister=False)
app.multiplexer.terminate.assert_called_once()

View File

@ -1,13 +1,16 @@
import asyncio import asyncio
from sanic import Sanic
def test_bad_request_response(app):
def test_bad_request_response(app: Sanic):
lines = [] lines = []
app.get("/")(lambda x: ...) app.get("/")(lambda x: ...)
@app.listener("after_server_start") @app.listener("after_server_start")
async def _request(sanic, loop): async def _request(sanic, loop):
nonlocal lines
connect = asyncio.open_connection("127.0.0.1", 42101) connect = asyncio.open_connection("127.0.0.1", 42101)
reader, writer = await connect reader, writer = await connect
writer.write(b"not http\r\n\r\n") writer.write(b"not http\r\n\r\n")
@ -18,6 +21,6 @@ def test_bad_request_response(app):
lines.append(line) lines.append(line)
app.stop() app.stop()
app.run(host="127.0.0.1", port=42101, debug=False) app.run(host="127.0.0.1", port=42101, debug=False, single_process=True)
assert lines[0] == b"HTTP/1.1 400 Bad Request\r\n" assert lines[0] == b"HTTP/1.1 400 Bad Request\r\n"
assert b"Bad Request" in lines[-2] assert b"Bad Request" in lines[-2]

View File

@ -1,5 +1,6 @@
import json import json
import subprocess import os
import sys
from pathlib import Path from pathlib import Path
from typing import List, Optional, Tuple from typing import List, Optional, Tuple
@ -9,33 +10,30 @@ import pytest
from sanic_routing import __version__ as __routing_version__ from sanic_routing import __version__ as __routing_version__
from sanic import __version__ from sanic import __version__
from sanic.__main__ import main
def capture(command: List[str]): @pytest.fixture(scope="module", autouse=True)
proc = subprocess.Popen( def tty():
command, orig = sys.stdout.isatty
stdout=subprocess.PIPE, sys.stdout.isatty = lambda: False
stderr=subprocess.PIPE, yield
cwd=Path(__file__).parent, sys.stdout.isatty = orig
)
def capture(command: List[str], caplog):
caplog.clear()
os.chdir(Path(__file__).parent)
try: try:
out, err = proc.communicate(timeout=10) main(command)
except subprocess.TimeoutExpired: except SystemExit:
proc.kill() ...
out, err = proc.communicate() return [record.message for record in caplog.records]
return out, err, proc.returncode
def starting_line(lines: List[str]):
for idx, line in enumerate(lines):
if line.strip().startswith(b"Sanic v"):
return idx
return 0
def read_app_info(lines: List[str]): def read_app_info(lines: List[str]):
for line in lines: for line in lines:
if line.startswith(b"{") and line.endswith(b"}"): if line.startswith("{") and line.endswith("}"): # type: ignore
return json.loads(line) return json.loads(line)
@ -47,59 +45,57 @@ def read_app_info(lines: List[str]):
("fake.server.create_app()", None), ("fake.server.create_app()", None),
), ),
) )
def test_server_run(appname: str, extra: Optional[str]): def test_server_run(
command = ["sanic", appname] appname: str,
extra: Optional[str],
caplog: pytest.LogCaptureFixture,
):
command = [appname]
if extra: if extra:
command.append(extra) command.append(extra)
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
firstline = lines[starting_line(lines) + 1]
assert exitcode != 1 assert "Goin' Fast @ http://127.0.0.1:8000" in lines
assert firstline == b"Goin' Fast @ http://127.0.0.1:8000"
def test_server_run_factory_with_args(): def test_server_run_factory_with_args(caplog):
command = [ command = [
"sanic",
"fake.server.create_app_with_args", "fake.server.create_app_with_args",
"--factory", "--factory",
] ]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
assert exitcode != 1, lines assert "module=fake.server.create_app_with_args" in lines
assert b"module=fake.server.create_app_with_args" in lines
def test_server_run_factory_with_args_arbitrary(): def test_server_run_factory_with_args_arbitrary(caplog):
command = [ command = [
"sanic",
"fake.server.create_app_with_args", "fake.server.create_app_with_args",
"--factory", "--factory",
"--foo=bar", "--foo=bar",
] ]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
assert exitcode != 1, lines assert "foo=bar" in lines
assert b"foo=bar" in lines
def test_error_with_function_as_instance_without_factory_arg(): def test_error_with_function_as_instance_without_factory_arg(caplog):
command = ["sanic", "fake.server.create_app"] command = ["fake.server.create_app"]
out, err, exitcode = capture(command) lines = capture(command, caplog)
assert b"try: \nsanic fake.server.create_app --factory" in err
assert exitcode != 1
def test_error_with_path_as_instance_without_simple_arg():
command = ["sanic", "./fake/"]
out, err, exitcode = capture(command)
assert ( assert (
b"Please use --simple if you are passing a directory to sanic." in err "Failed to run app: Module is not a Sanic app, it is a function\n "
) "If this callable returns a Sanic instance try: \n"
assert exitcode != 1 "sanic fake.server.create_app --factory"
) in lines
def test_error_with_path_as_instance_without_simple_arg(caplog):
command = ["./fake/"]
lines = capture(command, caplog)
assert (
"Failed to run app: App not found.\n Please use --simple if you "
"are passing a directory to sanic.\n eg. sanic ./fake/ --simple"
) in lines
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -120,13 +116,10 @@ def test_error_with_path_as_instance_without_simple_arg():
), ),
), ),
) )
def test_tls_options(cmd: Tuple[str, ...]): def test_tls_options(cmd: Tuple[str, ...], caplog):
command = ["sanic", "fake.server.app", *cmd, "-p=9999", "--debug"] command = ["fake.server.app", *cmd, "--port=9999", "--debug"]
out, err, exitcode = capture(command) lines = capture(command, caplog)
assert exitcode != 1 assert "Goin' Fast @ https://127.0.0.1:9999" in lines
lines = out.split(b"\n")
firstline = lines[starting_line(lines) + 1]
assert firstline == b"Goin' Fast @ https://127.0.0.1:9999"
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -141,14 +134,15 @@ def test_tls_options(cmd: Tuple[str, ...]):
("--tls-strict-host",), ("--tls-strict-host",),
), ),
) )
def test_tls_wrong_options(cmd: Tuple[str, ...]): def test_tls_wrong_options(cmd: Tuple[str, ...], caplog):
command = ["sanic", "fake.server.app", *cmd, "-p=9999", "--debug"] command = ["fake.server.app", *cmd, "-p=9999", "--debug"]
out, err, exitcode = capture(command) lines = capture(command, caplog)
assert exitcode == 1
assert not out
lines = err.decode().split("\n")
assert "TLS certificates must be specified by either of:" in lines assert (
"TLS certificates must be specified by either of:\n "
"--cert certdir/fullchain.pem --key certdir/privkey.pem\n "
"--tls certdir (equivalent to the above)"
) in lines
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -158,65 +152,44 @@ def test_tls_wrong_options(cmd: Tuple[str, ...]):
("-H", "localhost", "-p", "9999"), ("-H", "localhost", "-p", "9999"),
), ),
) )
def test_host_port_localhost(cmd: Tuple[str, ...]): def test_host_port_localhost(cmd: Tuple[str, ...], caplog):
command = ["sanic", "fake.server.app", *cmd] command = ["fake.server.app", *cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n") expected = "Goin' Fast @ http://localhost:9999"
expected = b"Goin' Fast @ http://localhost:9999"
assert exitcode != 1 assert expected in lines
assert expected in lines, f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize( @pytest.mark.parametrize(
"cmd", "cmd,expected",
(
(
("--host=localhost", "--port=9999"),
"Goin' Fast @ http://localhost:9999",
),
(
("-H", "localhost", "-p", "9999"),
"Goin' Fast @ http://localhost:9999",
),
( (
("--host=127.0.0.127", "--port=9999"), ("--host=127.0.0.127", "--port=9999"),
"Goin' Fast @ http://127.0.0.127:9999",
),
(
("-H", "127.0.0.127", "-p", "9999"), ("-H", "127.0.0.127", "-p", "9999"),
"Goin' Fast @ http://127.0.0.127:9999",
),
(("--host=::", "--port=9999"), "Goin' Fast @ http://[::]:9999"),
(("-H", "::", "-p", "9999"), "Goin' Fast @ http://[::]:9999"),
(("--host=::1", "--port=9999"), "Goin' Fast @ http://[::1]:9999"),
(("-H", "::1", "-p", "9999"), "Goin' Fast @ http://[::1]:9999"),
), ),
) )
def test_host_port_ipv4(cmd: Tuple[str, ...]): def test_host_port(cmd: Tuple[str, ...], expected: str, caplog):
command = ["sanic", "fake.server.app", *cmd] command = ["fake.server.app", *cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
expected = b"Goin' Fast @ http://127.0.0.127:9999"
assert exitcode != 1 assert expected in lines
assert expected in lines, f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize(
"cmd",
(
("--host=::", "--port=9999"),
("-H", "::", "-p", "9999"),
),
)
def test_host_port_ipv6_any(cmd: Tuple[str, ...]):
command = ["sanic", "fake.server.app", *cmd]
out, err, exitcode = capture(command)
lines = out.split(b"\n")
expected = b"Goin' Fast @ http://[::]:9999"
assert exitcode != 1
assert expected in lines, f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize(
"cmd",
(
("--host=::1", "--port=9999"),
("-H", "::1", "-p", "9999"),
),
)
def test_host_port_ipv6_loopback(cmd: Tuple[str, ...]):
command = ["sanic", "fake.server.app", *cmd]
out, err, exitcode = capture(command)
lines = out.split(b"\n")
expected = b"Goin' Fast @ http://[::1]:9999"
assert exitcode != 1
assert expected in lines, f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -230,82 +203,74 @@ def test_host_port_ipv6_loopback(cmd: Tuple[str, ...]):
(4, ("-w", "4")), (4, ("-w", "4")),
), ),
) )
def test_num_workers(num: int, cmd: Tuple[str, ...]): def test_num_workers(num: int, cmd: Tuple[str, ...], caplog):
command = ["sanic", "fake.server.app", *cmd] command = ["fake.server.app", *cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
if num == 1: if num == 1:
expected = b"mode: production, single worker" expected = "mode: production, single worker"
else: else:
expected = (f"mode: production, w/ {num} workers").encode() expected = f"mode: production, w/ {num} workers"
assert exitcode != 1 assert expected in lines
assert expected in lines, f"Expected {expected}\nLines found: {lines}"
@pytest.mark.parametrize("cmd", ("--debug",)) @pytest.mark.parametrize("cmd", ("--debug",))
def test_debug(cmd: str): def test_debug(cmd: str, caplog):
command = ["sanic", "fake.server.app", cmd] command = ["fake.server.app", cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
info = read_app_info(lines) info = read_app_info(lines)
assert info["debug"] is True, f"Lines found: {lines}\nErr output: {err}" assert info["debug"] is True
assert ( assert info["auto_reload"] is False
info["auto_reload"] is False
), f"Lines found: {lines}\nErr output: {err}"
assert "dev" not in info, f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize("cmd", ("--dev", "-d")) @pytest.mark.parametrize("cmd", ("--dev", "-d"))
def test_dev(cmd: str): def test_dev(cmd: str, caplog):
command = ["sanic", "fake.server.app", cmd] command = ["fake.server.app", cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
info = read_app_info(lines) info = read_app_info(lines)
assert info["debug"] is True, f"Lines found: {lines}\nErr output: {err}" assert info["debug"] is True
assert ( assert info["auto_reload"] is True
info["auto_reload"] is True
), f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize("cmd", ("--auto-reload", "-r")) @pytest.mark.parametrize("cmd", ("--auto-reload", "-r"))
def test_auto_reload(cmd: str): def test_auto_reload(cmd: str, caplog):
command = ["sanic", "fake.server.app", cmd] command = ["fake.server.app", cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
info = read_app_info(lines) info = read_app_info(lines)
assert info["debug"] is False, f"Lines found: {lines}\nErr output: {err}" assert info["debug"] is False
assert ( assert info["auto_reload"] is True
info["auto_reload"] is True
), f"Lines found: {lines}\nErr output: {err}"
assert "dev" not in info, f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize( @pytest.mark.parametrize(
"cmd,expected", (("--access-log", True), ("--no-access-log", False)) "cmd,expected",
(
("", False),
("--debug", True),
("--access-log", True),
("--no-access-log", False),
),
) )
def test_access_logs(cmd: str, expected: bool): def test_access_logs(cmd: str, expected: bool, caplog):
command = ["sanic", "fake.server.app", cmd] command = ["fake.server.app"]
out, err, exitcode = capture(command) if cmd:
lines = out.split(b"\n") command.append(cmd)
lines = capture(command, caplog)
info = read_app_info(lines) info = read_app_info(lines)
assert ( assert info["access_log"] is expected
info["access_log"] is expected
), f"Lines found: {lines}\nErr output: {err}"
@pytest.mark.parametrize("cmd", ("--version", "-v")) @pytest.mark.parametrize("cmd", ("--version", "-v"))
def test_version(cmd: str): def test_version(cmd: str, caplog, capsys):
command = ["sanic", cmd] command = [cmd]
out, err, exitcode = capture(command) capture(command, caplog)
version_string = f"Sanic {__version__}; Routing {__routing_version__}\n" version_string = f"Sanic {__version__}; Routing {__routing_version__}\n"
out, _ = capsys.readouterr()
assert out == version_string.encode("utf-8") assert version_string == out
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -315,12 +280,9 @@ def test_version(cmd: str):
("--no-noisy-exceptions", False), ("--no-noisy-exceptions", False),
), ),
) )
def test_noisy_exceptions(cmd: str, expected: bool): def test_noisy_exceptions(cmd: str, expected: bool, caplog):
command = ["sanic", "fake.server.app", cmd] command = ["fake.server.app", cmd]
out, err, exitcode = capture(command) lines = capture(command, caplog)
lines = out.split(b"\n")
info = read_app_info(lines) info = read_app_info(lines)
assert ( assert info["noisy_exceptions"] is expected
info["noisy_exceptions"] is expected
), f"Lines found: {lines}\nErr output: {err}"

View File

@ -293,26 +293,21 @@ def test_config_custom_defaults_with_env():
del environ[key] del environ[key]
def test_config_access_log_passing_in_run(app: Sanic): @pytest.mark.parametrize("access_log", (True, False))
assert app.config.ACCESS_LOG is True def test_config_access_log_passing_in_run(app: Sanic, access_log):
assert app.config.ACCESS_LOG is False
@app.listener("after_server_start") @app.listener("after_server_start")
async def _request(sanic, loop): async def _request(sanic, loop):
app.stop() app.stop()
app.run(port=1340, access_log=False) app.run(port=1340, access_log=access_log, single_process=True)
assert app.config.ACCESS_LOG is False assert app.config.ACCESS_LOG is access_log
app.router.reset()
app.signal_router.reset()
app.run(port=1340, access_log=True)
assert app.config.ACCESS_LOG is True
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_config_access_log_passing_in_create_server(app: Sanic): async def test_config_access_log_passing_in_create_server(app: Sanic):
assert app.config.ACCESS_LOG is True assert app.config.ACCESS_LOG is False
@app.listener("after_server_start") @app.listener("after_server_start")
async def _request(sanic, loop): async def _request(sanic, loop):

View File

@ -1,15 +1,24 @@
import pytest
from sanic import Sanic, text from sanic import Sanic, text
from sanic.application.constants import Mode, Server, ServerStage
from sanic.constants import HTTP_METHODS, HTTPMethod from sanic.constants import HTTP_METHODS, HTTPMethod
def test_string_compat(): @pytest.mark.parametrize("enum", (HTTPMethod, Server, Mode))
assert "GET" == HTTPMethod.GET def test_string_compat(enum):
assert "GET" in HTTP_METHODS for key in enum.__members__.keys():
assert "get" == HTTPMethod.GET assert key.upper() == getattr(enum, key).upper()
assert "get" in HTTP_METHODS assert key.lower() == getattr(enum, key).lower()
assert HTTPMethod.GET.lower() == "get"
assert HTTPMethod.GET.upper() == "GET" def test_http_methods():
for value in HTTPMethod.__members__.values():
assert value in HTTP_METHODS
def test_server_stage():
assert ServerStage.SERVING > ServerStage.PARTIAL > ServerStage.STOPPED
def test_use_in_routes(app: Sanic): def test_use_in_routes(app: Sanic):

View File

@ -1,44 +1,44 @@
# import pytest import pytest
# from sanic.response import text from sanic_routing.exceptions import RouteExists
# from sanic.router import RouteExists
from sanic.response import text
# @pytest.mark.parametrize( @pytest.mark.parametrize(
# "method,attr, expected", "method,attr, expected",
# [ [
# ("get", "text", "OK1 test"), ("get", "text", "OK1 test"),
# ("post", "text", "OK2 test"), ("post", "text", "OK2 test"),
# ("put", "text", "OK2 test"), ("put", "text", "OK2 test"),
# ("delete", "status", 405), ],
# ], )
# ) def test_overload_dynamic_routes(app, method, attr, expected):
# def test_overload_dynamic_routes(app, method, attr, expected): @app.route("/overload/<param>", methods=["GET"])
# @app.route("/overload/<param>", methods=["GET"]) async def handler1(request, param):
# async def handler1(request, param): return text("OK1 " + param)
# return text("OK1 " + param)
# @app.route("/overload/<param>", methods=["POST", "PUT"]) @app.route("/overload/<param>", methods=["POST", "PUT"])
# async def handler2(request, param): async def handler2(request, param):
# return text("OK2 " + param) return text("OK2 " + param)
# request, response = getattr(app.test_client, method)("/overload/test") request, response = getattr(app.test_client, method)("/overload/test")
# assert getattr(response, attr) == expected assert getattr(response, attr) == expected
# def test_overload_dynamic_routes_exist(app): def test_overload_dynamic_routes_exist(app):
# @app.route("/overload/<param>", methods=["GET"]) @app.route("/overload/<param>", methods=["GET"])
# async def handler1(request, param): async def handler1(request, param):
# return text("OK1 " + param) return text("OK1 " + param)
# @app.route("/overload/<param>", methods=["POST", "PUT"]) @app.route("/overload/<param>", methods=["POST", "PUT"])
# async def handler2(request, param): async def handler2(request, param):
# return text("OK2 " + param) return text("OK2 " + param)
# # if this doesn't raise an error, than at least the below should happen: # if this doesn't raise an error, than at least the below should happen:
# # assert response.text == 'Duplicated' # assert response.text == 'Duplicated'
# with pytest.raises(RouteExists): with pytest.raises(RouteExists):
# @app.route("/overload/<param>", methods=["PUT", "DELETE"]) @app.route("/overload/<param>", methods=["PUT", "DELETE"])
# async def handler3(request, param): async def handler3(request, param):
# return text("Duplicated") return text("Duplicated")

View File

@ -353,7 +353,7 @@ def test_config_fallback_before_and_after_startup(app):
_, response = app.test_client.get("/error") _, response = app.test_client.get("/error")
assert response.status == 500 assert response.status == 500
assert response.content_type == "text/plain; charset=utf-8" assert response.content_type == "application/json"
def test_config_fallback_using_update_dict(app): def test_config_fallback_using_update_dict(app):

View File

@ -25,19 +25,19 @@ def stoppable_app(app):
def test_ext_is_loaded(stoppable_app: Sanic, sanic_ext): def test_ext_is_loaded(stoppable_app: Sanic, sanic_ext):
stoppable_app.run() stoppable_app.run(single_process=True)
sanic_ext.Extend.assert_called_once_with(stoppable_app) sanic_ext.Extend.assert_called_once_with(stoppable_app)
def test_ext_is_not_loaded(stoppable_app: Sanic, sanic_ext): def test_ext_is_not_loaded(stoppable_app: Sanic, sanic_ext):
stoppable_app.config.AUTO_EXTEND = False stoppable_app.config.AUTO_EXTEND = False
stoppable_app.run() stoppable_app.run(single_process=True)
sanic_ext.Extend.assert_not_called() sanic_ext.Extend.assert_not_called()
def test_extend_with_args(stoppable_app: Sanic, sanic_ext): def test_extend_with_args(stoppable_app: Sanic, sanic_ext):
stoppable_app.extend(built_in_extensions=False) stoppable_app.extend(built_in_extensions=False)
stoppable_app.run() stoppable_app.run(single_process=True)
sanic_ext.Extend.assert_called_once_with( sanic_ext.Extend.assert_called_once_with(
stoppable_app, built_in_extensions=False, config=None, extensions=None stoppable_app, built_in_extensions=False, config=None, extensions=None
) )
@ -80,5 +80,5 @@ def test_can_access_app_ext_while_running(app: Sanic, sanic_ext, ext_instance):
app.ext.injection(IceCream) app.ext.injection(IceCream)
app.stop() app.stop()
app.run() app.run(single_process=True)
ext_instance.injection.assert_called_with(IceCream) ext_instance.injection.assert_called_with(IceCream)

View File

@ -1,48 +1,61 @@
import asyncio import asyncio
import logging import logging
import time
from multiprocessing import Process from pytest import LogCaptureFixture
import httpx from sanic.response import empty
PORT = 42101 PORT = 42101
def test_no_exceptions_when_cancel_pending_request(app, caplog): def test_no_exceptions_when_cancel_pending_request(
app, caplog: LogCaptureFixture
):
app.config.GRACEFUL_SHUTDOWN_TIMEOUT = 1 app.config.GRACEFUL_SHUTDOWN_TIMEOUT = 1
@app.get("/") @app.get("/")
async def handler(request): async def handler(request):
await asyncio.sleep(5) await asyncio.sleep(5)
@app.after_server_start @app.listener("after_server_start")
def shutdown(app, _): async def _request(sanic, loop):
time.sleep(0.2) connect = asyncio.open_connection("127.0.0.1", 8000)
_, writer = await connect
writer.write(b"GET / HTTP/1.1\r\n\r\n")
app.stop() app.stop()
def ping(): with caplog.at_level(logging.INFO):
time.sleep(0.1) app.run(single_process=True, access_log=True)
response = httpx.get("http://127.0.0.1:8000")
print(response.status_code)
p = Process(target=ping) assert "Request: GET http:/// stopped. Transport is closed." in caplog.text
p.start()
def test_completes_request(app, caplog: LogCaptureFixture):
app.config.GRACEFUL_SHUTDOWN_TIMEOUT = 1
@app.get("/")
async def handler(request):
await asyncio.sleep(0.5)
return empty()
@app.listener("after_server_start")
async def _request(sanic, loop):
connect = asyncio.open_connection("127.0.0.1", 8000)
_, writer = await connect
writer.write(b"GET / HTTP/1.1\r\n\r\n")
app.stop()
with caplog.at_level(logging.INFO): with caplog.at_level(logging.INFO):
app.run() app.run(single_process=True, access_log=True)
p.kill() assert ("sanic.access", 20, "") in caplog.record_tuples
info = 0 # Make sure that the server starts shutdown process before access log
for record in caplog.record_tuples: index_stopping = 0
assert record[1] != logging.ERROR for idx, record in enumerate(caplog.records):
if record[1] == logging.INFO: if record.message.startswith("Stopping worker"):
info += 1 index_stopping = idx
if record[2].startswith("Request:"): break
assert record[2] == ( index_request = caplog.record_tuples.index(("sanic.access", 20, ""))
"Request: GET http://127.0.0.1:8000/ stopped. " assert index_request > index_stopping > 0
"Transport is closed."
)
assert info == 11

View File

@ -61,6 +61,6 @@ def test_http1_response_has_alt_svc():
version=1, version=1,
port=PORT, port=PORT,
) )
Sanic.serve() Sanic.serve_single(app)
assert f'alt-svc: h3=":{PORT}"\r\n'.encode() in response assert f'alt-svc: h3=":{PORT}"\r\n'.encode() in response

25
tests/test_init.py Normal file
View File

@ -0,0 +1,25 @@
from importlib import import_module
import pytest
@pytest.mark.parametrize(
"item",
(
"__version__",
"Sanic",
"Blueprint",
"HTTPMethod",
"HTTPResponse",
"Request",
"Websocket",
"empty",
"file",
"html",
"json",
"redirect",
"text",
),
)
def test_imports(item):
import_module("sanic", item)

View File

@ -166,6 +166,7 @@ def test_access_log_client_ip_remote_addr(monkeypatch):
monkeypatch.setattr(sanic.http.http1, "access_logger", access) monkeypatch.setattr(sanic.http.http1, "access_logger", access)
app = Sanic("test_logging") app = Sanic("test_logging")
app.config.ACCESS_LOG = True
app.config.PROXIES_COUNT = 2 app.config.PROXIES_COUNT = 2
@app.route("/") @app.route("/")
@ -193,6 +194,7 @@ def test_access_log_client_ip_reqip(monkeypatch):
monkeypatch.setattr(sanic.http.http1, "access_logger", access) monkeypatch.setattr(sanic.http.http1, "access_logger", access)
app = Sanic("test_logging") app = Sanic("test_logging")
app.config.ACCESS_LOG = True
@app.route("/") @app.route("/")
async def handler(request): async def handler(request):

View File

@ -30,9 +30,12 @@ def test_get_logo_returns_expected_logo(tty, full, expected):
def test_get_logo_returns_no_colors_on_apple_terminal(): def test_get_logo_returns_no_colors_on_apple_terminal():
with patch("sys.stdout.isatty") as isatty: platform = sys.platform
isatty.return_value = False
sys.platform = "darwin" sys.platform = "darwin"
os.environ["TERM_PROGRAM"] = "Apple_Terminal" os.environ["TERM_PROGRAM"] = "Apple_Terminal"
with patch("sys.stdout.isatty") as isatty:
isatty.return_value = False
logo = get_logo() logo = get_logo()
assert "\033" not in logo assert "\033" not in logo
sys.platform = platform
del os.environ["TERM_PROGRAM"]

View File

@ -3,15 +3,23 @@ import os
import platform import platform
import sys import sys
from unittest.mock import Mock from unittest.mock import Mock, patch
import pytest import pytest
from sanic import Sanic, __version__ from sanic import __version__
from sanic.application.logo import BASE_LOGO from sanic.application.logo import BASE_LOGO
from sanic.application.motd import MOTD, MOTDTTY from sanic.application.motd import MOTD, MOTDTTY
@pytest.fixture(autouse=True)
def reset():
try:
del os.environ["SANIC_MOTD_OUTPUT"]
except KeyError:
...
def test_logo_base(app, run_startup): def test_logo_base(app, run_startup):
logs = run_startup(app) logs = run_startup(app)
@ -63,20 +71,13 @@ def test_motd_display(caplog):
@pytest.mark.skipif(sys.version_info < (3, 8), reason="Not on 3.7") @pytest.mark.skipif(sys.version_info < (3, 8), reason="Not on 3.7")
def test_reload_dirs(app): def test_reload_dirs(app):
app.config.LOGO = None app.config.LOGO = None
app.config.MOTD = True
app.config.AUTO_RELOAD = True app.config.AUTO_RELOAD = True
app.prepare(reload_dir="./", auto_reload=True, motd_display={"foo": "bar"})
existing = MOTD.output with patch.object(MOTD, "output") as mock:
MOTD.output = Mock() app.prepare(
reload_dir="./", auto_reload=True, motd_display={"foo": "bar"}
app.motd("foo")
MOTD.output.assert_called_once()
assert (
MOTD.output.call_args.args[2]["auto-reload"]
== f"enabled, {os.getcwd()}"
) )
assert MOTD.output.call_args.args[3] == {"foo": "bar"} mock.assert_called()
assert mock.call_args.args[2]["auto-reload"] == f"enabled, {os.getcwd()}"
MOTD.output = existing assert mock.call_args.args[3] == {"foo": "bar"}
Sanic._app_registry = {}

View File

@ -1,207 +1,207 @@
import logging # import logging
from unittest.mock import Mock # from unittest.mock import Mock
import pytest # import pytest
from sanic import Sanic # from sanic import Sanic
from sanic.response import text # from sanic.response import text
from sanic.server.async_server import AsyncioServer # from sanic.server.async_server import AsyncioServer
from sanic.signals import Event # from sanic.signals import Event
from sanic.touchup.schemes.ode import OptionalDispatchEvent # from sanic.touchup.schemes.ode import OptionalDispatchEvent
try: # try:
from unittest.mock import AsyncMock # from unittest.mock import AsyncMock
except ImportError: # except ImportError:
from tests.asyncmock import AsyncMock # type: ignore # from tests.asyncmock import AsyncMock # type: ignore
@pytest.fixture # @pytest.fixture
def app_one(): # def app_one():
app = Sanic("One") # app = Sanic("One")
@app.get("/one") # @app.get("/one")
async def one(request): # async def one(request):
return text("one") # return text("one")
return app # return app
@pytest.fixture # @pytest.fixture
def app_two(): # def app_two():
app = Sanic("Two") # app = Sanic("Two")
@app.get("/two") # @app.get("/two")
async def two(request): # async def two(request):
return text("two") # return text("two")
return app # return app
@pytest.fixture(autouse=True) # @pytest.fixture(autouse=True)
def clean(): # def clean():
Sanic._app_registry = {} # Sanic._app_registry = {}
yield # yield
def test_serve_same_app_multiple_tuples(app_one, run_multi): # def test_serve_same_app_multiple_tuples(app_one, run_multi):
app_one.prepare(port=23456) # app_one.prepare(port=23456)
app_one.prepare(port=23457) # app_one.prepare(port=23457)
logs = run_multi(app_one) # logs = run_multi(app_one)
assert ( # assert (
"sanic.root", # "sanic.root",
logging.INFO, # logging.INFO,
"Goin' Fast @ http://127.0.0.1:23456", # "Goin' Fast @ http://127.0.0.1:23456",
) in logs # ) in logs
assert ( # assert (
"sanic.root", # "sanic.root",
logging.INFO, # logging.INFO,
"Goin' Fast @ http://127.0.0.1:23457", # "Goin' Fast @ http://127.0.0.1:23457",
) in logs # ) in logs
def test_serve_multiple_apps(app_one, app_two, run_multi): # def test_serve_multiple_apps(app_one, app_two, run_multi):
app_one.prepare(port=23456) # app_one.prepare(port=23456)
app_two.prepare(port=23457) # app_two.prepare(port=23457)
logs = run_multi(app_one) # logs = run_multi(app_one)
assert ( # assert (
"sanic.root", # "sanic.root",
logging.INFO, # logging.INFO,
"Goin' Fast @ http://127.0.0.1:23456", # "Goin' Fast @ http://127.0.0.1:23456",
) in logs # ) in logs
assert ( # assert (
"sanic.root", # "sanic.root",
logging.INFO, # logging.INFO,
"Goin' Fast @ http://127.0.0.1:23457", # "Goin' Fast @ http://127.0.0.1:23457",
) in logs # ) in logs
def test_listeners_on_secondary_app(app_one, app_two, run_multi): # def test_listeners_on_secondary_app(app_one, app_two, run_multi):
app_one.prepare(port=23456) # app_one.prepare(port=23456)
app_two.prepare(port=23457) # app_two.prepare(port=23457)
before_start = AsyncMock() # before_start = AsyncMock()
after_start = AsyncMock() # after_start = AsyncMock()
before_stop = AsyncMock() # before_stop = AsyncMock()
after_stop = AsyncMock() # after_stop = AsyncMock()
app_two.before_server_start(before_start) # app_two.before_server_start(before_start)
app_two.after_server_start(after_start) # app_two.after_server_start(after_start)
app_two.before_server_stop(before_stop) # app_two.before_server_stop(before_stop)
app_two.after_server_stop(after_stop) # app_two.after_server_stop(after_stop)
run_multi(app_one) # run_multi(app_one)
before_start.assert_awaited_once() # before_start.assert_awaited_once()
after_start.assert_awaited_once() # after_start.assert_awaited_once()
before_stop.assert_awaited_once() # before_stop.assert_awaited_once()
after_stop.assert_awaited_once() # after_stop.assert_awaited_once()
@pytest.mark.parametrize( # @pytest.mark.parametrize(
"events", # "events",
( # (
(Event.HTTP_LIFECYCLE_BEGIN,), # (Event.HTTP_LIFECYCLE_BEGIN,),
(Event.HTTP_LIFECYCLE_BEGIN, Event.HTTP_LIFECYCLE_COMPLETE), # (Event.HTTP_LIFECYCLE_BEGIN, Event.HTTP_LIFECYCLE_COMPLETE),
( # (
Event.HTTP_LIFECYCLE_BEGIN, # Event.HTTP_LIFECYCLE_BEGIN,
Event.HTTP_LIFECYCLE_COMPLETE, # Event.HTTP_LIFECYCLE_COMPLETE,
Event.HTTP_LIFECYCLE_REQUEST, # Event.HTTP_LIFECYCLE_REQUEST,
), # ),
), # ),
) # )
def test_signal_synchronization(app_one, app_two, run_multi, events): # def test_signal_synchronization(app_one, app_two, run_multi, events):
app_one.prepare(port=23456) # app_one.prepare(port=23456)
app_two.prepare(port=23457) # app_two.prepare(port=23457)
for event in events: # for event in events:
app_one.signal(event)(AsyncMock()) # app_one.signal(event)(AsyncMock())
run_multi(app_one) # run_multi(app_one)
assert len(app_two.signal_router.routes) == len(events) + 1 # assert len(app_two.signal_router.routes) == len(events) + 1
signal_handlers = { # signal_handlers = {
signal.handler # signal.handler
for signal in app_two.signal_router.routes # for signal in app_two.signal_router.routes
if signal.name.startswith("http") # if signal.name.startswith("http")
} # }
assert len(signal_handlers) == 1 # assert len(signal_handlers) == 1
assert list(signal_handlers)[0] is OptionalDispatchEvent.noop # assert list(signal_handlers)[0] is OptionalDispatchEvent.noop
def test_warning_main_process_listeners_on_secondary( # def test_warning_main_process_listeners_on_secondary(
app_one, app_two, run_multi # app_one, app_two, run_multi
): # ):
app_two.main_process_start(AsyncMock()) # app_two.main_process_start(AsyncMock())
app_two.main_process_stop(AsyncMock()) # app_two.main_process_stop(AsyncMock())
app_one.prepare(port=23456) # app_one.prepare(port=23456)
app_two.prepare(port=23457) # app_two.prepare(port=23457)
log = run_multi(app_one) # log = run_multi(app_one)
message = ( # message = (
f"Sanic found 2 listener(s) on " # f"Sanic found 2 listener(s) on "
"secondary applications attached to the main " # "secondary applications attached to the main "
"process. These will be ignored since main " # "process. These will be ignored since main "
"process listeners can only be attached to your " # "process listeners can only be attached to your "
"primary application: " # "primary application: "
f"{repr(app_one)}" # f"{repr(app_one)}"
) # )
assert ("sanic.error", logging.WARNING, message) in log # assert ("sanic.error", logging.WARNING, message) in log
def test_no_applications(): # def test_no_applications():
Sanic._app_registry = {} # Sanic._app_registry = {}
message = "Did not find any applications." # message = "Did not find any applications."
with pytest.raises(RuntimeError, match=message): # with pytest.raises(RuntimeError, match=message):
Sanic.serve() # Sanic.serve()
def test_oserror_warning(app_one, app_two, run_multi, capfd): # def test_oserror_warning(app_one, app_two, run_multi, capfd):
orig = AsyncioServer.__await__ # orig = AsyncioServer.__await__
AsyncioServer.__await__ = Mock(side_effect=OSError("foo")) # AsyncioServer.__await__ = Mock(side_effect=OSError("foo"))
app_one.prepare(port=23456, workers=2) # app_one.prepare(port=23456, workers=2)
app_two.prepare(port=23457, workers=2) # app_two.prepare(port=23457, workers=2)
run_multi(app_one) # run_multi(app_one)
captured = capfd.readouterr() # captured = capfd.readouterr()
assert ( # assert (
"An OSError was detected on startup. The encountered error was: foo" # "An OSError was detected on startup. The encountered error was: foo"
) in captured.err # ) in captured.err
AsyncioServer.__await__ = orig # AsyncioServer.__await__ = orig
def test_running_multiple_offset_warning(app_one, app_two, run_multi, capfd): # def test_running_multiple_offset_warning(app_one, app_two, run_multi, capfd):
app_one.prepare(port=23456, workers=2) # app_one.prepare(port=23456, workers=2)
app_two.prepare(port=23457) # app_two.prepare(port=23457)
run_multi(app_one) # run_multi(app_one)
captured = capfd.readouterr() # captured = capfd.readouterr()
assert ( # assert (
f"The primary application {repr(app_one)} is running " # f"The primary application {repr(app_one)} is running "
"with 2 worker(s). All " # "with 2 worker(s). All "
"application instances will run with the same number. " # "application instances will run with the same number. "
f"You requested {repr(app_two)} to run with " # f"You requested {repr(app_two)} to run with "
"1 worker(s), which will be ignored " # "1 worker(s), which will be ignored "
"in favor of the primary application." # "in favor of the primary application."
) in captured.err # ) in captured.err
def test_running_multiple_secondary(app_one, app_two, run_multi, capfd): # def test_running_multiple_secondary(app_one, app_two, run_multi, capfd):
app_one.prepare(port=23456, workers=2) # app_one.prepare(port=23456, workers=2)
app_two.prepare(port=23457) # app_two.prepare(port=23457)
before_start = AsyncMock() # before_start = AsyncMock()
app_two.before_server_start(before_start) # app_two.before_server_start(before_start)
run_multi(app_one) # run_multi(app_one)
before_start.await_count == 2 # before_start.await_count == 2

View File

@ -4,13 +4,15 @@ import pickle
import random import random
import signal import signal
from asyncio import sleep
import pytest import pytest
from sanic_testing.testing import HOST, PORT from sanic_testing.testing import HOST, PORT
from sanic import Blueprint from sanic import Blueprint, text
from sanic.log import logger from sanic.log import logger
from sanic.response import text from sanic.server.socket import configure_socket
@pytest.mark.skipif( @pytest.mark.skipif(
@ -24,14 +26,108 @@ def test_multiprocessing(app):
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1)) num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
process_list = set() process_list = set()
@app.after_server_start
async def shutdown(app):
await sleep(2.1)
app.stop()
def stop_on_alarm(*args): def stop_on_alarm(*args):
for process in multiprocessing.active_children(): for process in multiprocessing.active_children():
process_list.add(process.pid) process_list.add(process.pid)
process.terminate()
signal.signal(signal.SIGALRM, stop_on_alarm) signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(3) signal.alarm(2)
app.run(HOST, PORT, workers=num_workers) app.run(HOST, 4120, workers=num_workers, debug=True)
assert len(process_list) == num_workers + 1
@pytest.mark.skipif(
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
def test_multiprocessing_legacy(app):
"""Tests that the number of children we produce is correct"""
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
process_list = set()
@app.after_server_start
async def shutdown(app):
await sleep(2.1)
app.stop()
def stop_on_alarm(*args):
for process in multiprocessing.active_children():
process_list.add(process.pid)
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(2)
app.run(HOST, 4121, workers=num_workers, debug=True, legacy=True)
assert len(process_list) == num_workers
@pytest.mark.skipif(
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
def test_multiprocessing_legacy_sock(app):
"""Tests that the number of children we produce is correct"""
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
process_list = set()
@app.after_server_start
async def shutdown(app):
await sleep(2.1)
app.stop()
def stop_on_alarm(*args):
for process in multiprocessing.active_children():
process_list.add(process.pid)
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(2)
sock = configure_socket(
{
"host": HOST,
"port": 4121,
"unix": None,
"backlog": 100,
}
)
app.run(workers=num_workers, debug=True, legacy=True, sock=sock)
sock.close()
assert len(process_list) == num_workers
@pytest.mark.skipif(
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
def test_multiprocessing_legacy_unix(app):
"""Tests that the number of children we produce is correct"""
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
process_list = set()
@app.after_server_start
async def shutdown(app):
await sleep(2.1)
app.stop()
def stop_on_alarm(*args):
for process in multiprocessing.active_children():
process_list.add(process.pid)
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(2)
app.run(workers=num_workers, debug=True, legacy=True, unix="./test.sock")
assert len(process_list) == num_workers assert len(process_list) == num_workers
@ -45,19 +141,23 @@ def test_multiprocessing_with_blueprint(app):
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1)) num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
process_list = set() process_list = set()
@app.after_server_start
async def shutdown(app):
await sleep(2.1)
app.stop()
def stop_on_alarm(*args): def stop_on_alarm(*args):
for process in multiprocessing.active_children(): for process in multiprocessing.active_children():
process_list.add(process.pid) process_list.add(process.pid)
process.terminate()
signal.signal(signal.SIGALRM, stop_on_alarm) signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(3) signal.alarm(2)
bp = Blueprint("test_text") bp = Blueprint("test_text")
app.blueprint(bp) app.blueprint(bp)
app.run(HOST, PORT, workers=num_workers) app.run(HOST, 4121, workers=num_workers, debug=True)
assert len(process_list) == num_workers assert len(process_list) == num_workers + 1
# this function must be outside a test function so that it can be # this function must be outside a test function so that it can be
@ -66,62 +166,58 @@ def handler(request):
return text("Hello") return text("Hello")
def stop(app):
app.stop()
# Multiprocessing on Windows requires app to be able to be pickled # Multiprocessing on Windows requires app to be able to be pickled
@pytest.mark.parametrize("protocol", [3, 4]) @pytest.mark.parametrize("protocol", [3, 4])
def test_pickle_app(app, protocol): def test_pickle_app(app, protocol):
app.route("/")(handler) app.route("/")(handler)
app.router.finalize() app.after_server_start(stop)
app.router.reset() app.router.reset()
app.signal_router.reset()
p_app = pickle.dumps(app, protocol=protocol) p_app = pickle.dumps(app, protocol=protocol)
del app del app
up_p_app = pickle.loads(p_app) up_p_app = pickle.loads(p_app)
up_p_app.router.finalize()
assert up_p_app assert up_p_app
request, response = up_p_app.test_client.get("/") up_p_app.run(single_process=True)
assert response.text == "Hello"
@pytest.mark.parametrize("protocol", [3, 4]) @pytest.mark.parametrize("protocol", [3, 4])
def test_pickle_app_with_bp(app, protocol): def test_pickle_app_with_bp(app, protocol):
bp = Blueprint("test_text") bp = Blueprint("test_text")
bp.route("/")(handler) bp.route("/")(handler)
bp.after_server_start(stop)
app.blueprint(bp) app.blueprint(bp)
app.router.finalize()
app.router.reset() app.router.reset()
app.signal_router.reset()
p_app = pickle.dumps(app, protocol=protocol) p_app = pickle.dumps(app, protocol=protocol)
del app del app
up_p_app = pickle.loads(p_app) up_p_app = pickle.loads(p_app)
up_p_app.router.finalize()
assert up_p_app assert up_p_app
request, response = up_p_app.test_client.get("/") up_p_app.run(single_process=True)
assert response.text == "Hello"
@pytest.mark.parametrize("protocol", [3, 4]) @pytest.mark.parametrize("protocol", [3, 4])
def test_pickle_app_with_static(app, protocol): def test_pickle_app_with_static(app, protocol):
app.route("/")(handler) app.route("/")(handler)
app.after_server_start(stop)
app.static("/static", "/tmp/static") app.static("/static", "/tmp/static")
app.router.finalize()
app.router.reset() app.router.reset()
app.signal_router.reset()
p_app = pickle.dumps(app, protocol=protocol) p_app = pickle.dumps(app, protocol=protocol)
del app del app
up_p_app = pickle.loads(p_app) up_p_app = pickle.loads(p_app)
up_p_app.router.finalize()
assert up_p_app assert up_p_app
request, response = up_p_app.test_client.get("/static/missing.txt") up_p_app.run(single_process=True)
assert response.status == 404
def test_main_process_event(app, caplog): def test_main_process_event(app, caplog):
# Selects a number at random so we can spot check # Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1)) num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
def stop_on_alarm(*args): app.after_server_start(stop)
for process in multiprocessing.active_children():
process.terminate()
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(1)
@app.listener("main_process_start") @app.listener("main_process_start")
def main_process_start(app, loop): def main_process_start(app, loop):

View File

@ -1,4 +1,3 @@
from httpx import AsyncByteStream
from sanic_testing.reusable import ReusableClient from sanic_testing.reusable import ReusableClient
from sanic.response import json, text from sanic.response import json, text

View File

@ -17,6 +17,10 @@ def no_skip():
yield yield
Sanic._app_registry = {} Sanic._app_registry = {}
Sanic.should_auto_reload = should_auto_reload Sanic.should_auto_reload = should_auto_reload
try:
del os.environ["SANIC_MOTD_OUTPUT"]
except KeyError:
...
def get_primary(app: Sanic) -> ApplicationServerInfo: def get_primary(app: Sanic) -> ApplicationServerInfo:
@ -55,17 +59,21 @@ def test_reload_dir(app: Sanic, dirs, caplog):
assert ("sanic.root", logging.WARNING, message) in caplog.record_tuples assert ("sanic.root", logging.WARNING, message) in caplog.record_tuples
def test_fast(app: Sanic, run_multi): def test_fast(app: Sanic, caplog):
app.prepare(fast=True) @app.after_server_start
async def stop(app, _):
app.stop()
try: try:
workers = len(os.sched_getaffinity(0)) workers = len(os.sched_getaffinity(0))
except AttributeError: except AttributeError:
workers = os.cpu_count() or 1 workers = os.cpu_count() or 1
with caplog.at_level(logging.INFO):
app.prepare(fast=True)
assert app.state.fast assert app.state.fast
assert app.state.workers == workers assert app.state.workers == workers
logs = run_multi(app, logging.INFO) messages = [m[2] for m in caplog.record_tuples]
messages = [m[2] for m in logs]
assert f"mode: production, goin' fast w/ {workers} workers" in messages assert f"mode: production, goin' fast w/ {workers} workers" in messages

View File

@ -568,7 +568,7 @@ def test_streaming_echo():
@app.listener("after_server_start") @app.listener("after_server_start")
async def client_task(app, loop): async def client_task(app, loop):
try: try:
reader, writer = await asyncio.open_connection(*addr) reader, writer = await asyncio.open_connection("localhost", 8000)
await client(app, reader, writer) await client(app, reader, writer)
finally: finally:
writer.close() writer.close()
@ -576,7 +576,7 @@ def test_streaming_echo():
async def client(app, reader, writer): async def client(app, reader, writer):
# Unfortunately httpx does not support 2-way streaming, so do it by hand. # Unfortunately httpx does not support 2-way streaming, so do it by hand.
host = f"host: {addr[0]}:{addr[1]}\r\n".encode() host = f"host: localhost:8000\r\n".encode()
writer.write( writer.write(
b"POST /echo HTTP/1.1\r\n" + host + b"content-length: 2\r\n" b"POST /echo HTTP/1.1\r\n" + host + b"content-length: 2\r\n"
b"content-type: text/plain; charset=utf-8\r\n" b"content-type: text/plain; charset=utf-8\r\n"
@ -625,6 +625,4 @@ def test_streaming_echo():
# Use random port for tests # Use random port for tests
with closing(socket()) as sock: with closing(socket()) as sock:
sock.bind(("127.0.0.1", 0)) app.run(access_log=False)
addr = sock.getsockname()
app.run(sock=sock, access_log=False)

View File

@ -3,41 +3,51 @@ import logging
from time import sleep from time import sleep
import pytest
from sanic import Sanic from sanic import Sanic
from sanic.exceptions import ServiceUnavailable from sanic.exceptions import ServiceUnavailable
from sanic.log import LOGGING_CONFIG_DEFAULTS from sanic.log import LOGGING_CONFIG_DEFAULTS
from sanic.response import text from sanic.response import text
response_timeout_app = Sanic("test_response_timeout") @pytest.fixture
response_timeout_default_app = Sanic("test_response_timeout_default") def response_timeout_app():
response_handler_cancelled_app = Sanic("test_response_handler_cancelled") app = Sanic("test_response_timeout")
app.config.RESPONSE_TIMEOUT = 1
response_timeout_app.config.RESPONSE_TIMEOUT = 1 @app.route("/1")
response_timeout_default_app.config.RESPONSE_TIMEOUT = 1
response_handler_cancelled_app.config.RESPONSE_TIMEOUT = 1
response_handler_cancelled_app.ctx.flag = False
@response_timeout_app.route("/1")
async def handler_1(request): async def handler_1(request):
await asyncio.sleep(2) await asyncio.sleep(2)
return text("OK") return text("OK")
@app.exception(ServiceUnavailable)
@response_timeout_app.exception(ServiceUnavailable)
def handler_exception(request, exception): def handler_exception(request, exception):
return text("Response Timeout from error_handler.", 503) return text("Response Timeout from error_handler.", 503)
return app
@response_timeout_default_app.route("/1")
@pytest.fixture
def response_timeout_default_app():
app = Sanic("test_response_timeout_default")
app.config.RESPONSE_TIMEOUT = 1
@app.route("/1")
async def handler_2(request): async def handler_2(request):
await asyncio.sleep(2) await asyncio.sleep(2)
return text("OK") return text("OK")
return app
@response_handler_cancelled_app.exception(asyncio.CancelledError)
@pytest.fixture
def response_handler_cancelled_app():
app = Sanic("test_response_handler_cancelled")
app.config.RESPONSE_TIMEOUT = 1
app.ctx.flag = False
@app.exception(asyncio.CancelledError)
def handler_cancelled(request, exception): def handler_cancelled(request, exception):
# If we get a CancelledError, it means sanic has already sent a response, # If we get a CancelledError, it means sanic has already sent a response,
# we should not ever have to handle a CancelledError. # we should not ever have to handle a CancelledError.
@ -46,26 +56,27 @@ def handler_cancelled(request, exception):
# The client will never receive this response, because the socket # The client will never receive this response, because the socket
# is already closed when we get a CancelledError. # is already closed when we get a CancelledError.
@app.route("/1")
@response_handler_cancelled_app.route("/1")
async def handler_3(request): async def handler_3(request):
await asyncio.sleep(2) await asyncio.sleep(2)
return text("OK") return text("OK")
return app
def test_server_error_response_timeout():
def test_server_error_response_timeout(response_timeout_app):
request, response = response_timeout_app.test_client.get("/1") request, response = response_timeout_app.test_client.get("/1")
assert response.status == 503 assert response.status == 503
assert response.text == "Response Timeout from error_handler." assert response.text == "Response Timeout from error_handler."
def test_default_server_error_response_timeout(): def test_default_server_error_response_timeout(response_timeout_default_app):
request, response = response_timeout_default_app.test_client.get("/1") request, response = response_timeout_default_app.test_client.get("/1")
assert response.status == 503 assert response.status == 503
assert "Response Timeout" in response.text assert "Response Timeout" in response.text
def test_response_handler_cancelled(): def test_response_handler_cancelled(response_handler_cancelled_app):
request, response = response_handler_cancelled_app.test_client.get("/1") request, response = response_handler_cancelled_app.test_client.get("/1")
assert response.status == 503 assert response.status == 503
assert "Response Timeout" in response.text assert "Response Timeout" in response.text

View File

@ -18,12 +18,6 @@ AVAILABLE_LISTENERS = [
"after_server_stop", "after_server_stop",
] ]
skipif_no_alarm = pytest.mark.skipif(
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
def create_listener(listener_name, in_list): def create_listener(listener_name, in_list):
async def _listener(app, loop): async def _listener(app, loop):
@ -42,18 +36,17 @@ def create_listener_no_loop(listener_name, in_list):
def start_stop_app(random_name_app, **run_kwargs): def start_stop_app(random_name_app, **run_kwargs):
def stop_on_alarm(signum, frame): @random_name_app.after_server_start
random_name_app.stop() async def shutdown(app):
await asyncio.sleep(1.1)
app.stop()
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(1)
try: try:
random_name_app.run(HOST, PORT, **run_kwargs) random_name_app.run(HOST, PORT, single_process=True, **run_kwargs)
except KeyboardInterrupt: except KeyboardInterrupt:
pass pass
@skipif_no_alarm
@pytest.mark.parametrize("listener_name", AVAILABLE_LISTENERS) @pytest.mark.parametrize("listener_name", AVAILABLE_LISTENERS)
def test_single_listener(app, listener_name): def test_single_listener(app, listener_name):
"""Test that listeners on their own work""" """Test that listeners on their own work"""
@ -64,7 +57,6 @@ def test_single_listener(app, listener_name):
assert app.name + listener_name == output.pop() assert app.name + listener_name == output.pop()
@skipif_no_alarm
@pytest.mark.parametrize("listener_name", AVAILABLE_LISTENERS) @pytest.mark.parametrize("listener_name", AVAILABLE_LISTENERS)
def test_single_listener_no_loop(app, listener_name): def test_single_listener_no_loop(app, listener_name):
"""Test that listeners on their own work""" """Test that listeners on their own work"""
@ -75,7 +67,6 @@ def test_single_listener_no_loop(app, listener_name):
assert app.name + listener_name == output.pop() assert app.name + listener_name == output.pop()
@skipif_no_alarm
@pytest.mark.parametrize("listener_name", AVAILABLE_LISTENERS) @pytest.mark.parametrize("listener_name", AVAILABLE_LISTENERS)
def test_register_listener(app, listener_name): def test_register_listener(app, listener_name):
""" """
@ -90,7 +81,6 @@ def test_register_listener(app, listener_name):
assert app.name + listener_name == output.pop() assert app.name + listener_name == output.pop()
@skipif_no_alarm
def test_all_listeners(app): def test_all_listeners(app):
output = [] output = []
for listener_name in AVAILABLE_LISTENERS: for listener_name in AVAILABLE_LISTENERS:
@ -101,7 +91,6 @@ def test_all_listeners(app):
assert app.name + listener_name == output.pop() assert app.name + listener_name == output.pop()
@skipif_no_alarm
def test_all_listeners_as_convenience(app): def test_all_listeners_as_convenience(app):
output = [] output = []
for listener_name in AVAILABLE_LISTENERS: for listener_name in AVAILABLE_LISTENERS:
@ -159,7 +148,6 @@ def test_create_server_trigger_events(app):
async def stop(app, loop): async def stop(app, loop):
nonlocal flag1 nonlocal flag1
flag1 = True flag1 = True
signal.alarm(1)
async def before_stop(app, loop): async def before_stop(app, loop):
nonlocal flag2 nonlocal flag2
@ -178,10 +166,13 @@ def test_create_server_trigger_events(app):
# Use random port for tests # Use random port for tests
signal.signal(signal.SIGALRM, stop_on_alarm) signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(1)
with closing(socket()) as sock: with closing(socket()) as sock:
sock.bind(("127.0.0.1", 0)) sock.bind(("127.0.0.1", 0))
serv_coro = app.create_server(return_asyncio_server=True, sock=sock) serv_coro = app.create_server(
return_asyncio_server=True, sock=sock, debug=True
)
serv_task = asyncio.ensure_future(serv_coro, loop=loop) serv_task = asyncio.ensure_future(serv_coro, loop=loop)
server = loop.run_until_complete(serv_task) server = loop.run_until_complete(serv_task)
loop.run_until_complete(server.startup()) loop.run_until_complete(server.startup())
@ -199,7 +190,6 @@ def test_create_server_trigger_events(app):
loop.run_until_complete(close_task) loop.run_until_complete(close_task)
# Complete all tasks on the loop # Complete all tasks on the loop
signal.stopped = True
for connection in server.connections: for connection in server.connections:
connection.close_if_idle() connection.close_if_idle()
loop.run_until_complete(server.after_stop()) loop.run_until_complete(server.after_stop())

View File

@ -33,6 +33,7 @@ def set_loop(app, loop):
def after(app, loop): def after(app, loop):
print("...")
calledq.put(mock.called) calledq.put(mock.called)
@ -48,10 +49,31 @@ def test_register_system_signals(app):
app.listener("before_server_start")(set_loop) app.listener("before_server_start")(set_loop)
app.listener("after_server_stop")(after) app.listener("after_server_stop")(after)
app.run(HOST, PORT) app.run(HOST, PORT, single_process=True)
assert calledq.get() is True assert calledq.get() is True
@pytest.mark.skipif(os.name == "nt", reason="May hang CI on py38/windows")
def test_no_register_system_signals_fails(app):
"""Test if sanic don't register system signals"""
@app.route("/hello")
async def hello_route(request):
return HTTPResponse()
app.listener("after_server_start")(stop)
app.listener("before_server_start")(set_loop)
app.listener("after_server_stop")(after)
message = (
"Cannot run Sanic.serve with register_sys_signals=False. Use "
"either Sanic.serve_single or Sanic.serve_legacy."
)
with pytest.raises(RuntimeError, match=message):
app.prepare(HOST, PORT, register_sys_signals=False)
assert calledq.empty()
@pytest.mark.skipif(os.name == "nt", reason="May hang CI on py38/windows") @pytest.mark.skipif(os.name == "nt", reason="May hang CI on py38/windows")
def test_dont_register_system_signals(app): def test_dont_register_system_signals(app):
"""Test if sanic don't register system signals""" """Test if sanic don't register system signals"""
@ -64,7 +86,7 @@ def test_dont_register_system_signals(app):
app.listener("before_server_start")(set_loop) app.listener("before_server_start")(set_loop)
app.listener("after_server_stop")(after) app.listener("after_server_stop")(after)
app.run(HOST, PORT, register_sys_signals=False) app.run(HOST, PORT, register_sys_signals=False, single_process=True)
assert calledq.get() is False assert calledq.get() is False

View File

@ -610,24 +610,24 @@ def test_get_ssl_context_only_mkcert(
MockTrustmeCreator.generate_cert.assert_not_called() MockTrustmeCreator.generate_cert.assert_not_called()
def test_no_http3_with_trustme( # def test_no_http3_with_trustme(
app, # app,
monkeypatch, # monkeypatch,
MockTrustmeCreator, # MockTrustmeCreator,
): # ):
monkeypatch.setattr( # monkeypatch.setattr(
sanic.http.tls.creators, "TrustmeCreator", MockTrustmeCreator # sanic.http.tls.creators, "TrustmeCreator", MockTrustmeCreator
) # )
MockTrustmeCreator.SUPPORTED = True # MockTrustmeCreator.SUPPORTED = True
app.config.LOCAL_CERT_CREATOR = "TRUSTME" # app.config.LOCAL_CERT_CREATOR = "TRUSTME"
with pytest.raises( # with pytest.raises(
SanicException, # SanicException,
match=( # match=(
"Sorry, you cannot currently use trustme as a local certificate " # "Sorry, you cannot currently use trustme as a local certificate "
"generator for an HTTP/3 server" # "generator for an HTTP/3 server"
), # ),
): # ):
app.run(version=3, debug=True) # app.run(version=3, debug=True)
def test_sanic_ssl_context_create(): def test_sanic_ssl_context_create():

View File

@ -1,9 +1,6 @@
import asyncio # import asyncio
import logging import logging
import os import os
import platform
import subprocess
import sys
from asyncio import AbstractEventLoop from asyncio import AbstractEventLoop
from string import ascii_lowercase from string import ascii_lowercase
@ -19,6 +16,11 @@ from sanic.request import Request
from sanic.response import text from sanic.response import text
# import platform
# import subprocess
# import sys
pytestmark = pytest.mark.skipif(os.name != "posix", reason="UNIX only") pytestmark = pytest.mark.skipif(os.name != "posix", reason="UNIX only")
SOCKPATH = "/tmp/sanictest.sock" SOCKPATH = "/tmp/sanictest.sock"
SOCKPATH2 = "/tmp/sanictest2.sock" SOCKPATH2 = "/tmp/sanictest2.sock"
@ -49,6 +51,9 @@ def socket_cleanup():
pass pass
@pytest.mark.xfail(
reason="Flaky Test on Non Linux Infra",
)
def test_unix_socket_creation(caplog: LogCaptureFixture): def test_unix_socket_creation(caplog: LogCaptureFixture):
from socket import AF_UNIX, socket from socket import AF_UNIX, socket
@ -59,14 +64,14 @@ def test_unix_socket_creation(caplog: LogCaptureFixture):
app = Sanic(name="test") app = Sanic(name="test")
@app.listener("after_server_start") @app.after_server_start
def running(app: Sanic, loop: AbstractEventLoop): def running(app: Sanic):
assert os.path.exists(SOCKPATH) assert os.path.exists(SOCKPATH)
assert ino != os.stat(SOCKPATH).st_ino assert ino != os.stat(SOCKPATH).st_ino
app.stop() app.stop()
with caplog.at_level(logging.INFO): with caplog.at_level(logging.INFO):
app.run(unix=SOCKPATH) app.run(unix=SOCKPATH, single_process=True)
assert ( assert (
"sanic.root", "sanic.root",
@ -79,9 +84,9 @@ def test_unix_socket_creation(caplog: LogCaptureFixture):
@pytest.mark.parametrize("path", (".", "no-such-directory/sanictest.sock")) @pytest.mark.parametrize("path", (".", "no-such-directory/sanictest.sock"))
def test_invalid_paths(path: str): def test_invalid_paths(path: str):
app = Sanic(name="test") app = Sanic(name="test")
#
with pytest.raises((FileExistsError, FileNotFoundError)): with pytest.raises((FileExistsError, FileNotFoundError)):
app.run(unix=path) app.run(unix=path, single_process=True)
def test_dont_replace_file(): def test_dont_replace_file():
@ -90,12 +95,12 @@ def test_dont_replace_file():
app = Sanic(name="test") app = Sanic(name="test")
@app.listener("after_server_start") @app.after_server_start
def stop(app: Sanic, loop: AbstractEventLoop): def stop(app: Sanic):
app.stop() app.stop()
with pytest.raises(FileExistsError): with pytest.raises(FileExistsError):
app.run(unix=SOCKPATH) app.run(unix=SOCKPATH, single_process=True)
def test_dont_follow_symlink(): def test_dont_follow_symlink():
@ -107,36 +112,36 @@ def test_dont_follow_symlink():
app = Sanic(name="test") app = Sanic(name="test")
@app.listener("after_server_start") @app.after_server_start
def stop(app: Sanic, loop: AbstractEventLoop): def stop(app: Sanic):
app.stop() app.stop()
with pytest.raises(FileExistsError): with pytest.raises(FileExistsError):
app.run(unix=SOCKPATH) app.run(unix=SOCKPATH, single_process=True)
def test_socket_deleted_while_running(): def test_socket_deleted_while_running():
app = Sanic(name="test") app = Sanic(name="test")
@app.listener("after_server_start") @app.after_server_start
async def hack(app: Sanic, loop: AbstractEventLoop): async def hack(app: Sanic):
os.unlink(SOCKPATH) os.unlink(SOCKPATH)
app.stop() app.stop()
app.run(host="myhost.invalid", unix=SOCKPATH) app.run(host="myhost.invalid", unix=SOCKPATH, single_process=True)
def test_socket_replaced_with_file(): def test_socket_replaced_with_file():
app = Sanic(name="test") app = Sanic(name="test")
@app.listener("after_server_start") @app.after_server_start
async def hack(app: Sanic, loop: AbstractEventLoop): async def hack(app: Sanic):
os.unlink(SOCKPATH) os.unlink(SOCKPATH)
with open(SOCKPATH, "w") as f: with open(SOCKPATH, "w") as f:
f.write("Not a socket") f.write("Not a socket")
app.stop() app.stop()
app.run(host="myhost.invalid", unix=SOCKPATH) app.run(host="myhost.invalid", unix=SOCKPATH, single_process=True)
def test_unix_connection(): def test_unix_connection():
@ -146,8 +151,8 @@ def test_unix_connection():
def handler(request: Request): def handler(request: Request):
return text(f"{request.conn_info.server}") return text(f"{request.conn_info.server}")
@app.listener("after_server_start") @app.after_server_start
async def client(app: Sanic, loop: AbstractEventLoop): async def client(app: Sanic):
if httpx_version >= (0, 20): if httpx_version >= (0, 20):
transport = httpx.AsyncHTTPTransport(uds=SOCKPATH) transport = httpx.AsyncHTTPTransport(uds=SOCKPATH)
else: else:
@ -160,10 +165,7 @@ def test_unix_connection():
finally: finally:
app.stop() app.stop()
app.run(host="myhost.invalid", unix=SOCKPATH) app.run(host="myhost.invalid", unix=SOCKPATH, single_process=True)
app_multi = Sanic(name="test")
def handler(request: Request): def handler(request: Request):
@ -181,86 +183,87 @@ async def client(app: Sanic, loop: AbstractEventLoop):
def test_unix_connection_multiple_workers(): def test_unix_connection_multiple_workers():
app_multi = Sanic(name="test")
app_multi.get("/")(handler) app_multi.get("/")(handler)
app_multi.listener("after_server_start")(client) app_multi.listener("after_server_start")(client)
app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2) app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2)
@pytest.mark.xfail( # @pytest.mark.xfail(
condition=platform.system() != "Linux", # condition=platform.system() != "Linux",
reason="Flaky Test on Non Linux Infra", # reason="Flaky Test on Non Linux Infra",
) # )
async def test_zero_downtime(): # async def test_zero_downtime():
"""Graceful server termination and socket replacement on restarts""" # """Graceful server termination and socket replacement on restarts"""
from signal import SIGINT # from signal import SIGINT
from time import monotonic as current_time # from time import monotonic as current_time
async def client(): # async def client():
if httpx_version >= (0, 20): # if httpx_version >= (0, 20):
transport = httpx.AsyncHTTPTransport(uds=SOCKPATH) # transport = httpx.AsyncHTTPTransport(uds=SOCKPATH)
else: # else:
transport = httpcore.AsyncConnectionPool(uds=SOCKPATH) # transport = httpcore.AsyncConnectionPool(uds=SOCKPATH)
for _ in range(40): # for _ in range(40):
async with httpx.AsyncClient(transport=transport) as client: # async with httpx.AsyncClient(transport=transport) as client:
r = await client.get("http://localhost/sleep/0.1") # r = await client.get("http://localhost/sleep/0.1")
assert r.status_code == 200, r.text # assert r.status_code == 200, r.text
assert r.text == "Slept 0.1 seconds.\n" # assert r.text == "Slept 0.1 seconds.\n"
def spawn(): # def spawn():
command = [ # command = [
sys.executable, # sys.executable,
"-m", # "-m",
"sanic", # "sanic",
"--debug", # "--debug",
"--unix", # "--unix",
SOCKPATH, # SOCKPATH,
"examples.delayed_response.app", # "examples.delayed_response.app",
] # ]
DN = subprocess.DEVNULL # DN = subprocess.DEVNULL
return subprocess.Popen( # return subprocess.Popen(
command, stdin=DN, stdout=DN, stderr=subprocess.PIPE # command, stdin=DN, stdout=DN, stderr=subprocess.PIPE
) # )
try: # try:
processes = [spawn()] # processes = [spawn()]
while not os.path.exists(SOCKPATH): # while not os.path.exists(SOCKPATH):
if processes[0].poll() is not None: # if processes[0].poll() is not None:
raise Exception( # raise Exception(
"Worker did not start properly. " # "Worker did not start properly. "
f"stderr: {processes[0].stderr.read()}" # f"stderr: {processes[0].stderr.read()}"
) # )
await asyncio.sleep(0.0001) # await asyncio.sleep(0.0001)
ino = os.stat(SOCKPATH).st_ino # ino = os.stat(SOCKPATH).st_ino
task = asyncio.get_event_loop().create_task(client()) # task = asyncio.get_event_loop().create_task(client())
start_time = current_time() # start_time = current_time()
while current_time() < start_time + 6: # while current_time() < start_time + 6:
# Start a new one and wait until the socket is replaced # # Start a new one and wait until the socket is replaced
processes.append(spawn()) # processes.append(spawn())
while ino == os.stat(SOCKPATH).st_ino: # while ino == os.stat(SOCKPATH).st_ino:
await asyncio.sleep(0.001) # await asyncio.sleep(0.001)
ino = os.stat(SOCKPATH).st_ino # ino = os.stat(SOCKPATH).st_ino
# Graceful termination of the previous one # # Graceful termination of the previous one
processes[-2].send_signal(SIGINT) # processes[-2].send_signal(SIGINT)
# Wait until client has completed all requests # # Wait until client has completed all requests
await task # await task
processes[-1].send_signal(SIGINT) # processes[-1].send_signal(SIGINT)
for worker in processes: # for worker in processes:
try: # try:
worker.wait(1.0) # worker.wait(1.0)
except subprocess.TimeoutExpired: # except subprocess.TimeoutExpired:
raise Exception( # raise Exception(
f"Worker would not terminate:\n{worker.stderr}" # f"Worker would not terminate:\n{worker.stderr}"
) # )
finally: # finally:
for worker in processes: # for worker in processes:
worker.kill() # worker.kill()
# Test for clean run and termination # # Test for clean run and termination
return_codes = [worker.poll() for worker in processes] # return_codes = [worker.poll() for worker in processes]
# Removing last process which seems to be flappy # # Removing last process which seems to be flappy
return_codes.pop() # return_codes.pop()
assert len(processes) > 5 # assert len(processes) > 5
assert all(code == 0 for code in return_codes) # assert all(code == 0 for code in return_codes)
# Removing this check that seems to be flappy # # Removing this check that seems to be flappy
# assert not os.path.exists(SOCKPATH) # # assert not os.path.exists(SOCKPATH)

View File

@ -0,0 +1,167 @@
import json
from datetime import datetime
from logging import ERROR, INFO
from socket import AF_INET, SOCK_STREAM, timeout
from unittest.mock import Mock, patch
import pytest
from sanic.log import Colors
from sanic.worker.inspector import Inspector, inspect
# Canned inspector state payload shared by the tests below.
DATA = {
    "info": {
        "packages": ["foo"],
    },
    "extra": {
        "more": "data",
    },
    "workers": {"Worker-Name": {"some": "state"}},
}
# JSON-encoded form, as the inspector socket would transmit it.
SERIALIZED = json.dumps(DATA)
def test_inspector_stop():
    """stop() flips the inspector's run flag so its accept loop exits."""
    inspector = Inspector(Mock(), {}, {}, "", 1)
    assert inspector.run is True
    inspector.stop()
    assert inspector.run is False
@patch("sanic.worker.inspector.sys.stdout.write")
@patch("sanic.worker.inspector.socket")
@pytest.mark.parametrize("command", ("foo", "raw", "pretty"))
def test_send_inspect(socket: Mock, write: Mock, command: str):
    """inspect() sends the command over TCP; only raw/pretty print a reply."""
    # Make the patched socket module double as the socket instance and as
    # its own context manager.
    socket.return_value = socket
    socket.__enter__.return_value = socket
    socket.recv.return_value = SERIALIZED.encode()
    inspect("localhost", 9999, command)
    socket.sendall.assert_called_once_with(command.encode())
    socket.recv.assert_called_once_with(4096)
    socket.connect.assert_called_once_with(("localhost", 9999))
    socket.assert_called_once_with(AF_INET, SOCK_STREAM)
    if command == "raw":
        # Raw mode echoes the exact JSON payload to stdout.
        write.assert_called_once_with(SERIALIZED)
    elif command == "pretty":
        write.assert_called()
    else:
        # Unknown commands produce no stdout output.
        write.assert_not_called()
@patch("sanic.worker.inspector.sys")
@patch("sanic.worker.inspector.socket")
def test_send_inspect_conn_refused(socket: Mock, sys: Mock, caplog):
    """A refused connection logs an error and exits with status 1."""
    with caplog.at_level(INFO):
        socket.return_value = socket
        socket.__enter__.return_value = socket
        socket.connect.side_effect = ConnectionRefusedError()
        inspect("localhost", 9999, "foo")
        socket.close.assert_called_once()
        sys.exit.assert_called_once_with(1)

    message = (
        f"{Colors.RED}Could not connect to inspector at: "
        f"{Colors.YELLOW}('localhost', 9999){Colors.END}\n"
        "Either the application is not running, or it did not start "
        "an inspector instance."
    )
    assert ("sanic.error", ERROR, message) in caplog.record_tuples
@patch("sanic.worker.inspector.configure_socket")
@pytest.mark.parametrize("action", (b"reload", b"shutdown", b"foo"))
def test_run_inspector(configure_socket: Mock, action: bytes):
    """Each inbound command routes to the matching inspector handler."""
    sock = Mock()
    conn = Mock()
    conn.recv.return_value = action
    configure_socket.return_value = sock
    inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
    inspector.reload = Mock()  # type: ignore
    inspector.shutdown = Mock()  # type: ignore
    inspector.state_to_json = Mock(return_value="foo")  # type: ignore

    def accept():
        # Stop the loop after serving a single connection.
        inspector.run = False
        return conn, ...

    sock.accept = accept
    inspector()
    configure_socket.assert_called_once_with(
        {"host": "localhost", "port": 9999, "unix": None, "backlog": 1}
    )
    conn.recv.assert_called_with(64)
    if action == b"reload":
        conn.send.assert_called_with(b"\n")
        inspector.reload.assert_called()
        inspector.shutdown.assert_not_called()
        inspector.state_to_json.assert_not_called()
    elif action == b"shutdown":
        conn.send.assert_called_with(b"\n")
        inspector.reload.assert_not_called()
        inspector.shutdown.assert_called()
        inspector.state_to_json.assert_not_called()
    else:
        # Anything else is answered with the JSON-serialized state.
        conn.send.assert_called_with(b'"foo"')
        inspector.reload.assert_not_called()
        inspector.shutdown.assert_not_called()
        inspector.state_to_json.assert_called()
@patch("sanic.worker.inspector.configure_socket")
def test_accept_timeout(configure_socket: Mock):
    """A socket accept timeout is swallowed; no handler is invoked."""
    sock = Mock()
    configure_socket.return_value = sock
    inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
    inspector.reload = Mock()  # type: ignore
    inspector.shutdown = Mock()  # type: ignore
    inspector.state_to_json = Mock(return_value="foo")  # type: ignore

    def accept():
        # Stop the loop, then simulate socket.timeout on accept().
        inspector.run = False
        raise timeout

    sock.accept = accept
    inspector()
    inspector.reload.assert_not_called()
    inspector.shutdown.assert_not_called()
    inspector.state_to_json.assert_not_called()
def test_state_to_json():
    """Datetimes in worker state are serialized to ISO-8601 strings."""
    now = datetime.now()
    now_iso = now.isoformat()
    app_info = {"app": "hello"}
    worker_state = {"Test": {"now": now, "nested": {"foo": now}}}
    inspector = Inspector(Mock(), app_info, worker_state, "", 0)
    state = inspector.state_to_json()
    assert state == {
        "info": app_info,
        "workers": {"Test": {"now": now_iso, "nested": {"foo": now_iso}}},
    }
def test_reload():
    """reload() publishes an all-processes restart message."""
    publisher = Mock()
    inspector = Inspector(publisher, {}, {}, "", 0)
    inspector.reload()
    publisher.send.assert_called_once_with("__ALL_PROCESSES__:")
def test_shutdown():
    """shutdown() publishes the terminate sentinel."""
    publisher = Mock()
    inspector = Inspector(publisher, {}, {}, "", 0)
    inspector.shutdown()
    publisher.send.assert_called_once_with("__TERMINATE__")

102
tests/worker/test_loader.py Normal file
View File

@ -0,0 +1,102 @@
import sys
from os import getcwd
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import Mock, patch
import pytest
from sanic.app import Sanic
from sanic.worker.loader import AppLoader, CertLoader
STATIC = Path.cwd() / "tests" / "static"
@pytest.mark.parametrize(
    "module_input", ("tests.fake.server:app", "tests.fake.server.app")
)
def test_load_app_instance(module_input):
    """Both colon and dotted notation resolve to a Sanic app instance."""
    loader = AppLoader(module_input)
    app = loader.load()
    assert isinstance(app, Sanic)
@pytest.mark.parametrize(
    "module_input",
    ("tests.fake.server:create_app", "tests.fake.server:create_app()"),
)
def test_load_app_factory(module_input):
    """Factory targets (with or without call parens) produce an app."""
    loader = AppLoader(module_input, as_factory=True)
    app = loader.load()
    assert isinstance(app, Sanic)
def test_load_app_simple():
    """A directory path with as_simple=True yields a static-serving app."""
    loader = AppLoader(str(STATIC), as_simple=True)
    app = loader.load()
    assert isinstance(app, Sanic)
def test_create_with_factory():
    """A callable factory passed directly produces an app."""
    loader = AppLoader(factory=lambda: Sanic("Test"))
    app = loader.load()
    assert isinstance(app, Sanic)
def test_cwd_in_path():
    """Loading prepends the current working directory to sys.path."""
    AppLoader("tests.fake.server:app").load()
    assert getcwd() in sys.path
def test_input_is_dir():
    """A bare directory without --simple is rejected with a hint."""
    loader = AppLoader(str(STATIC))
    message = (
        "App not found.\n   Please use --simple if you are passing a "
        f"directory to sanic.\n   eg. sanic {str(STATIC)} --simple"
    )
    with pytest.raises(ValueError, match=message):
        loader.load()
def test_input_is_factory():
    """A callable target without --factory is rejected with a hint."""
    ns = SimpleNamespace(module="foo")
    loader = AppLoader("tests.fake.server:create_app", args=ns)
    message = (
        "Module is not a Sanic app, it is a function\n  If this callable "
        "returns a Sanic instance try: \nsanic foo --factory"
    )
    with pytest.raises(ValueError, match=message):
        loader.load()
def test_input_is_module():
    """A plain module target (no attribute) is rejected with a hint."""
    ns = SimpleNamespace(module="foo")
    loader = AppLoader("tests.fake.server", args=ns)
    message = (
        "Module is not a Sanic app, it is a module\n  "
        "Perhaps you meant foo:app?"
    )
    with pytest.raises(ValueError, match=message):
        loader.load()
@pytest.mark.parametrize("creator", ("mkcert", "trustme"))
@patch("sanic.worker.loader.TrustmeCreator")
@patch("sanic.worker.loader.MkcertCreator")
def test_cert_loader(MkcertCreator: Mock, TrustmeCreator: Mock, creator: str):
    """CertLoader picks the creator named in its config and generates a cert."""
    MkcertCreator.return_value = MkcertCreator
    TrustmeCreator.return_value = TrustmeCreator
    data = {
        "creator": creator,
        "key": Path.cwd() / "tests" / "certs" / "localhost" / "privkey.pem",
        "cert": Path.cwd() / "tests" / "certs" / "localhost" / "fullchain.pem",
        "localhost": "localhost",
    }
    app = Sanic("Test")
    loader = CertLoader(data)  # type: ignore
    loader.load(app)
    # Only the selected creator class should have been exercised.
    creator_class = MkcertCreator if creator == "mkcert" else TrustmeCreator
    creator_class.assert_called_once_with(app, data["key"], data["cert"])
    creator_class.generate_cert.assert_called_once_with("localhost")

View File

@ -0,0 +1,217 @@
from signal import SIGINT, SIGKILL
from unittest.mock import Mock, call, patch
import pytest
from sanic.worker.manager import WorkerManager
def fake_serve():
    """No-op stand-in for the worker serve target passed to WorkerManager."""
def test_manager_no_workers():
    """Constructing a manager with zero workers is rejected."""
    message = "Cannot serve with no workers"
    with pytest.raises(RuntimeError, match=message):
        WorkerManager(
            0,
            fake_serve,
            {},
            Mock(),
            (Mock(), Mock()),
            {},
        )
@patch("sanic.worker.process.os")
def test_terminate(os_mock: Mock):
    """terminate() flags the manager and SIGINTs each worker process."""
    process = Mock()
    process.pid = 1234
    context = Mock()
    context.Process.return_value = process
    manager = WorkerManager(
        1,
        fake_serve,
        {},
        context,
        (Mock(), Mock()),
        {},
    )
    assert manager.terminated is False
    manager.terminate()
    assert manager.terminated is True
    os_mock.kill.assert_called_once_with(1234, SIGINT)
# NOTE(review): function name is missing a "d" (shutdown); kept as-is since
# pytest discovers it by this name.
@patch("sanic.worker.process.os")
def test_shutown(os_mock: Mock):
    """shutdown() SIGINTs workers that are still alive."""
    process = Mock()
    process.pid = 1234
    process.is_alive.return_value = True
    context = Mock()
    context.Process.return_value = process
    manager = WorkerManager(
        1,
        fake_serve,
        {},
        context,
        (Mock(), Mock()),
        {},
    )
    manager.shutdown()
    os_mock.kill.assert_called_once_with(1234, SIGINT)
@patch("sanic.worker.manager.os")
def test_kill(os_mock: Mock):
    """kill() escalates to SIGKILL on each worker process."""
    process = Mock()
    process.pid = 1234
    context = Mock()
    context.Process.return_value = process
    manager = WorkerManager(
        1,
        fake_serve,
        {},
        context,
        (Mock(), Mock()),
        {},
    )
    manager.kill()
    os_mock.kill.assert_called_once_with(1234, SIGKILL)
def test_restart_all():
    """restart() terminates every transient process and re-spawns it."""
    p1 = Mock()
    p2 = Mock()
    context = Mock()
    # Two spawns at startup plus two more on restart.
    context.Process.side_effect = [p1, p2, p1, p2]
    manager = WorkerManager(
        2,
        fake_serve,
        {},
        context,
        (Mock(), Mock()),
        {},
    )
    assert len(list(manager.transient_processes))
    manager.restart()
    p1.terminate.assert_called_once()
    p2.terminate.assert_called_once()
    # Same names/targets are used for the initial and restarted processes.
    context.Process.assert_has_calls(
        [
            call(
                name="Sanic-Server-0-0",
                target=fake_serve,
                kwargs={"config": {}},
                daemon=True,
            ),
            call(
                name="Sanic-Server-1-0",
                target=fake_serve,
                kwargs={"config": {}},
                daemon=True,
            ),
            call(
                name="Sanic-Server-0-0",
                target=fake_serve,
                kwargs={"config": {}},
                daemon=True,
            ),
            call(
                name="Sanic-Server-1-0",
                target=fake_serve,
                kwargs={"config": {}},
                daemon=True,
            ),
        ]
    )
def test_monitor_all():
    """An '__ALL_PROCESSES__:' message restarts every worker (no files)."""
    p1 = Mock()
    p2 = Mock()
    sub = Mock()
    # The trailing empty string terminates the monitor loop.
    sub.recv.side_effect = ["__ALL_PROCESSES__:", ""]
    context = Mock()
    context.Process.side_effect = [p1, p2]
    manager = WorkerManager(
        2,
        fake_serve,
        {},
        context,
        (Mock(), sub),
        {},
    )
    manager.restart = Mock()  # type: ignore
    manager.wait_for_ack = Mock()  # type: ignore
    manager.monitor()
    manager.restart.assert_called_once_with(
        process_names=None, reloaded_files=""
    )
def test_monitor_all_with_files():
    """The file list after the colon is forwarded to restart()."""
    p1 = Mock()
    p2 = Mock()
    sub = Mock()
    sub.recv.side_effect = ["__ALL_PROCESSES__:foo,bar", ""]
    context = Mock()
    context.Process.side_effect = [p1, p2]
    manager = WorkerManager(
        2,
        fake_serve,
        {},
        context,
        (Mock(), sub),
        {},
    )
    manager.restart = Mock()  # type: ignore
    manager.wait_for_ack = Mock()  # type: ignore
    manager.monitor()
    manager.restart.assert_called_once_with(
        process_names=None, reloaded_files="foo,bar"
    )
def test_monitor_one_process():
    """A message addressed to one process restarts only that process."""
    p1 = Mock()
    p1.name = "Testing"
    p2 = Mock()
    sub = Mock()
    sub.recv.side_effect = [f"{p1.name}:foo,bar", ""]
    context = Mock()
    context.Process.side_effect = [p1, p2]
    manager = WorkerManager(
        2,
        fake_serve,
        {},
        context,
        (Mock(), sub),
        {},
    )
    manager.restart = Mock()  # type: ignore
    manager.wait_for_ack = Mock()  # type: ignore
    manager.monitor()
    manager.restart.assert_called_once_with(
        process_names=[p1.name], reloaded_files="foo,bar"
    )
def test_shutdown_signal():
    """A signal sends the stop sentinel to the monitor pipe and shuts down."""
    pub = Mock()
    manager = WorkerManager(
        1,
        fake_serve,
        {},
        Mock(),
        (pub, Mock()),
        {},
    )
    manager.shutdown = Mock()  # type: ignore
    manager.shutdown_signal(SIGINT, None)
    pub.send.assert_called_with(None)
    manager.shutdown.assert_called_once_with()

View File

@ -0,0 +1,119 @@
from multiprocessing import Event
from os import environ, getpid
from typing import Any, Dict
from unittest.mock import Mock
import pytest
from sanic import Sanic
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.state import WorkerState
@pytest.fixture
def monitor_publisher():
    """Mock stand-in for the monitor publisher pipe end."""
    return Mock()
@pytest.fixture
def worker_state():
    """Empty shared worker-state mapping."""
    return {}
@pytest.fixture
def m(monitor_publisher, worker_state):
    """WorkerMultiplexer for a worker named "Test" (env var set for scope)."""
    # The multiplexer identifies its worker via SANIC_WORKER_NAME.
    environ["SANIC_WORKER_NAME"] = "Test"
    worker_state["Test"] = {}
    yield WorkerMultiplexer(monitor_publisher, worker_state)
    del environ["SANIC_WORKER_NAME"]
def test_has_multiplexer_default(app: Sanic):
    """Default (multi-process) run mode attaches a WorkerMultiplexer."""
    event = Event()

    @app.main_process_start
    async def setup(app, _):
        # Share the event with worker processes via shared_ctx.
        app.shared_ctx.event = event

    @app.after_server_start
    def stop(app):
        if hasattr(app, "m") and isinstance(app.m, WorkerMultiplexer):
            app.shared_ctx.event.set()
        app.stop()

    app.run()
    assert event.is_set()
def test_not_have_multiplexer_single(app: Sanic):
    """single_process mode does not attach a WorkerMultiplexer."""
    event = Event()

    @app.main_process_start
    async def setup(app, _):
        app.shared_ctx.event = event

    @app.after_server_start
    def stop(app):
        if hasattr(app, "m") and isinstance(app.m, WorkerMultiplexer):
            app.shared_ctx.event.set()
        app.stop()

    app.run(single_process=True)
    assert not event.is_set()
def test_not_have_multiplexer_legacy(app: Sanic):
    """The legacy server path does not attach a WorkerMultiplexer."""
    event = Event()

    @app.main_process_start
    async def setup(app, _):
        app.shared_ctx.event = event

    @app.after_server_start
    def stop(app):
        if hasattr(app, "m") and isinstance(app.m, WorkerMultiplexer):
            app.shared_ctx.event.set()
        app.stop()

    app.run(legacy=True)
    assert not event.is_set()
def test_ack(worker_state: Dict[str, Any], m: WorkerMultiplexer):
    """ack() stamps the worker's state as ACKED without dropping keys."""
    worker_state["Test"] = {"foo": "bar"}
    m.ack()
    assert worker_state["Test"] == {"foo": "bar", "state": "ACKED"}
def test_restart_self(monitor_publisher: Mock, m: WorkerMultiplexer):
    """restart() with no argument targets the worker's own name."""
    m.restart()
    monitor_publisher.send.assert_called_once_with("Test")
def test_restart_foo(monitor_publisher: Mock, m: WorkerMultiplexer):
    """restart(name) targets the named process."""
    m.restart("foo")
    monitor_publisher.send.assert_called_once_with("foo")
def test_reload_alias(monitor_publisher: Mock, m: WorkerMultiplexer):
    """reload() behaves as an alias of restart()."""
    m.reload()
    monitor_publisher.send.assert_called_once_with("Test")
def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
    """terminate() publishes the terminate sentinel."""
    m.terminate()
    monitor_publisher.send.assert_called_once_with("__TERMINATE__")
def test_properties(
    monitor_publisher: Mock, worker_state: Dict[str, Any], m: WorkerMultiplexer
):
    """Sanity-check the multiplexer's derived properties."""
    assert m.reload == m.restart
    assert m.pid == getpid()
    assert m.name == "Test"
    assert m.workers == worker_state
    assert m.state == worker_state["Test"]
    assert isinstance(m.state, WorkerState)

View File

@ -0,0 +1,156 @@
import signal
from asyncio import Event
from pathlib import Path
from unittest.mock import Mock
import pytest
from sanic.app import Sanic
from sanic.worker.loader import AppLoader
from sanic.worker.reloader import Reloader
@pytest.fixture
def reloader():
    # Placeholder fixture; individual tests construct their own Reloader.
    ...
@pytest.fixture
def app():
    """Minimal Sanic app with a single no-op route."""
    app = Sanic("Test")

    @app.route("/")
    def handler(_):
        ...

    return app
@pytest.fixture
def app_loader(app):
    """AppLoader wrapping the fixture app as a factory."""
    return AppLoader(factory=lambda: app)
def run_reloader(reloader):
    """Run the (blocking) reloader, stopping it via SIGALRM after ~1s."""
    def stop(*_):
        reloader.stop()

    signal.signal(signal.SIGALRM, stop)
    signal.alarm(1)
    reloader()
def is_python_file(filename):
    """Return True if *filename* (``Path`` or ``str``) names a ``.py`` file.

    :param filename: path to test, either a ``pathlib.Path`` or a string
    :returns: bool
    """
    # Path.suffix includes the leading dot, so compare against ".py".
    # The original compared to "py", which can never match a Path suffix.
    return (isinstance(filename, Path) and (filename.suffix == ".py")) or (
        isinstance(filename, str) and filename.endswith(".py")
    )
def test_reload_send():
    """reload() publishes an all-processes message with the changed files."""
    publisher = Mock()
    reloader = Reloader(publisher, 0.1, set(), Mock())
    reloader.reload("foobar")
    publisher.send.assert_called_once_with("__ALL_PROCESSES__:foobar")
def test_iter_files():
    """files() yields the python files plus all files in extra reload dirs."""
    reloader = Reloader(Mock(), 0.1, set(), Mock())
    len_python_files = len(list(reloader.files()))
    assert len_python_files > 0

    static_dir = Path(__file__).parent.parent / "static"
    len_static_files = len(list(static_dir.glob("**/*")))
    reloader = Reloader(Mock(), 0.1, set({static_dir}), Mock())
    len_total_files = len(list(reloader.files()))
    assert len_static_files > 0
    assert len_total_files == len_python_files + len_static_files
def test_reloader_triggers_start_stop_listeners(
    app: Sanic, app_loader: AppLoader
):
    """The reload process fires its start/stop listeners in order."""
    results = []

    @app.reload_process_start
    def reload_process_start(_):
        results.append("reload_process_start")

    @app.reload_process_stop
    def reload_process_stop(_):
        results.append("reload_process_stop")

    reloader = Reloader(Mock(), 0.1, set(), app_loader)
    run_reloader(reloader)

    assert results == ["reload_process_start", "reload_process_stop"]
def test_not_triggered(app_loader):
    """No file changes means no reload message is published."""
    reload_dir = Path(__file__).parent.parent / "fake"
    publisher = Mock()
    reloader = Reloader(publisher, 0.1, {reload_dir}, app_loader)
    run_reloader(reloader)

    publisher.send.assert_not_called()
def test_triggered(app_loader):
    """Detected file changes publish one all-processes reload message."""
    paths = set()

    def check_file(filename, mtimes):
        # Report a change for exactly two known files, recording which.
        if (isinstance(filename, Path) and (filename.name == "server.py")) or (
            isinstance(filename, str) and "sanic/app.py" in filename
        ):
            paths.add(str(filename))
            return True
        return False

    reload_dir = Path(__file__).parent.parent / "fake"
    publisher = Mock()
    reloader = Reloader(publisher, 0.1, {reload_dir}, app_loader)
    reloader.check_file = check_file  # type: ignore
    run_reloader(reloader)

    assert len(paths) == 2
    publisher.send.assert_called()
    call_arg = publisher.send.call_args_list[0][0][0]
    # Message format: "__ALL_PROCESSES__:<path>,<path>"
    assert call_arg.startswith("__ALL_PROCESSES__:")
    assert call_arg.count(",") == 1
    for path in paths:
        assert str(path) in call_arg
def test_reloader_triggers_reload_listeners(app: Sanic, app_loader: AppLoader):
    """before/after reload-trigger listeners fire around a change check."""
    before = Event()
    after = Event()

    def check_file(filename, mtimes):
        # Report "changed" until the after-listener has fired once.
        return not after.is_set()

    @app.before_reload_trigger
    async def before_reload_trigger(_):
        before.set()

    @app.after_reload_trigger
    async def after_reload_trigger(_):
        after.set()

    reloader = Reloader(Mock(), 0.1, set(), app_loader)
    reloader.check_file = check_file  # type: ignore
    run_reloader(reloader)

    assert before.is_set()
    assert after.is_set()
def test_check_file(tmp_path):
    """check_file reports a change only when a recorded mtime differs."""
    current = tmp_path / "testing.txt"
    current.touch()
    mtimes = {}
    # First sighting records the mtime but is not a "change".
    assert Reloader.check_file(current, mtimes) is False
    assert len(mtimes) == 1
    assert Reloader.check_file(current, mtimes) is False
    # Pretend the recorded mtime is older than the file's actual mtime.
    mtimes[current] = mtimes[current] - 1
    assert Reloader.check_file(current, mtimes) is True

View File

@ -0,0 +1,53 @@
from unittest.mock import Mock, call, patch
import pytest
from sanic.app import Sanic
from sanic.http.constants import HTTP
from sanic.server.runners import _run_server_forever, serve
@patch("sanic.server.runners._serve_http_1")
@patch("sanic.server.runners._serve_http_3")
def test_run_http_1(_serve_http_3: Mock, _serve_http_1: Mock, app: Sanic):
    """serve() defaults to the HTTP/1 server."""
    serve("", 0, app)
    _serve_http_3.assert_not_called()
    _serve_http_1.assert_called_once()
@patch("sanic.server.runners._serve_http_1")
@patch("sanic.server.runners._serve_http_3")
def test_run_http_3(_serve_http_3: Mock, _serve_http_1: Mock, app: Sanic):
    """serve() dispatches to the HTTP/3 server when requested."""
    serve("", 0, app, version=HTTP.VERSION_3)
    _serve_http_1.assert_not_called()
    _serve_http_3.assert_called_once()
@patch("sanic.server.runners.remove_unix_socket")
@pytest.mark.parametrize("do_cleanup", (True, False))
def test_run_server_forever(remove_unix_socket: Mock, do_cleanup: bool):
    """The forever-loop runs stop listeners, optional cleanup, and closes."""
    loop = Mock()
    cleanup = Mock()
    # Simulate Ctrl-C breaking out of loop.run_forever().
    loop.run_forever = Mock(side_effect=KeyboardInterrupt())
    before_stop = Mock()
    before_stop.return_value = Mock()
    after_stop = Mock()
    after_stop.return_value = Mock()
    unix = Mock()

    _run_server_forever(
        loop, before_stop, after_stop, cleanup if do_cleanup else None, unix
    )

    loop.run_forever.assert_called_once_with()
    # Both stop-listener coroutines are awaited on the loop.
    loop.run_until_complete.assert_has_calls(
        [call(before_stop.return_value), call(after_stop.return_value)]
    )
    if do_cleanup:
        cleanup.assert_called_once_with()
    else:
        cleanup.assert_not_called()
    remove_unix_socket.assert_called_once_with(unix)
    loop.close.assert_called_once_with()

View File

@ -0,0 +1,82 @@
# 18
# 21-29
# 26
# 36-37
# 42
# 55
# 38->
import logging
from ctypes import c_int32
from multiprocessing import Pipe, Queue, Value
from os import environ
from typing import Any
import pytest
from sanic.types.shared_ctx import SharedContext
@pytest.mark.parametrize(
    "item,okay",
    (
        (Pipe(), True),
        (Value("i", 0), True),
        (Queue(), True),
        (c_int32(1), True),
        (1, False),
        ("thing", False),
        (object(), False),
    ),
)
def test_set_items(item: Any, okay: bool, caplog):
    """Process-shareable objects attach silently; unsafe ones log a warning."""
    ctx = SharedContext()
    with caplog.at_level(logging.INFO):
        ctx.item = item
    assert ctx.is_locked is False
    # Parenthesized: the original `== 0 if okay else 1` bound the ternary to
    # the whole comparison, so the not-okay branch asserted the constant 1
    # (always truthy) and never checked the record count.
    assert len(caplog.record_tuples) == (0 if okay else 1)
    if not okay:
        assert caplog.record_tuples[0][0] == "sanic.error"
        assert caplog.record_tuples[0][1] == logging.WARNING
        assert "Unsafe object" in caplog.record_tuples[0][2]
@pytest.mark.parametrize(
    "item",
    (
        Pipe(),
        Value("i", 0),
        Queue(),
        c_int32(1),
        1,
        "thing",
        object(),
    ),
)
def test_set_items_in_worker(item: Any, caplog):
    """Inside a worker process, no safety warnings are emitted at all."""
    ctx = SharedContext()
    # SANIC_WORKER_NAME marks the current process as a worker.
    environ["SANIC_WORKER_NAME"] = "foo"
    with caplog.at_level(logging.INFO):
        ctx.item = item
    del environ["SANIC_WORKER_NAME"]
    assert ctx.is_locked is False
    assert len(caplog.record_tuples) == 0
def test_lock():
    """lock() freezes the context; further attribute sets raise."""
    ctx = SharedContext()
    assert ctx.is_locked is False
    ctx.lock()
    assert ctx.is_locked is True
    message = "Cannot set item on locked SharedContext object"
    with pytest.raises(RuntimeError, match=message):
        ctx.item = 1

View File

@ -0,0 +1,27 @@
from pathlib import Path
from sanic.server.socket import (
bind_unix_socket,
configure_socket,
remove_unix_socket,
)
def test_setup_and_teardown_unix():
    """bind_unix_socket creates the socket file; remove_unix_socket removes it."""
    socket_address = "./test.sock"
    path = Path.cwd() / socket_address
    assert not path.exists()
    bind_unix_socket(socket_address)
    assert path.exists()
    remove_unix_socket(socket_address)
    assert not path.exists()
def test_configure_socket():
    """configure_socket with a unix path binds (and can unbind) the socket."""
    socket_address = "./test.sock"
    path = Path.cwd() / socket_address
    assert not path.exists()
    configure_socket({"unix": socket_address, "backlog": 100})
    assert path.exists()
    remove_unix_socket(socket_address)
    assert not path.exists()

View File

@ -0,0 +1,91 @@
import pytest
from sanic.worker.state import WorkerState
def gen_state(**kwargs):
    """Build a WorkerState for worker "foo" preloaded with *kwargs*."""
    return WorkerState({"foo": kwargs}, "foo")
def test_set_get_state():
    """Item assignment writes through to the backing per-worker dict."""
    state = gen_state()
    state["additional"] = 123
    assert state["additional"] == 123
    assert state.get("additional") == 123
    assert state._state == {"foo": {"additional": 123}}
def test_del_state():
    """Deleting a key removes it from the backing dict."""
    state = gen_state(one=1)
    assert state["one"] == 1
    del state["one"]
    assert state._state == {"foo": {}}
def test_iter_state():
    """Iteration yields the worker's keys."""
    result = [item for item in gen_state(one=1, two=2)]
    assert result == ["one", "two"]
def test_state_len():
    """len() reflects the number of keys in the worker's state slice."""
    # Exercise WorkerState.__len__ directly (the file's test_state_update
    # already relies on len(state)) instead of measuring the length of an
    # intermediate list built by iteration, which only re-tests __iter__.
    assert len(gen_state(one=1, two=2)) == 2
def test_state_repr():
    """repr() matches the repr of the equivalent plain dict."""
    assert repr(gen_state(one=1, two=2)) == repr({"one": 1, "two": 2})
def test_state_eq():
    """Equality compares against plain dicts by content."""
    state = gen_state(one=1, two=2)
    assert state == {"one": 1, "two": 2}
    assert state != {"one": 1}
def test_state_keys():
    """keys() matches the equivalent dict's keys."""
    assert list(gen_state(one=1, two=2).keys()) == list(
        {"one": 1, "two": 2}.keys()
    )
def test_state_values():
    """values() matches the equivalent dict's values."""
    assert list(gen_state(one=1, two=2).values()) == list(
        {"one": 1, "two": 2}.values()
    )
def test_state_items():
    """items() matches the equivalent dict's items."""
    assert list(gen_state(one=1, two=2).items()) == list(
        {"one": 1, "two": 2}.items()
    )
def test_state_update():
    """update() merges a mapping into the worker's state."""
    state = gen_state()
    assert len(state) == 0
    state.update({"nine": 9})
    assert len(state) == 1
    assert state["nine"] == 9
def test_state_pop():
    """pop() is deliberately unsupported on WorkerState."""
    state = gen_state(one=1)
    with pytest.raises(NotImplementedError):
        state.pop()
def test_state_full():
    """full() exposes the entire shared state, keyed by worker name."""
    assert gen_state(one=1).full() == {"foo": {"one": 1}}
@pytest.mark.parametrize("key", WorkerState.RESTRICTED)
def test_state_restricted_operation(key):
    """Restricted keys may not be set, deleted, or merged in via update().

    The original test performed the bare ``del state[key]`` outside any
    ``pytest.raises`` block, so the LookupError raised by __delitem__ would
    fail the test rather than being asserted as the expected outcome.
    """
    state = gen_state()
    message = f"Cannot set restricted key on WorkerState: {key}"

    with pytest.raises(LookupError, match=message):
        state[key] = "Nope"

    # NOTE(review): assumes __delitem__ raises with the same message as
    # __setitem__ — confirm against WorkerState's restricted-key check.
    with pytest.raises(LookupError, match=message):
        del state[key]

    with pytest.raises(LookupError, match=message):
        state.update({"okay": True, key: "bad"})

View File

@ -0,0 +1,113 @@
from os import environ
from unittest.mock import Mock, patch
import pytest
from sanic.app import Sanic
from sanic.worker.loader import AppLoader
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.serve import worker_serve
@pytest.fixture
def mock_app():
    """A Mock standing in for a Sanic app with one worker and one server."""
    app = Mock()
    app.state.workers = 1
    app.listeners = {"main_process_ready": []}
    app.get_motd_data.return_value = ({"packages": ""}, {})

    server_info = Mock()
    server_info.settings = {"app": app}
    app.state.server_info = [server_info]

    return app
def args(app, **kwargs):
    """Build the kwargs for worker_serve(); per-test kwargs override defaults."""
    defaults = {
        "host": "127.0.0.1",
        "port": 9999,
        "app_name": "test_config_app",
        "monitor_publisher": None,
        "app_loader": AppLoader(factory=lambda: app),
    }
    return {**defaults, **kwargs}
def test_config_app(mock_app: Mock):
    """A config mapping passed to worker_serve() is applied to the app."""
    with patch("sanic.worker.serve._serve_http_1"):
        worker_serve(**args(mock_app, config={"FOO": "BAR"}))

    mock_app.update_config.assert_called_once_with({"FOO": "BAR"})
def test_bad_process(mock_app: Mock):
    """In a worker process (env var set), serving without the monitor
    publisher or the worker state is a RuntimeError."""
    environ["SANIC_WORKER_NAME"] = "FOO"

    with pytest.raises(
        RuntimeError, match="No restart publisher found in worker process"
    ):
        worker_serve(**args(mock_app))

    with pytest.raises(
        RuntimeError, match="No worker state found in worker process"
    ):
        worker_serve(**args(mock_app, monitor_publisher=Mock()))

    del environ["SANIC_WORKER_NAME"]
def test_has_multiplexer(app: Sanic):
    """Inside a worker process, serving attaches a WorkerMultiplexer."""
    environ["SANIC_WORKER_NAME"] = "FOO"
    Sanic.register_app(app)

    with patch("sanic.worker.serve._serve_http_1"):
        worker_serve(
            **args(app, monitor_publisher=Mock(), worker_state=Mock())
        )

    assert isinstance(app.multiplexer, WorkerMultiplexer)

    del environ["SANIC_WORKER_NAME"]
@patch("sanic.mixins.startup.WorkerManager")
def test_serve_app_implicit(wm: Mock, app):
    """Serving a prepared app hands its worker count to WorkerManager.

    The original body was a bare ``==`` comparison — a no-op that could
    never fail. Assert against the first positional argument of the call
    (the worker count).
    """
    app.prepare()
    Sanic.serve()

    assert wm.call_args[0][0] == app.state.workers
@patch("sanic.mixins.startup.WorkerManager")
def test_serve_app_explicit(wm: Mock, mock_app):
    """Serving an explicitly-passed app hands its worker count to WorkerManager.

    The original body was a bare ``==`` comparison — a no-op that could
    never fail. Assert against the first positional argument of the call.
    """
    Sanic.serve(mock_app)

    assert wm.call_args[0][0] == mock_app.state.workers
@patch("sanic.mixins.startup.WorkerManager")
def test_serve_app_loader(wm: Mock, mock_app):
    """Serving via an AppLoader hands the app's worker count to WorkerManager.

    The original body was a bare ``==`` comparison — a no-op that could
    never fail — followed by commented-out dead code; both are replaced
    with a real assertion on the first positional argument of the call.
    """
    Sanic.serve(app_loader=AppLoader(factory=lambda: mock_app))

    assert wm.call_args[0][0] == mock_app.state.workers
@patch("sanic.mixins.startup.WorkerManager")
def test_serve_app_factory(wm: Mock, mock_app):
    """Serving via a bare factory hands the app's worker count to WorkerManager.

    The original body was a bare ``==`` comparison — a no-op that could
    never fail. Assert against the first positional argument of the call.
    """
    Sanic.serve(factory=lambda: mock_app)

    assert wm.call_args[0][0] == mock_app.state.workers
@patch("sanic.mixins.startup.WorkerManager")
@patch("sanic.mixins.startup.Inspector")
@pytest.mark.parametrize("config", (True, False))
def test_serve_with_inspector(
    Inspector: Mock, WorkerManager: Mock, mock_app: Mock, config: bool
):
    # When INSPECTOR is enabled, serving must build an Inspector and register
    # it with the manager as a non-transient process; when disabled, neither
    # the Inspector class nor manage() may be touched.
    mock_app.config.INSPECTOR = config
    inspector = Mock()
    Inspector.return_value = inspector
    # Make the mocked class "construct" itself, so the manager instance that
    # Sanic.serve() creates is the same object we assert .manage() calls on.
    WorkerManager.return_value = WorkerManager
    Sanic.serve(mock_app)
    if config:
        Inspector.assert_called_once()
        WorkerManager.manage.assert_called_once_with(
            "Inspector", inspector, {}, transient=False
        )
    else:
        Inspector.assert_not_called()
        WorkerManager.manage.assert_not_called()

View File

@ -13,7 +13,7 @@ allowlist_externals =
pytest pytest
coverage coverage
commands = commands =
pytest {posargs:tests --cov sanic} coverage run --source ./sanic -m pytest {posargs:tests}
- coverage combine --append - coverage combine --append
coverage report -m -i coverage report -m -i
coverage html -i coverage html -i
@ -43,7 +43,7 @@ markers =
[testenv:security] [testenv:security]
commands = commands =
bandit --recursive sanic --skip B404,B101 --exclude sanic/reloader_helpers.py bandit --recursive sanic --skip B404,B101
[testenv:docs] [testenv:docs]
platform = linux|linux2|darwin platform = linux|linux2|darwin
@ -54,4 +54,6 @@ commands =
[testenv:coverage] [testenv:coverage]
commands = commands =
pytest tests --cov=./sanic --cov-report=xml coverage run --source ./sanic -m pytest {posargs:tests}
- coverage combine --append
coverage xml -i