Compare commits


10 Commits

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
| Adam Hopkins | f83553be9e | Remove unused import | 2022-12-12 15:12:47 +02:00 |
| Adam Hopkins | 0c3527b8b2 | Cleanup typing | 2022-12-12 12:20:55 +02:00 |
| Adam Hopkins | 232bbce1e0 | Cleanup tests | 2022-12-12 12:04:12 +02:00 |
| Adam Hopkins | f034a31d29 | Reorganize tests | 2022-12-12 11:44:42 +02:00 |
| Adam Hopkins | 3536c0af27 | Fix caps on worker prefix | 2022-12-12 11:29:02 +02:00 |
| Adam Hopkins | 0e7ee94574 | Add test | 2022-12-12 11:26:29 +02:00 |
| Adam Hopkins | 4e4e2b036b | Merge branch 'main' of github.com:sanic-org/sanic into monitor-restart | 2022-12-12 09:41:09 +02:00 |
| Adam Hopkins | 7f682cea02 | Add ordering config | 2022-12-11 10:59:30 +02:00 |
| Adam Hopkins | ae1669cd8f | Merge branch 'main' of github.com:sanic-org/sanic into monitor-restart | 2022-12-11 10:38:30 +02:00 |
| Adam Hopkins | 3c4c136090 | Wait for new process to ACK before termination of old on restart | 2022-12-08 14:46:45 +02:00 |
70 changed files with 609 additions and 1677 deletions

View File

@@ -20,7 +20,6 @@ jobs:
- { python-version: 3.8, tox-env: security}
- { python-version: 3.9, tox-env: security}
- { python-version: "3.10", tox-env: security}
- { python-version: "3.11", tox-env: security}
steps:
- name: Checkout the repository
uses: actions/checkout@v2

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
config:
- {python-version: "3.10", tox-env: "docs"}
- {python-version: "3.8", tox-env: "docs"}
fail-fast: false

View File

@@ -16,7 +16,7 @@ jobs:
matrix:
os: [ubuntu-latest]
config:
- { python-version: "3.10", tox-env: lint}
- { python-version: 3.8, tox-env: lint}
steps:
- name: Checkout the repository
uses: actions/checkout@v2

View File

@@ -1,47 +0,0 @@
name: Python 3.11 Tests
on:
pull_request:
branches:
- main
- "*LTS"
types: [opened, synchronize, reopened, ready_for_review]
jobs:
testPy311:
if: github.event.pull_request.draft == false
name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
# os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest]
config:
- {
python-version: "3.11",
tox-env: py311,
ignore-error-flake: "false",
command-timeout: "0",
}
- {
python-version: "3.11",
tox-env: py311-no-ext,
ignore-error-flake: "true",
command-timeout: "600000",
}
steps:
- name: Checkout the Repository
uses: actions/checkout@v2
id: checkout-branch
- name: Run Unit Tests
uses: harshanarayana/custom-actions@main
with:
python-version: ${{ matrix.config.python-version }}
test-infra-tool: tox
test-infra-version: latest
action: tests
test-additional-args: "-e=${{ matrix.config.tox-env }},-vv=''"
experimental-ignore-error: "${{ matrix.config.ignore-error-flake }}"
command-timeout: "${{ matrix.config.command-timeout }}"
test-failure-retry: "3"

View File

@@ -20,7 +20,6 @@ jobs:
- { python-version: 3.8, tox-env: type-checking}
- { python-version: 3.9, tox-env: type-checking}
- { python-version: "3.10", tox-env: type-checking}
- { python-version: "3.11", tox-env: type-checking}
steps:
- name: Checkout the repository
uses: actions/checkout@v2

View File

@@ -19,7 +19,6 @@ jobs:
- { python-version: 3.8, tox-env: py38-no-ext }
- { python-version: 3.9, tox-env: py39-no-ext }
- { python-version: "3.10", tox-env: py310-no-ext }
- { python-version: "3.11", tox-env: py310-no-ext }
- { python-version: pypy-3.7, tox-env: pypy37-no-ext }
steps:

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
fail-fast: true
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.7", "3.8", "3.9", "3.10"]
steps:
- name: Checkout repository

View File

@@ -11,7 +11,7 @@ jobs:
strategy:
fail-fast: true
matrix:
python-version: ["3.10"]
python-version: ["3.8"]
steps:
- name: Checkout Repository

View File

@@ -313,8 +313,8 @@ Version 21.3.0
`#2074 <https://github.com/sanic-org/sanic/pull/2074>`_
Performance adjustments in ``handle_request_``
Version 20.12.3
---------------
Version 20.12.3 🔷
------------------
`Current LTS version`
@@ -350,8 +350,8 @@ Version 19.12.5
`#2027 <https://github.com/sanic-org/sanic/pull/2027>`_
Remove old chardet requirement, add in hard multidict requirement
Version 20.12.0
---------------
Version 20.12.0 🔹
-----------------
**Features**

View File

@@ -7,15 +7,14 @@ Sanic releases long term support release once a year in December. LTS releases r
| Version | LTS | Supported |
| ------- | ------------- | ----------------------- |
| 22.12 | until 2024-12 | :white_check_mark: |
| 22.9 | | :x: |
| 22.9 | | :white_check_mark: |
| 22.6 | | :x: |
| 22.3 | | :x: |
| 21.12 | until 2023-12 | :ballot_box_with_check: |
| 21.12 | until 2023-12 | :white_check_mark: |
| 21.9 | | :x: |
| 21.6 | | :x: |
| 21.3 | | :x: |
| 20.12 | | :x: |
| 20.12 | until 2022-12 | :ballot_box_with_check: |
| 20.9 | | :x: |
| 20.6 | | :x: |
| 20.3 | | :x: |

View File

@@ -1,7 +1,6 @@
📜 Changelog
============
.. mdinclude:: ./releases/22/22.12.md
.. mdinclude:: ./releases/22/22.9.md
.. mdinclude:: ./releases/22/22.6.md
.. mdinclude:: ./releases/22/22.3.md

View File

@@ -1,55 +0,0 @@
## Version 22.12.0 🔶
_Current version_
### Features
- [#2569](https://github.com/sanic-org/sanic/pull/2569) Add `JSONResponse` class with some convenient methods when updating a response object
- [#2598](https://github.com/sanic-org/sanic/pull/2598) Change `uvloop` requirement to `>=0.15.0`
- [#2609](https://github.com/sanic-org/sanic/pull/2609) Add compatibility with `websockets` v11.0
- [#2610](https://github.com/sanic-org/sanic/pull/2610) Kill server early on worker error
- Raise deadlock timeout to 30s
- [#2617](https://github.com/sanic-org/sanic/pull/2617) Scale number of running server workers
- [#2621](https://github.com/sanic-org/sanic/pull/2621) [#2634](https://github.com/sanic-org/sanic/pull/2634) Send `SIGKILL` on subsequent `ctrl+c` to force worker exit
- [#2622](https://github.com/sanic-org/sanic/pull/2622) Add API to restart all workers from the multiplexer
- [#2624](https://github.com/sanic-org/sanic/pull/2624) Default to `spawn` for all subprocesses unless specifically set:
```python
from sanic import Sanic
Sanic.start_method = "fork"
```
- [#2625](https://github.com/sanic-org/sanic/pull/2625) Filename normalisation of form-data/multipart file uploads
- [#2626](https://github.com/sanic-org/sanic/pull/2626) Move to HTTP Inspector:
- Remote access to inspect running Sanic instances
- TLS support for encrypted calls to Inspector
- Authentication to Inspector with API key
- Ability to extend Inspector with custom commands
- [#2632](https://github.com/sanic-org/sanic/pull/2632) Control order of restart operations
- [#2633](https://github.com/sanic-org/sanic/pull/2633) Move reload interval to class variable
- [#2636](https://github.com/sanic-org/sanic/pull/2636) Add `priority` to `register_middleware` method
- [#2639](https://github.com/sanic-org/sanic/pull/2639) Add `unquote` to `add_route` method
- [#2640](https://github.com/sanic-org/sanic/pull/2640) ASGI websockets to receive `text` or `bytes`
### Bugfixes
- [#2607](https://github.com/sanic-org/sanic/pull/2607) Force socket shutdown before close to allow rebinding
- [#2590](https://github.com/sanic-org/sanic/pull/2590) Use actual `StrEnum` in Python 3.11+
- [#2615](https://github.com/sanic-org/sanic/pull/2615) Ensure middleware executes only once per request timeout
- [#2627](https://github.com/sanic-org/sanic/pull/2627) Crash ASGI application on lifespan failure
- [#2635](https://github.com/sanic-org/sanic/pull/2635) Resolve error with low-level server creation on Windows
### Deprecations and Removals
- [#2608](https://github.com/sanic-org/sanic/pull/2608) [#2630](https://github.com/sanic-org/sanic/pull/2630) Signal conditions and triggers saved on `signal.extra`
- [#2626](https://github.com/sanic-org/sanic/pull/2626) Move to HTTP Inspector
- 🚨 *BREAKING CHANGE*: Moves the Inspector to a Sanic app from a simple TCP socket with a custom protocol
- *DEPRECATE*: The `--inspect*` commands have been deprecated in favor of `inspect ...` commands
- [#2628](https://github.com/sanic-org/sanic/pull/2628) Replace deprecated `distutils.strtobool`
### Developer infrastructure
- [#2612](https://github.com/sanic-org/sanic/pull/2612) Add CI testing for Python 3.11
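
For illustration, a minimal sketch of the `priority` keyword added to `register_middleware` in #2636 (see the `sanic/app.py` diff further down); the app name and middleware handlers here are placeholders:

```python
from sanic import Sanic
from sanic.request import Request

app = Sanic("ExampleApp")


async def check_auth(request: Request):
    ...  # placeholder request middleware


async def add_header(request: Request, response):
    ...  # placeholder response middleware


# priority is keyword-only; middleware registered later can still be ordered
# ahead of earlier registrations by giving it a higher priority value.
app.register_middleware(check_auth, "request", priority=99)
app.register_middleware(add_header, "response", priority=99)
```

The intent is that higher-priority middleware runs earlier within its phase, independent of registration order.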

View File

@@ -1,33 +1,6 @@
## Version 22.9.1
## Version 22.9.0 🔶
### Features
- [#2585](https://github.com/sanic-org/sanic/pull/2585) Improved error message when no applications have been registered
### Bugfixes
- [#2578](https://github.com/sanic-org/sanic/pull/2578) Add certificate loader for in process certificate creation
- [#2591](https://github.com/sanic-org/sanic/pull/2591) Do not use sentinel identity for `spawn` compatibility
- [#2592](https://github.com/sanic-org/sanic/pull/2592) Fix properties in nested blueprint groups
- [#2595](https://github.com/sanic-org/sanic/pull/2595) Introduce sleep interval on new worker reloader
### Deprecations and Removals
### Developer infrastructure
- [#2588](https://github.com/sanic-org/sanic/pull/2588) Markdown templates on issue forms
### Improved Documentation
- [#2556](https://github.com/sanic-org/sanic/pull/2556) v22.9 documentation
- [#2582](https://github.com/sanic-org/sanic/pull/2582) Cleanup documentation on Windows support
## Version 22.9.0
_Current version_
### Features

View File

@@ -22,6 +22,5 @@ module = [
"httptools.*",
"trustme.*",
"sanic_routing.*",
"aioquic.*",
]
ignore_missing_imports = true

View File

@@ -1 +1 @@
__version__ = "22.12.0"
__version__ = "22.12.0a0"

View File

@@ -61,7 +61,7 @@ from sanic.exceptions import (
URLBuildError,
)
from sanic.handlers import ErrorHandler
from sanic.helpers import Default, _default
from sanic.helpers import Default
from sanic.http import Stage
from sanic.log import (
LOGGING_CONFIG_DEFAULTS,
@@ -69,7 +69,6 @@ from sanic.log import (
error_logger,
logger,
)
from sanic.middleware import Middleware, MiddlewareLocation
from sanic.mixins.listeners import ListenerEvent
from sanic.mixins.startup import StartupMixin
from sanic.models.futures import (
@@ -141,7 +140,6 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
"configure_logging",
"ctx",
"error_handler",
"inspector_class",
"go_fast",
"listeners",
"multiplexer",
@@ -164,7 +162,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
def __init__(
self,
name: Optional[str] = None,
name: str = None,
config: Optional[Config] = None,
ctx: Optional[Any] = None,
router: Optional[Router] = None,
@@ -178,7 +176,6 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
dumps: Optional[Callable[..., AnyStr]] = None,
loads: Optional[Callable[..., Any]] = None,
inspector: bool = False,
inspector_class: Optional[Type[Inspector]] = None,
) -> None:
super().__init__(name=name)
# logging
@@ -214,7 +211,6 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
self.configure_logging: bool = configure_logging
self.ctx: Any = ctx or SimpleNamespace()
self.error_handler: ErrorHandler = error_handler or ErrorHandler()
self.inspector_class: Type[Inspector] = inspector_class or Inspector
self.listeners: Dict[str, List[ListenerType[Any]]] = defaultdict(list)
self.named_request_middleware: Dict[str, Deque[MiddlewareType]] = {}
self.named_response_middleware: Dict[str, Deque[MiddlewareType]] = {}
@@ -295,12 +291,8 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
return listener
def register_middleware(
self,
middleware: Union[MiddlewareType, Middleware],
attach_to: str = "request",
*,
priority: Union[Default, int] = _default,
) -> Union[MiddlewareType, Middleware]:
self, middleware: MiddlewareType, attach_to: str = "request"
) -> MiddlewareType:
"""
Register an application level middleware that will be attached
to all the API URLs registered under this application.
@@ -316,37 +308,19 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
**response** - Invoke before the response is returned back
:return: decorated method
"""
retval = middleware
location = MiddlewareLocation[attach_to.upper()]
if not isinstance(middleware, Middleware):
middleware = Middleware(
middleware,
location=location,
priority=priority if isinstance(priority, int) else 0,
)
elif middleware.priority != priority and isinstance(priority, int):
middleware = Middleware(
middleware.func,
location=middleware.location,
priority=priority,
)
if location is MiddlewareLocation.REQUEST:
if attach_to == "request":
if middleware not in self.request_middleware:
self.request_middleware.append(middleware)
if location is MiddlewareLocation.RESPONSE:
if attach_to == "response":
if middleware not in self.response_middleware:
self.response_middleware.appendleft(middleware)
return retval
return middleware
def register_named_middleware(
self,
middleware: MiddlewareType,
route_names: Iterable[str],
attach_to: str = "request",
*,
priority: Union[Default, int] = _default,
):
"""
Method for attaching middleware to specific routes. This is mainly an
@@ -360,35 +334,19 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
defaults to "request"
:type attach_to: str, optional
"""
retval = middleware
location = MiddlewareLocation[attach_to.upper()]
if not isinstance(middleware, Middleware):
middleware = Middleware(
middleware,
location=location,
priority=priority if isinstance(priority, int) else 0,
)
elif middleware.priority != priority and isinstance(priority, int):
middleware = Middleware(
middleware.func,
location=middleware.location,
priority=priority,
)
if location is MiddlewareLocation.REQUEST:
if attach_to == "request":
for _rn in route_names:
if _rn not in self.named_request_middleware:
self.named_request_middleware[_rn] = deque()
if middleware not in self.named_request_middleware[_rn]:
self.named_request_middleware[_rn].append(middleware)
if location is MiddlewareLocation.RESPONSE:
if attach_to == "response":
for _rn in route_names:
if _rn not in self.named_response_middleware:
self.named_response_middleware[_rn] = deque()
if middleware not in self.named_response_middleware[_rn]:
self.named_response_middleware[_rn].appendleft(middleware)
return retval
return middleware
def _apply_exception_handler(
self,
@@ -1573,7 +1531,6 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
self.state.is_started = True
def ack(self):
if hasattr(self, "multiplexer"):
self.multiplexer.ack()

View File

@@ -9,7 +9,7 @@ from sanic.compat import Header
from sanic.exceptions import ServerError
from sanic.helpers import Default
from sanic.http import Stage
from sanic.log import error_logger, logger
from sanic.log import logger
from sanic.models.asgi import ASGIReceive, ASGIScope, ASGISend, MockTransport
from sanic.request import Request
from sanic.response import BaseHTTPResponse
@@ -85,26 +85,12 @@ class Lifespan:
) -> None:
message = await receive()
if message["type"] == "lifespan.startup":
try:
await self.startup()
except Exception as e:
error_logger.exception(e)
await send(
{"type": "lifespan.startup.failed", "message": str(e)}
)
else:
await send({"type": "lifespan.startup.complete"})
message = await receive()
if message["type"] == "lifespan.shutdown":
try:
await self.shutdown()
except Exception as e:
error_logger.exception(e)
await send(
{"type": "lifespan.shutdown.failed", "message": str(e)}
)
else:
await send({"type": "lifespan.shutdown.complete"})

View File

@@ -1,6 +1,6 @@
import re
from typing import Any, Optional
from typing import Any
from sanic.base.meta import SanicMeta
from sanic.exceptions import SanicException
@@ -24,9 +24,7 @@ class BaseSanic(
):
__slots__ = ("name",)
def __init__(
self, name: Optional[str] = None, *args: Any, **kwargs: Any
) -> None:
def __init__(self, name: str = None, *args: Any, **kwargs: Any) -> None:
class_name = self.__class__.__name__
if name is None:

View File

@@ -442,7 +442,7 @@ class Blueprint(BaseSanic):
events.add(signal.ctx.event)
return asyncio.wait(
[asyncio.create_task(event.wait()) for event in events],
[event.wait() for event in events],
return_when=asyncio.FIRST_COMPLETED,
timeout=timeout,
)

View File

@@ -3,21 +3,23 @@ import os
import shutil
import sys
from argparse import Namespace
from argparse import ArgumentParser, RawTextHelpFormatter
from functools import partial
from textwrap import indent
from typing import List, Union, cast
from typing import Any, List, Union
from sanic.app import Sanic
from sanic.application.logo import get_logo
from sanic.cli.arguments import Group
from sanic.cli.base import SanicArgumentParser, SanicHelpFormatter
from sanic.cli.inspector import make_inspector_parser
from sanic.cli.inspector_client import InspectorClient
from sanic.log import Colors, error_logger
from sanic.log import error_logger
from sanic.worker.inspector import inspect
from sanic.worker.loader import AppLoader
class SanicArgumentParser(ArgumentParser):
...
class SanicCLI:
DESCRIPTION = indent(
f"""
@@ -44,7 +46,7 @@ Or, a path to a directory to run as a simple HTTP server:
self.parser = SanicArgumentParser(
prog="sanic",
description=self.DESCRIPTION,
formatter_class=lambda prog: SanicHelpFormatter(
formatter_class=lambda prog: RawTextHelpFormatter(
prog,
max_help_position=36 if width > 96 else 24,
indent_increment=4,
@@ -56,27 +58,16 @@ Or, a path to a directory to run as a simple HTTP server:
self.main_process = (
os.environ.get("SANIC_RELOADER_PROCESS", "") != "true"
)
self.args: Namespace = Namespace()
self.args: List[Any] = []
self.groups: List[Group] = []
self.inspecting = False
def attach(self):
if len(sys.argv) > 1 and sys.argv[1] == "inspect":
self.inspecting = True
self.parser.description = get_logo(True)
make_inspector_parser(self.parser)
return
for group in Group._registry:
instance = group.create(self.parser)
instance.attach()
self.groups.append(instance)
def run(self, parse_args=None):
if self.inspecting:
self._inspector()
return
legacy_version = False
if not parse_args:
# This is to provide backwards compat -v to display version
@@ -95,21 +86,36 @@ Or, a path to a directory to run as a simple HTTP server:
self.args = self.parser.parse_args(args=parse_args)
self._precheck()
app_loader = AppLoader(
self.args.module, self.args.factory, self.args.simple, self.args
self.args.module,
self.args.factory,
self.args.simple,
self.args,
)
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
self._inspector_legacy(app_loader)
return
try:
app = self._get_app(app_loader)
kwargs = self._build_run_kwargs()
except ValueError as e:
error_logger.exception(f"Failed to run app: {e}")
else:
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
os.environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"
else:
for http_version in self.args.http:
app.prepare(**kwargs, version=http_version)
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
action = self.args.trigger or (
"raw" if self.args.inspect_raw else "pretty"
)
inspect(
app.config.INSPECTOR_HOST,
app.config.INSPECTOR_PORT,
action,
)
del os.environ["SANIC_IGNORE_PRODUCTION_WARNING"]
return
if self.args.single:
serve = Sanic.serve_single
elif self.args.legacy:
@@ -118,64 +124,6 @@ Or, a path to a directory to run as a simple HTTP server:
serve = partial(Sanic.serve, app_loader=app_loader)
serve(app)
def _inspector_legacy(self, app_loader: AppLoader):
host = port = None
module = cast(str, self.args.module)
if ":" in module:
maybe_host, maybe_port = module.rsplit(":", 1)
if maybe_port.isnumeric():
host, port = maybe_host, int(maybe_port)
if not host:
app = self._get_app(app_loader)
host, port = app.config.INSPECTOR_HOST, app.config.INSPECTOR_PORT
action = self.args.trigger or "info"
InspectorClient(
str(host), int(port or 6457), False, self.args.inspect_raw, ""
).do(action)
sys.stdout.write(
f"\n{Colors.BOLD}{Colors.YELLOW}WARNING:{Colors.END} "
"You are using the legacy CLI command that will be removed in "
f"{Colors.RED}v23.3{Colors.END}. See "
"https://sanic.dev/en/guide/release-notes/v22.12.html"
"#deprecations-and-removals or checkout the new "
"style commands:\n\n\t"
f"{Colors.YELLOW}sanic inspect --help{Colors.END}\n"
)
def _inspector(self):
args = sys.argv[2:]
self.args, unknown = self.parser.parse_known_args(args=args)
if unknown:
for arg in unknown:
if arg.startswith("--"):
try:
key, value = arg.split("=")
key = key.lstrip("-")
except ValueError:
value = False if arg.startswith("--no-") else True
key = (
arg.replace("--no-", "")
.lstrip("-")
.replace("-", "_")
)
setattr(self.args, key, value)
kwargs = {**self.args.__dict__}
host = kwargs.pop("host")
port = kwargs.pop("port")
secure = kwargs.pop("secure")
raw = kwargs.pop("raw")
action = kwargs.pop("action") or "info"
api_key = kwargs.pop("api_key")
positional = kwargs.pop("positional", None)
if action == "<custom>" and positional:
action = positional[0]
if len(positional) > 1:
kwargs["args"] = positional[1:]
InspectorClient(host, port, secure, raw, api_key).do(action, **kwargs)
def _precheck(self):
# Custom TLS mismatch handling for better diagnostics
if self.main_process and (

View File

@@ -1,35 +0,0 @@
from argparse import (
SUPPRESS,
Action,
ArgumentParser,
RawTextHelpFormatter,
_SubParsersAction,
)
from typing import Any
class SanicArgumentParser(ArgumentParser):
def _check_value(self, action: Action, value: Any) -> None:
if isinstance(action, SanicSubParsersAction):
return
super()._check_value(action, value)
class SanicHelpFormatter(RawTextHelpFormatter):
def add_usage(self, usage, actions, groups, prefix=None):
if not usage:
usage = SUPPRESS
# Add one linebreak, but not two
self.add_text("\x1b[1A")
super().add_usage(usage, actions, groups, prefix)
class SanicSubParsersAction(_SubParsersAction):
def __call__(self, parser, namespace, values, option_string=None):
self._name_parser_map
parser_name = values[0]
if parser_name not in self._name_parser_map:
self._name_parser_map[parser_name] = parser
values = ["<custom>", *values]
super().__call__(parser, namespace, values, option_string)

View File

@@ -1,105 +0,0 @@
from argparse import ArgumentParser
from sanic.application.logo import get_logo
from sanic.cli.base import SanicHelpFormatter, SanicSubParsersAction
def _add_shared(parser: ArgumentParser) -> None:
parser.add_argument(
"--host",
"-H",
default="localhost",
help="Inspector host address [default 127.0.0.1]",
)
parser.add_argument(
"--port",
"-p",
default=6457,
type=int,
help="Inspector port [default 6457]",
)
parser.add_argument(
"--secure",
"-s",
action="store_true",
help="Whether to access the Inspector via TLS encryption",
)
parser.add_argument("--api-key", "-k", help="Inspector authentication key")
parser.add_argument(
"--raw",
action="store_true",
help="Whether to output the raw response information",
)
class InspectorSubParser(ArgumentParser):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
_add_shared(self)
if not self.description:
self.description = ""
self.description = get_logo(True) + self.description
def make_inspector_parser(parser: ArgumentParser) -> None:
_add_shared(parser)
subparsers = parser.add_subparsers(
action=SanicSubParsersAction,
dest="action",
description=(
"Run one or none of the below subcommands. Using inspect without "
"a subcommand will fetch general information about the state "
"of the application instance.\n\n"
"Or, you can optionally follow inspect with a subcommand. "
"If you have created a custom "
"Inspector instance, then you can run custom commands. See "
"https://sanic.dev/en/guide/deployment/inspector.html "
"for more details."
),
title=" Subcommands",
parser_class=InspectorSubParser,
)
reloader = subparsers.add_parser(
"reload",
help="Trigger a reload of the server workers",
formatter_class=SanicHelpFormatter,
)
reloader.add_argument(
"--zero-downtime",
action="store_true",
help=(
"Whether to wait for the new process to be online before "
"terminating the old"
),
)
subparsers.add_parser(
"shutdown",
help="Shutdown the application and all processes",
formatter_class=SanicHelpFormatter,
)
scale = subparsers.add_parser(
"scale",
help="Scale the number of workers",
formatter_class=SanicHelpFormatter,
)
scale.add_argument(
"replicas",
type=int,
help="Number of workers requested",
)
custom = subparsers.add_parser(
"<custom>",
help="Run a custom command",
description=(
"keyword arguments:\n When running a custom command, you can "
"add keyword arguments by appending them to your command\n\n"
"\tsanic inspect foo --one=1 --two=2"
),
formatter_class=SanicHelpFormatter,
)
custom.add_argument(
"positional",
nargs="*",
help="Add one or more non-keyword args to your custom command",
)

View File

@@ -1,119 +0,0 @@
from __future__ import annotations
import sys
from http.client import RemoteDisconnected
from textwrap import indent
from typing import Any, Dict, Optional
from urllib.error import URLError
from urllib.request import Request as URequest
from urllib.request import urlopen
from sanic.application.logo import get_logo
from sanic.application.motd import MOTDTTY
from sanic.log import Colors
try: # no cov
from ujson import dumps, loads
except ModuleNotFoundError: # no cov
from json import dumps, loads # type: ignore
class InspectorClient:
def __init__(
self,
host: str,
port: int,
secure: bool,
raw: bool,
api_key: Optional[str],
) -> None:
self.scheme = "https" if secure else "http"
self.host = host
self.port = port
self.raw = raw
self.api_key = api_key
for scheme in ("http", "https"):
full = f"{scheme}://"
if self.host.startswith(full):
self.scheme = scheme
self.host = self.host[len(full) :] # noqa E203
def do(self, action: str, **kwargs: Any) -> None:
if action == "info":
self.info()
return
result = self.request(action, **kwargs).get("result")
if result:
out = (
dumps(result)
if isinstance(result, (list, dict))
else str(result)
)
sys.stdout.write(out + "\n")
def info(self) -> None:
out = sys.stdout.write
response = self.request("", "GET")
if self.raw or not response:
return
data = response["result"]
display = data.pop("info")
extra = display.pop("extra", {})
display["packages"] = ", ".join(display["packages"])
MOTDTTY(get_logo(), self.base_url, display, extra).display(
version=False,
action="Inspecting",
out=out,
)
for name, info in data["workers"].items():
info = "\n".join(
f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
for key, value in info.items()
)
out(
"\n"
+ indent(
"\n".join(
[
f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
info,
]
),
" ",
)
+ "\n"
)
def request(self, action: str, method: str = "POST", **kwargs: Any) -> Any:
url = f"{self.base_url}/{action}"
params: Dict[str, Any] = {"method": method, "headers": {}}
if kwargs:
params["data"] = dumps(kwargs).encode()
params["headers"]["content-type"] = "application/json"
if self.api_key:
params["headers"]["authorization"] = f"Bearer {self.api_key}"
request = URequest(url, **params)
try:
with urlopen(request) as response: # nosec B310
raw = response.read()
loaded = loads(raw)
if self.raw:
sys.stdout.write(dumps(loaded.get("result")) + "\n")
return {}
return loaded
except (URLError, RemoteDisconnected) as e:
sys.stderr.write(
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}{self.base_url}{Colors.END}\n"
"Either the application is not running, or it did not start "
f"an inspector instance.\n{e}\n"
)
sys.exit(1)
@property
def base_url(self):
return f"{self.scheme}://{self.host}:{self.port}"

View File

@@ -3,23 +3,11 @@ import os
import signal
import sys
from contextlib import contextmanager
from enum import Enum
from typing import Awaitable, Union
from typing import Awaitable
from multidict import CIMultiDict # type: ignore
from sanic.helpers import Default
if sys.version_info < (3, 8): # no cov
StartMethod = Union[Default, str]
else: # no cov
from typing import Literal
StartMethod = Union[
Default, Literal["fork"], Literal["forkserver"], Literal["spawn"]
]
OS_IS_WINDOWS = os.name == "nt"
UVLOOP_INSTALLED = False
@@ -31,6 +19,7 @@ try:
except ImportError:
pass
# Python 3.11 changed the way Enum formatting works for mixed-in types.
if sys.version_info < (3, 11, 0):
@@ -56,16 +45,6 @@ class UpperStrEnum(StrEnum):
return self.value
@contextmanager
def use_context(method: StartMethod):
from sanic import Sanic
orig = Sanic.start_method
Sanic.start_method = method
yield
Sanic.start_method = orig
def enable_windows_color_support():
import ctypes

View File

@@ -2,14 +2,13 @@ from __future__ import annotations
import sys
from abc import ABCMeta
from inspect import getmembers, isclass, isdatadescriptor
from os import environ
from pathlib import Path
from typing import Any, Callable, Dict, Optional, Sequence, Union
from warnings import filterwarnings
from sanic.constants import LocalCertCreator
from sanic.constants import LocalCertCreator, RestartOrder
from sanic.errorpages import DEFAULT_FORMAT, check_error_format
from sanic.helpers import Default, _default
from sanic.http import Http
@@ -47,9 +46,6 @@ DEFAULT_CONFIG = {
"INSPECTOR": False,
"INSPECTOR_HOST": "localhost",
"INSPECTOR_PORT": 6457,
"INSPECTOR_TLS_KEY": _default,
"INSPECTOR_TLS_CERT": _default,
"INSPECTOR_API_KEY": "",
"KEEP_ALIVE_TIMEOUT": 5, # 5 seconds
"KEEP_ALIVE": True,
"LOCAL_CERT_CREATOR": LocalCertCreator.AUTO,
@@ -67,6 +63,7 @@ DEFAULT_CONFIG = {
"REQUEST_MAX_SIZE": 100000000, # 100 megabytes
"REQUEST_TIMEOUT": 60, # 60 seconds
"RESPONSE_TIMEOUT": 60, # 60 seconds
"RESTART_ORDER": RestartOrder.SHUTDOWN_FIRST,
"TLS_CERT_PASSWORD": "",
"TOUCHUP": _default,
"USE_UVLOOP": _default,
@@ -76,7 +73,7 @@ DEFAULT_CONFIG = {
}
class DescriptorMeta(ABCMeta):
class DescriptorMeta(type):
def __init__(cls, *_):
cls.__setters__ = {name for name, _ in getmembers(cls, cls._is_setter)}
@@ -97,9 +94,6 @@ class Config(dict, metaclass=DescriptorMeta):
INSPECTOR: bool
INSPECTOR_HOST: str
INSPECTOR_PORT: int
INSPECTOR_TLS_KEY: Union[Path, str, Default]
INSPECTOR_TLS_CERT: Union[Path, str, Default]
INSPECTOR_API_KEY: str
KEEP_ALIVE_TIMEOUT: int
KEEP_ALIVE: bool
LOCAL_CERT_CREATOR: Union[str, LocalCertCreator]
@@ -117,6 +111,7 @@ class Config(dict, metaclass=DescriptorMeta):
REQUEST_MAX_SIZE: int
REQUEST_TIMEOUT: int
RESPONSE_TIMEOUT: int
RESTART_ORDER: Union[str, RestartOrder]
SERVER_NAME: str
TLS_CERT_PASSWORD: str
TOUCHUP: Union[Default, bool]
@@ -127,9 +122,7 @@ class Config(dict, metaclass=DescriptorMeta):
def __init__(
self,
defaults: Optional[
Dict[str, Union[str, bool, int, float, None]]
] = None,
defaults: Dict[str, Union[str, bool, int, float, None]] = None,
env_prefix: Optional[str] = SANIC_PREFIX,
keep_alive: Optional[bool] = None,
*,
@@ -203,6 +196,10 @@ class Config(dict, metaclass=DescriptorMeta):
self.LOCAL_CERT_CREATOR = LocalCertCreator[
self.LOCAL_CERT_CREATOR.upper()
]
elif attr == "RESTART_ORDER" and not isinstance(
self.RESTART_ORDER, RestartOrder
):
self.RESTART_ORDER = RestartOrder[self.RESTART_ORDER.upper()]
elif attr == "DEPRECATION_FILTER":
self._configure_warnings()

View File

@@ -21,6 +21,12 @@ class LocalCertCreator(UpperStrEnum):
MKCERT = auto()
class RestartOrder(UpperStrEnum):
SHUTDOWN_FIRST = auto()
STARTUP_FIRST = auto()
HTTP_METHODS = tuple(HTTPMethod.__members__.values())
SAFE_HTTP_METHODS = (HTTPMethod.GET, HTTPMethod.HEAD, HTTPMethod.OPTIONS)
IDEMPOTENT_HTTP_METHODS = (

View File

@@ -36,6 +36,14 @@ class ErrorHandler:
self.debug = False
self.base = base
@classmethod
def finalize(cls, *args, **kwargs):
deprecation(
"ErrorHandler.finalize is deprecated and no longer needed. "
"Please remove update your code to remove it. ",
22.12,
)
def _full_lookup(self, exception, route_name: Optional[str] = None):
return self.lookup(exception, route_name)

View File

@@ -71,6 +71,7 @@ class Http(Stream, metaclass=TouchUpMeta):
"request_body",
"request_bytes",
"request_bytes_left",
"request_max_size",
"response",
"response_func",
"response_size",

View File

@@ -19,7 +19,7 @@ class Stream:
request_max_size: Union[int, float]
__touchup__: Tuple[str, ...] = tuple()
__slots__ = ("request_max_size",)
__slots__ = ()
def respond(
self, response: BaseHTTPResponse

View File

@@ -24,14 +24,12 @@ def create_context(
certfile: Optional[str] = None,
keyfile: Optional[str] = None,
password: Optional[str] = None,
purpose: ssl.Purpose = ssl.Purpose.CLIENT_AUTH,
) -> ssl.SSLContext:
"""Create a context with secure crypto and HTTP/1.1 in protocols."""
context = ssl.create_default_context(purpose=purpose)
context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
context.minimum_version = ssl.TLSVersion.TLSv1_2
context.set_ciphers(":".join(CIPHERS_TLS12))
context.set_alpn_protocols(["http/1.1"])
if purpose is ssl.Purpose.CLIENT_AUTH:
context.sni_callback = server_name_callback
if certfile and keyfile:
context.load_cert_chain(certfile, keyfile, password)

View File

@@ -1,22 +1,10 @@
import logging
import sys
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict
from typing import Any, Dict
from warnings import warn
from sanic.compat import is_atty
# Python 3.11 changed the way Enum formatting works for mixed-in types.
if sys.version_info < (3, 11, 0):
class StrEnum(str, Enum):
pass
else:
if not TYPE_CHECKING:
from enum import StrEnum
from sanic.compat import StrEnum, is_atty
LOGGING_CONFIG_DEFAULTS: Dict[str, Any] = dict( # no cov

View File

@@ -32,9 +32,6 @@ class Middleware:
def __call__(self, *args, **kwargs):
return self.func(*args, **kwargs)
def __hash__(self) -> int:
return hash(self.func)
def __repr__(self) -> str:
return (
f"{self.__class__.__name__}("

View File

@@ -218,7 +218,6 @@ class RouteMixin(metaclass=SanicMeta):
stream: bool = False,
version_prefix: str = "/v",
error_format: Optional[str] = None,
unquote: bool = False,
**ctx_kwargs: Any,
) -> RouteHandler:
"""A helper method to register class instance or
@@ -265,7 +264,6 @@ class RouteMixin(metaclass=SanicMeta):
name=name,
version_prefix=version_prefix,
error_format=error_format,
unquote=unquote,
**ctx_kwargs,
)(handler)
return handler

View File

@@ -20,7 +20,7 @@ class SignalMixin(metaclass=SanicMeta):
event: Union[str, Enum],
*,
apply: bool = True,
condition: Optional[Dict[str, Any]] = None,
condition: Dict[str, Any] = None,
exclusive: bool = True,
) -> Callable[[SignalHandler], SignalHandler]:
"""
@@ -64,7 +64,7 @@ class SignalMixin(metaclass=SanicMeta):
self,
handler: Optional[Callable[..., Any]],
event: str,
condition: Optional[Dict[str, Any]] = None,
condition: Dict[str, Any] = None,
exclusive: bool = True,
):
if not handler:

View File

@@ -27,7 +27,6 @@ from typing import (
Callable,
Dict,
List,
Mapping,
Optional,
Set,
Tuple,
@@ -41,9 +40,10 @@ from sanic.application.logo import get_logo
from sanic.application.motd import MOTD
from sanic.application.state import ApplicationServerInfo, Mode, ServerStage
from sanic.base.meta import SanicMeta
from sanic.compat import OS_IS_WINDOWS, StartMethod, is_atty
from sanic.compat import OS_IS_WINDOWS, is_atty
from sanic.constants import RestartOrder
from sanic.exceptions import ServerKilled
from sanic.helpers import Default, _default
from sanic.helpers import Default
from sanic.http.constants import HTTP
from sanic.http.tls import get_ssl_context, process_to_context
from sanic.http.tls.context import SanicSSLContext
@@ -59,6 +59,7 @@ from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.protocols.websocket_protocol import WebSocketProtocol
from sanic.server.runners import serve, serve_multiple, serve_single
from sanic.server.socket import configure_socket, remove_unix_socket
from sanic.worker.inspector import Inspector
from sanic.worker.loader import AppLoader
from sanic.worker.manager import WorkerManager
from sanic.worker.multiplexer import WorkerMultiplexer
@@ -88,7 +89,6 @@ class StartupMixin(metaclass=SanicMeta):
state: ApplicationState
websocket_enabled: bool
multiplexer: WorkerMultiplexer
start_method: StartMethod = _default
def setup_loop(self):
if not self.asgi:
@@ -126,7 +126,7 @@ class StartupMixin(metaclass=SanicMeta):
register_sys_signals: bool = True,
access_log: Optional[bool] = None,
unix: Optional[str] = None,
loop: Optional[AbstractEventLoop] = None,
loop: AbstractEventLoop = None,
reload_dir: Optional[Union[List[str], str]] = None,
noisy_exceptions: Optional[bool] = None,
motd: bool = True,
@@ -225,7 +225,7 @@ class StartupMixin(metaclass=SanicMeta):
register_sys_signals: bool = True,
access_log: Optional[bool] = None,
unix: Optional[str] = None,
loop: Optional[AbstractEventLoop] = None,
loop: AbstractEventLoop = None,
reload_dir: Optional[Union[List[str], str]] = None,
noisy_exceptions: Optional[bool] = None,
motd: bool = True,
@@ -355,12 +355,12 @@ class StartupMixin(metaclass=SanicMeta):
debug: bool = False,
ssl: Union[None, SSLContext, dict, str, list, tuple] = None,
sock: Optional[socket] = None,
protocol: Optional[Type[Protocol]] = None,
protocol: Type[Protocol] = None,
backlog: int = 100,
access_log: Optional[bool] = None,
unix: Optional[str] = None,
return_asyncio_server: bool = False,
asyncio_server_kwargs: Optional[Dict[str, Any]] = None,
asyncio_server_kwargs: Dict[str, Any] = None,
noisy_exceptions: Optional[bool] = None,
) -> Optional[AsyncioServer]:
"""
@@ -481,7 +481,7 @@ class StartupMixin(metaclass=SanicMeta):
sock: Optional[socket] = None,
unix: Optional[str] = None,
workers: int = 1,
loop: Optional[AbstractEventLoop] = None,
loop: AbstractEventLoop = None,
protocol: Type[Protocol] = HttpProtocol,
backlog: int = 100,
register_sys_signals: bool = True,
@@ -692,18 +692,13 @@ class StartupMixin(metaclass=SanicMeta):
def should_auto_reload(cls) -> bool:
return any(app.state.auto_reload for app in cls._app_registry.values())
@classmethod
def _get_startup_method(cls) -> str:
return (
cls.start_method
if not isinstance(cls.start_method, Default)
else "spawn"
)
@classmethod
def _get_context(cls) -> BaseContext:
method = cls._get_startup_method()
logger.debug("Creating multiprocessing context using '%s'", method)
method = (
"spawn"
if "linux" not in sys.platform or cls.should_auto_reload()
else "fork"
)
return get_context(method)
@classmethod
@@ -769,7 +764,7 @@ class StartupMixin(metaclass=SanicMeta):
]
primary_server_info.settings["run_multiple"] = True
monitor_sub, monitor_pub = Pipe(True)
worker_state: Mapping[str, Any] = sync_manager.dict()
worker_state: Dict[str, Any] = sync_manager.dict()
kwargs: Dict[str, Any] = {
**primary_server_info.settings,
"monitor_publisher": monitor_pub,
@@ -820,12 +815,13 @@ class StartupMixin(metaclass=SanicMeta):
cls._get_context(),
(monitor_pub, monitor_sub),
worker_state,
cast(RestartOrder, primary.config.RESTART_ORDER),
)
if cls.should_auto_reload():
reload_dirs: Set[Path] = primary.state.reload_dirs.union(
*(app.state.reload_dirs for app in apps)
)
reloader = Reloader(monitor_pub, 0, reload_dirs, app_loader)
reloader = Reloader(monitor_pub, 1.0, reload_dirs, app_loader)
manager.manage("Reloader", reloader, {}, transient=False)
inspector = None
@@ -841,15 +837,12 @@ class StartupMixin(metaclass=SanicMeta):
"packages": [sanic_version, *packages],
"extra": extra,
}
inspector = primary.inspector_class(
inspector = Inspector(
monitor_pub,
app_info,
worker_state,
primary.config.INSPECTOR_HOST,
primary.config.INSPECTOR_PORT,
primary.config.INSPECTOR_API_KEY,
primary.config.INSPECTOR_TLS_KEY,
primary.config.INSPECTOR_TLS_CERT,
)
manager.manage("Inspector", inspector, {}, transient=False)

View File

@@ -27,7 +27,6 @@ if TYPE_CHECKING:
from sanic.app import Sanic
import email.utils
import unicodedata
import uuid
from collections import defaultdict
@@ -1085,16 +1084,6 @@ def parse_multipart_form(body, boundary):
form_parameters["filename*"]
)
file_name = unquote(value, encoding=encoding)
# Normalize to NFC (Apple MacOS/iOS send NFD)
# Notes:
# - No effect for Windows, Linux or Android clients which
# already send NFC
# - Python open() is tricky (creates files in NFC no matter
# which form you use)
if file_name is not None:
file_name = unicodedata.normalize("NFC", file_name)
elif form_header_field == "content-type":
content_type = form_header_value
content_charset = form_parameters.get("charset", "utf-8")

View File

@@ -1,11 +1,11 @@
import asyncio
import sys
from distutils.util import strtobool
from os import getenv
from sanic.compat import OS_IS_WINDOWS
from sanic.log import error_logger
from sanic.utils import str_to_bool
def try_use_uvloop() -> None:
@@ -35,7 +35,7 @@ def try_use_uvloop() -> None:
)
return
uvloop_install_removed = str_to_bool(getenv("SANIC_NO_UVLOOP", "no"))
uvloop_install_removed = strtobool(getenv("SANIC_NO_UVLOOP", "no"))
if uvloop_install_removed:
error_logger.info(
"You are requesting to run Sanic using uvloop, but the "

View File

@@ -200,7 +200,7 @@ def _serve_http_1(
asyncio_server_kwargs = (
asyncio_server_kwargs if asyncio_server_kwargs else {}
)
if OS_IS_WINDOWS and sock:
if OS_IS_WINDOWS:
pid = os.getpid()
sock = sock.share(pid)
sock = socket.fromshare(sock)
@@ -229,7 +229,6 @@ def _serve_http_1(
loop.run_until_complete(app._startup())
loop.run_until_complete(app._server_event("init", "before"))
app.ack()
try:
http_server = loop.run_until_complete(server_coroutine)
@@ -307,7 +306,6 @@ def _serve_http_3(
server = AsyncioServer(app, loop, coro, [])
loop.run_until_complete(server.startup())
loop.run_until_complete(server.before_start())
app.ack()
loop.run_until_complete(server)
_setup_system_signals(app, run_multiple, register_sys_signals, loop)
loop.run_until_complete(server.after_start())

View File

@@ -9,10 +9,8 @@ from typing import (
Union,
)
from sanic.exceptions import InvalidUsage
ASGIMessage = MutableMapping[str, Any]
ASIMessage = MutableMapping[str, Any]
class WebSocketConnection:
@@ -27,8 +25,8 @@ class WebSocketConnection:
def __init__(
self,
send: Callable[[ASGIMessage], Awaitable[None]],
receive: Callable[[], Awaitable[ASGIMessage]],
send: Callable[[ASIMessage], Awaitable[None]],
receive: Callable[[], Awaitable[ASIMessage]],
subprotocols: Optional[List[str]] = None,
) -> None:
self._send = send
@@ -49,13 +47,7 @@ class WebSocketConnection:
message = await self._receive()
if message["type"] == "websocket.receive":
try:
return message["text"]
except KeyError:
try:
return message["bytes"].decode()
except KeyError:
raise InvalidUsage("Bad ASGI message received")
elif message["type"] == "websocket.disconnect":
pass

View File

@@ -154,7 +154,9 @@ class SignalRouter(BaseRouter):
try:
for signal in signals:
params.pop("__trigger__", None)
requirements = signal.extra.requirements
requirements = getattr(
signal.handler, "__requirements__", None
)
if (
(condition is None and signal.ctx.exclusive is False)
or (condition is None and not requirements)
@@ -217,13 +219,8 @@ class SignalRouter(BaseRouter):
if not trigger:
event = ".".join([*parts[:2], "<__trigger__>"])
try:
# Attaching __requirements__ and __trigger__ to the handler
# is deprecated and will be removed in v23.6.
handler.__requirements__ = condition # type: ignore
handler.__trigger__ = trigger # type: ignore
except AttributeError:
pass
signal = super().add(
event,
@@ -235,7 +232,6 @@ class SignalRouter(BaseRouter):
signal.ctx.exclusive = exclusive
signal.ctx.trigger = trigger
signal.ctx.definition = event_definition
signal.extra.requirements = condition
return cast(Signal, signal)
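
As an aside on the `signal.extra.requirements` change above, a hedged sketch of how a signal condition is consumed at the application level, assuming the `condition` keyword on `app.signal` and `app.dispatch` (the event name and handler are placeholders):

```python
from sanic import Sanic
from sanic.response import text

app = Sanic("ExampleApp")


# Registered with a condition, so the handler should only fire when a
# dispatch supplies a matching condition dict.
@app.signal("foo.bar.baz", condition={"one": "1"})
async def conditional_handler(**context):
    print("signal matched")


@app.get("/")
async def trigger(request):
    await app.dispatch("foo.bar.baz", condition={"one": "1"})
    return text("dispatched")
```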

View File

@@ -1,18 +0,0 @@
from enum import IntEnum, auto
from sanic.compat import UpperStrEnum
class RestartOrder(UpperStrEnum):
SHUTDOWN_FIRST = auto()
STARTUP_FIRST = auto()
class ProcessState(IntEnum):
IDLE = auto()
RESTARTING = auto()
STARTING = auto()
STARTED = auto()
ACKED = auto()
JOINED = auto()
TERMINATED = auto()

View File

@@ -1,17 +1,23 @@
from __future__ import annotations
import sys
from datetime import datetime
from inspect import isawaitable
from multiprocessing.connection import Connection
from os import environ
from pathlib import Path
from typing import Any, Dict, Mapping, Union
from signal import SIGINT, SIGTERM
from signal import signal as signal_func
from socket import AF_INET, SOCK_STREAM, socket, timeout
from textwrap import indent
from typing import Any, Dict
from sanic.exceptions import Unauthorized
from sanic.helpers import Default
from sanic.log import logger
from sanic.request import Request
from sanic.response import json
from sanic.application.logo import get_logo
from sanic.application.motd import MOTDTTY
from sanic.log import Colors, error_logger, logger
from sanic.server.socket import configure_socket
try: # no cov
from ujson import dumps, loads
except ModuleNotFoundError: # no cov
from json import dumps, loads # type: ignore
class Inspector:
@@ -19,105 +25,118 @@ class Inspector:
self,
publisher: Connection,
app_info: Dict[str, Any],
worker_state: Mapping[str, Any],
worker_state: Dict[str, Any],
host: str,
port: int,
api_key: str,
tls_key: Union[Path, str, Default],
tls_cert: Union[Path, str, Default],
):
self._publisher = publisher
self.run = True
self.app_info = app_info
self.worker_state = worker_state
self.host = host
self.port = port
self.api_key = api_key
self.tls_key = tls_key
self.tls_cert = tls_cert
def __call__(self, run=True, **_) -> Inspector:
from sanic import Sanic
self.app = Sanic("Inspector")
self._setup()
if run:
self.app.run(
host=self.host,
port=self.port,
single_process=True,
ssl={"key": self.tls_key, "cert": self.tls_cert}
if not isinstance(self.tls_key, Default)
and not isinstance(self.tls_cert, Default)
else None,
def __call__(self) -> None:
sock = configure_socket(
{"host": self.host, "port": self.port, "unix": None, "backlog": 1}
)
return self
assert sock
signal_func(SIGINT, self.stop)
signal_func(SIGTERM, self.stop)
def _setup(self):
self.app.get("/")(self._info)
self.app.post("/<action:str>")(self._action)
if self.api_key:
self.app.on_request(self._authentication)
environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"
logger.info(f"Inspector started on: {sock.getsockname()}")
sock.settimeout(0.5)
try:
while self.run:
try:
conn, _ = sock.accept()
except timeout:
continue
else:
action = conn.recv(64)
if action == b"reload":
conn.send(b"\n")
self.reload()
elif action == b"shutdown":
conn.send(b"\n")
self.shutdown()
else:
data = dumps(self.state_to_json())
conn.send(data.encode())
conn.close()
finally:
logger.debug("Inspector closing")
sock.close()
def _authentication(self, request: Request) -> None:
if request.token != self.api_key:
raise Unauthorized("Bad API key")
def stop(self, *_):
self.run = False
async def _action(self, request: Request, action: str):
logger.info("Incoming inspector action: %s", action)
output: Any = None
method = getattr(self, action, None)
if method:
kwargs = {}
if request.body:
kwargs = request.json
args = kwargs.pop("args", ())
output = method(*args, **kwargs)
if isawaitable(output):
output = await output
return await self._respond(request, output)
async def _info(self, request: Request):
return await self._respond(request, self._state_to_json())
async def _respond(self, request: Request, output: Any):
name = request.match_info.get("action", "info")
return json(
{"meta": {"action": name}, "result": output},
escape_forward_slashes=False,
)
def _state_to_json(self) -> Dict[str, Any]:
def state_to_json(self):
output = {"info": self.app_info}
output["workers"] = self._make_safe(dict(self.worker_state))
output["workers"] = self.make_safe(dict(self.worker_state))
return output
def reload(self):
message = "__ALL_PROCESSES__:"
self._publisher.send(message)
def shutdown(self):
message = "__TERMINATE__"
self._publisher.send(message)
@staticmethod
def _make_safe(obj: Dict[str, Any]) -> Dict[str, Any]:
def make_safe(obj: Dict[str, Any]) -> Dict[str, Any]:
for key, value in obj.items():
if isinstance(value, dict):
obj[key] = Inspector._make_safe(value)
obj[key] = Inspector.make_safe(value)
elif isinstance(value, datetime):
obj[key] = value.isoformat()
return obj
def reload(self, zero_downtime: bool = False) -> None:
message = "__ALL_PROCESSES__:"
if zero_downtime:
message += ":STARTUP_FIRST"
self._publisher.send(message)
def scale(self, replicas) -> str:
num_workers = 1
if replicas:
num_workers = int(replicas)
log_msg = f"Scaling to {num_workers}"
logger.info(log_msg)
message = f"__SCALE__:{num_workers}"
self._publisher.send(message)
return log_msg
def shutdown(self) -> None:
message = "__TERMINATE__"
self._publisher.send(message)
def inspect(host: str, port: int, action: str):
out = sys.stdout.write
with socket(AF_INET, SOCK_STREAM) as sock:
try:
sock.connect((host, port))
except ConnectionRefusedError:
error_logger.error(
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}{(host, port)}{Colors.END}\n"
"Either the application is not running, or it did not start "
"an inspector instance."
)
sock.close()
sys.exit(1)
sock.sendall(action.encode())
data = sock.recv(4096)
if action == "raw":
out(data.decode())
elif action == "pretty":
loaded = loads(data)
display = loaded.pop("info")
extra = display.pop("extra", {})
display["packages"] = ", ".join(display["packages"])
MOTDTTY(get_logo(), f"{host}:{port}", display, extra).display(
version=False,
action="Inspecting",
out=out,
)
for name, info in loaded["workers"].items():
info = "\n".join(
f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
for key, value in info.items()
)
out(
"\n"
+ indent(
"\n".join(
[
f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
info,
]
),
" ",
)
+ "\n"
)
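
The HTTP Inspector above dispatches `POST /<action>` to a method of the same name, so it can be extended by subclassing. A hedged sketch, assuming the `inspector` and `inspector_class` keyword arguments shown in the `sanic/app.py` diff (class, method, and app names are hypothetical):

```python
from sanic import Sanic
from sanic.worker.inspector import Inspector


class MyInspector(Inspector):
    async def hello(self, name: str = "world"):
        # The return value comes back as the "result" field of the
        # Inspector's JSON response.
        return f"Hello, {name}"


app = Sanic("ExampleApp", inspector=True, inspector_class=MyInspector)
```

With the CLI subparsers shown earlier, such a command would presumably be reachable as `sanic inspect hello --name=Adam`.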

View File

@@ -1,16 +1,13 @@
import os
from contextlib import suppress
from itertools import count
from random import choice
from signal import SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from typing import Dict, List, Optional
from typing import List, Optional
from sanic.compat import OS_IS_WINDOWS
from sanic.constants import RestartOrder
from sanic.exceptions import ServerKilled
from sanic.log import error_logger, logger
from sanic.worker.constants import RestartOrder
from sanic.worker.process import ProcessState, Worker, WorkerProcess
@@ -21,7 +18,7 @@ else:
class WorkerManager:
THRESHOLD = WorkerProcess.THRESHOLD
THRESHOLD = 300 # == 30 seconds
MAIN_IDENT = "Sanic-Main"
def __init__(
@@ -32,69 +29,51 @@ class WorkerManager:
context,
monitor_pubsub,
worker_state,
restart_order: RestartOrder = RestartOrder.SHUTDOWN_FIRST,
):
self.num_server = number
self.context = context
self.transient: Dict[str, Worker] = {}
self.durable: Dict[str, Worker] = {}
self.transient: List[Worker] = []
self.durable: List[Worker] = []
self.monitor_publisher, self.monitor_subscriber = monitor_pubsub
self.worker_state = worker_state
self.worker_state[self.MAIN_IDENT] = {"pid": self.pid}
self._shutting_down = False
self._serve = serve
self._server_settings = server_settings
self._server_count = count()
self.terminated = False
self.restart_order = restart_order
if number == 0:
raise RuntimeError("Cannot serve with no workers")
for _ in range(number):
self.create_server()
for i in range(number):
self.manage(
f"{WorkerProcess.SERVER_LABEL}-{i}",
serve,
server_settings,
transient=True,
)
signal_func(SIGINT, self.shutdown_signal)
signal_func(SIGTERM, self.shutdown_signal)
def manage(self, ident, func, kwargs, transient=False) -> Worker:
def manage(self, ident, func, kwargs, transient=False):
container = self.transient if transient else self.durable
worker = Worker(ident, func, kwargs, self.context, self.worker_state)
container[worker.ident] = worker
return worker
def create_server(self) -> Worker:
server_number = next(self._server_count)
return self.manage(
f"{WorkerProcess.SERVER_LABEL}-{server_number}",
self._serve,
self._server_settings,
transient=True,
container.append(
Worker(
ident,
func,
kwargs,
self.context,
self.worker_state,
self.restart_order,
)
def shutdown_server(self, ident: Optional[str] = None) -> None:
if not ident:
servers = [
worker
for worker in self.transient.values()
if worker.ident.startswith(WorkerProcess.SERVER_LABEL)
]
if not servers:
error_logger.error(
"Server shutdown failed because a server was not found."
)
return
worker = choice(servers) # nosec B311
else:
worker = self.transient[ident]
for process in worker.processes:
process.terminate()
del self.transient[worker.ident]
def run(self):
self.start()
self.monitor()
self.join()
self.terminate()
# self.kill()
def start(self):
for process in self.processes:
@@ -116,41 +95,15 @@ class WorkerManager:
self.join()
def terminate(self):
if not self._shutting_down:
if not self.terminated:
for process in self.processes:
process.terminate()
self.terminated = True
def restart(
self,
process_names: Optional[List[str]] = None,
restart_order=RestartOrder.SHUTDOWN_FIRST,
**kwargs,
):
def restart(self, process_names: Optional[List[str]] = None, **kwargs):
for process in self.transient_processes:
if not process_names or process.name in process_names:
process.restart(restart_order=restart_order, **kwargs)
def scale(self, num_worker: int):
if num_worker <= 0:
raise ValueError("Cannot scale to 0 workers.")
change = num_worker - self.num_server
if change == 0:
logger.info(
f"No change needed. There are already {num_worker} workers."
)
return
logger.info(f"Scaling from {self.num_server} to {num_worker} workers")
if change > 0:
for _ in range(change):
worker = self.create_server()
for process in worker.processes:
process.start()
else:
for _ in range(abs(change)):
self.shutdown_server()
self.num_server = num_worker
process.restart(**kwargs)
def monitor(self):
self.wait_for_ack()
@@ -166,15 +119,7 @@ class WorkerManager:
elif message == "__TERMINATE__":
self.shutdown()
break
logger.debug(
"Incoming monitor message: %s",
message,
extra={"verbosity": 1},
)
split_message = message.split(":", 2)
if message.startswith("__SCALE__"):
self.scale(int(split_message[-1]))
continue
split_message = message.split(":", 1)
processes = split_message[0]
reloaded_files = (
split_message[1] if len(split_message) > 1 else None
@@ -184,15 +129,9 @@ class WorkerManager:
]
if "__ALL_PROCESSES__" in process_names:
process_names = None
order = (
RestartOrder.STARTUP_FIRST
if "STARTUP_FIRST" in split_message
else RestartOrder.SHUTDOWN_FIRST
)
self.restart(
process_names=process_names,
reloaded_files=reloaded_files,
restart_order=order,
)
self._sync_states()
except InterruptedError:
@@ -200,6 +139,12 @@ class WorkerManager:
raise
break
def _sync_states(self):
for process in self.processes:
state = self.worker_state[process.name].get("state")
if state and process.state.name != state:
process.set_state(ProcessState[state], True)
def wait_for_ack(self): # no cov
misses = 0
message = (
@@ -207,7 +152,7 @@ class WorkerManager:
"online in the allowed time. Sanic is shutting down to avoid a "
f"deadlock. The current threshold is {self.THRESHOLD / 10}s. "
"If this problem persists, please check out the documentation "
"https://sanic.dev/en/guide/deployment/manager.html#worker-ack."
"___."
)
while not self._all_workers_ack():
if self.monitor_subscriber.poll(0.1):
@@ -234,8 +179,8 @@ class WorkerManager:
self.kill()
@property
def workers(self) -> List[Worker]:
return list(self.transient.values()) + list(self.durable.values())
def workers(self):
return self.transient + self.durable
@property
def processes(self):
@@ -245,7 +190,7 @@ class WorkerManager:
@property
def transient_processes(self):
for worker in self.transient.values():
for worker in self.transient:
for process in worker.processes:
yield process
@@ -256,11 +201,6 @@ class WorkerManager:
raise ServerKilled
def shutdown_signal(self, signal, frame):
if self._shutting_down:
logger.info("Shutdown interrupted. Killing.")
with suppress(ServerKilled):
self.kill()
logger.info("Received signal %s. Shutting down.", Signals(signal).name)
self.monitor_publisher.send(None)
self.shutdown()
@@ -269,7 +209,6 @@ class WorkerManager:
for process in self.processes:
if process.is_alive():
process.terminate()
self._shutting_down = True
@property
def pid(self):
@@ -282,9 +221,3 @@ class WorkerManager:
if worker_state.get("server")
]
return all(acked) and len(acked) == self.num_server
def _sync_states(self):
for process in self.processes:
state = self.worker_state[process.name].get("state")
if state and process.state.name != state:
process.set_state(ProcessState[state], True)

View File

@@ -2,7 +2,6 @@ from multiprocessing.connection import Connection
from os import environ, getpid
from typing import Any, Dict
from sanic.log import Colors, logger
from sanic.worker.process import ProcessState
from sanic.worker.state import WorkerState
@@ -17,23 +16,12 @@ class WorkerMultiplexer:
self._state = WorkerState(worker_state, self.name)
def ack(self):
logger.debug(
f"{Colors.BLUE}Process ack: {Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self.pid,
)
self._state._state[self.name] = {
**self._state._state[self.name],
"state": ProcessState.ACKED.name,
}
def restart(
self,
name: str = "",
all_workers: bool = False,
zero_downtime: bool = False,
):
def restart(self, name: str = "", all_workers: bool = False):
if name and all_workers:
raise ValueError(
"Ambiguous restart with both a named process and"
@@ -41,18 +29,10 @@ class WorkerMultiplexer:
)
if not name:
name = "__ALL_PROCESSES__:" if all_workers else self.name
if not name.endswith(":"):
name += ":"
if zero_downtime:
name += ":STARTUP_FIRST"
self._monitor_publisher.send(name)
reload = restart # no cov
def scale(self, num_workers: int):
message = f"__SCALE__:{num_workers}"
self._monitor_publisher.send(message)
def terminate(self, early: bool = False):
message = "__TERMINATE_EARLY__" if early else "__TERMINATE__"
self._monitor_publisher.send(message)
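
The multiplexer therefore speaks to the monitor purely through strings: a restart target suffixed with a colon (plus an optional STARTUP_FIRST marker for zero-downtime), "__SCALE__:<n>" for scaling, and "__TERMINATE__" / "__TERMINATE_EARLY__" for shutdown. A small sketch of the restart-message construction, assuming only the branching shown above; build_restart_message is an illustrative helper, not part of the Sanic API.

def build_restart_message(
    own_name: str,
    name: str = "",
    all_workers: bool = False,
    zero_downtime: bool = False,
) -> str:
    # Named target, __ALL_PROCESSES__, or the worker's own name; always ends
    # with ":" and gains ":STARTUP_FIRST" when a zero-downtime restart is requested.
    if name and all_workers:
        raise ValueError(
            "Ambiguous restart with both a named process and all_workers=True"
        )
    if not name:
        name = "__ALL_PROCESSES__:" if all_workers else own_name
    if not name.endswith(":"):
        name += ":"
    if zero_downtime:
        name += ":STARTUP_FIRST"
    return name


assert build_restart_message("Test") == "Test:"
assert (
    build_restart_message("Test", all_workers=True, zero_downtime=True)
    == "__ALL_PROCESSES__::STARTUP_FIRST"
)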

View File

@@ -1,14 +1,14 @@
import os
from datetime import datetime, timezone
from enum import IntEnum, auto
from multiprocessing.context import BaseContext
from signal import SIGINT
from threading import Thread
from time import sleep
from typing import Any, Dict, Set
from sanic.constants import RestartOrder
from sanic.log import Colors, logger
from sanic.worker.constants import ProcessState, RestartOrder
def get_now():
@@ -16,17 +16,35 @@ def get_now():
return now
class ProcessState(IntEnum):
IDLE = auto()
RESTARTING = auto()
STARTING = auto()
STARTED = auto()
ACKED = auto()
JOINED = auto()
TERMINATED = auto()
class WorkerProcess:
THRESHOLD = 300 # == 30 seconds
SERVER_LABEL = "Server"
def __init__(self, factory, name, target, kwargs, worker_state):
def __init__(
self,
factory,
name,
target,
kwargs,
worker_state,
restart_order: RestartOrder,
):
self.state = ProcessState.IDLE
self.factory = factory
self.name = name
self.target = target
self.kwargs = kwargs
self.worker_state = worker_state
self.restart_order = restart_order
if self.name not in self.worker_state:
self.worker_state[self.name] = {
"server": self.SERVER_LABEL in self.name
@@ -81,7 +99,7 @@ class WorkerProcess:
except (KeyError, AttributeError, ProcessLookupError):
...
def restart(self, restart_order=RestartOrder.SHUTDOWN_FIRST, **kwargs):
def restart(self, **kwargs):
logger.debug(
f"{Colors.BLUE}Restarting a process: {Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
@@ -89,7 +107,7 @@ class WorkerProcess:
self.pid,
)
self.set_state(ProcessState.RESTARTING, force=True)
if restart_order is RestartOrder.SHUTDOWN_FIRST:
if self.restart_order is RestartOrder.SHUTDOWN_FIRST:
self._terminate_now()
else:
self._old_process = self._current_process
@@ -102,7 +120,7 @@ class WorkerProcess:
except AttributeError:
raise RuntimeError("Restart failed")
if restart_order is RestartOrder.STARTUP_FIRST:
if self.restart_order is RestartOrder.STARTUP_FIRST:
self._terminate_soon()
self.worker_state[self.name] = {
@@ -112,26 +130,6 @@ class WorkerProcess:
"restart_at": get_now(),
}
def is_alive(self):
try:
return self._current_process.is_alive()
except AssertionError:
return False
def spawn(self):
if self.state not in (ProcessState.IDLE, ProcessState.RESTARTING):
raise Exception("Cannot spawn a worker process until it is idle.")
self._current_process = self.factory(
name=self.name,
target=self.target,
kwargs=self.kwargs,
daemon=True,
)
@property
def pid(self):
return self._current_process.pid
def _terminate_now(self):
logger.debug(
f"{Colors.BLUE}Begin restart termination: "
@@ -161,15 +159,9 @@ class WorkerProcess:
self.name,
self._old_process.pid,
)
misses = 0
# TODO: Add a timeout?
while self.state is not ProcessState.ACKED:
sleep(0.1)
misses += 1
if misses > self.THRESHOLD:
raise TimeoutError(
f"Worker {self.name} failed to come ack within "
f"{self.THRESHOLD / 10} seconds"
)
...
else:
logger.debug(
f"{Colors.BLUE}Process acked. Terminating: "
@@ -181,6 +173,26 @@ class WorkerProcess:
self._old_process.terminate()
delattr(self, "_old_process")
def is_alive(self):
try:
return self._current_process.is_alive()
except AssertionError:
return False
def spawn(self):
if self.state not in (ProcessState.IDLE, ProcessState.RESTARTING):
raise Exception("Cannot spawn a worker process until it is idle.")
self._current_process = self.factory(
name=self.name,
target=self.target,
kwargs=self.kwargs,
daemon=True,
)
@property
def pid(self):
return self._current_process.pid
class Worker:
WORKER_PREFIX = "Sanic-"
@@ -192,26 +204,25 @@ class Worker:
server_settings,
context: BaseContext,
worker_state: Dict[str, Any],
restart_order: RestartOrder,
):
self.ident = ident
self.ident = f"{self.WORKER_PREFIX}{ident}"
self.context = context
self.serve = serve
self.server_settings = server_settings
self.worker_state = worker_state
self.processes: Set[WorkerProcess] = set()
self.restart_order = restart_order
self.create_process()
def create_process(self) -> WorkerProcess:
process = WorkerProcess(
# Need to ignore this typing error - The problem is the
# BaseContext itself has no Process. But, all of its
# implementations do. We can safely ignore as it is a typing
# issue in the standard lib.
factory=self.context.Process, # type: ignore
name=f"{self.WORKER_PREFIX}{self.ident}-{len(self.processes)}",
factory=self.context.Process,
name=f"{self.ident}-{len(self.processes)}",
target=self.serve,
kwargs={**self.server_settings},
worker_state=self.worker_state,
restart_order=self.restart_order,
)
self.processes.add(process)
return process
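
The startup-first restart path above spawns the replacement and only terminates the old process once the new one reports ACKED. A self-contained sketch of that polling-with-threshold loop (the test_reload_delayed test elsewhere in this diff exercises exactly this timeout); is_acked stands in for the process-state check, and the default mirrors THRESHOLD = 300 tenths of a second.

from time import sleep


def wait_until_acked(is_acked, threshold: int = 300) -> None:
    # Poll every 0.1s; give up after threshold / 10 seconds (30s by default),
    # as in the threshold-based ACK wait shown in this diff.
    misses = 0
    while not is_acked():
        sleep(0.1)
        misses += 1
        if misses > threshold:
            raise TimeoutError(
                f"Worker failed to ack within {threshold / 10} seconds"
            )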

View File

@@ -17,8 +17,6 @@ from sanic.worker.loader import AppLoader
class Reloader:
INTERVAL = 1.0 # seconds
def __init__(
self,
publisher: Connection,
@@ -27,7 +25,7 @@ class Reloader:
app_loader: AppLoader,
):
self._publisher = publisher
self.interval = interval or self.INTERVAL
self.interval = interval
self.reload_dirs = reload_dirs
self.run = True
self.app_loader = app_loader

View File

@@ -17,7 +17,6 @@ from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.runners import _serve_http_1, _serve_http_3
from sanic.worker.loader import AppLoader, CertLoader
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.process import Worker, WorkerProcess
def worker_serve(
@@ -80,10 +79,7 @@ def worker_serve(
info.settings["ssl"] = ssl
# When in a worker process, do some init
worker_name = os.environ.get("SANIC_WORKER_NAME")
if worker_name and worker_name.startswith(
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL
):
if os.environ.get("SANIC_WORKER_NAME"):
# Hydrate apps with any passed server info
if monitor_publisher is None:

View File

@@ -6,6 +6,8 @@ import os
import re
import sys
from distutils.util import strtobool
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
@@ -35,25 +37,6 @@ def open_local(paths, mode="r", encoding="utf8"):
return codecs.open(path, mode, encoding)
def str_to_bool(val: str) -> bool:
val = val.lower()
if val in {
"y",
"yes",
"yep",
"yup",
"t",
"true",
"on",
"enable",
"enabled",
"1",
}:
return True
elif val in {"n", "no", "f", "false", "off", "disable", "disabled", "0"}:
return False
else:
raise ValueError(f"Invalid truth value {val}")
with open_local(["sanic", "__version__.py"], encoding="latin1") as fp:
try:
@@ -90,7 +73,6 @@ setup_kwargs = {
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
],
"entry_points": {"console_scripts": ["sanic = sanic.__main__:main"]},
}
@@ -149,13 +131,13 @@ dev_require = tests_require + [
all_require = list(set(dev_require + docs_require))
if str_to_bool(os.environ.get("SANIC_NO_UJSON", "no")):
if strtobool(os.environ.get("SANIC_NO_UJSON", "no")):
print("Installing without uJSON")
requirements.remove(ujson)
tests_require.remove(types_ujson)
# 'nt' means windows OS
if str_to_bool(os.environ.get("SANIC_NO_UVLOOP", "no")):
if strtobool(os.environ.get("SANIC_NO_UVLOOP", "no")):
print("Installing without uvLoop")
requirements.remove(uvloop)

View File

@@ -8,8 +8,8 @@ import uuid
from contextlib import suppress
from logging import LogRecord
from typing import Any, Dict, List, Tuple
from unittest.mock import MagicMock, Mock, patch
from typing import List, Tuple
from unittest.mock import MagicMock
import pytest
@@ -54,7 +54,7 @@ TYPE_TO_GENERATOR_MAP = {
"uuid": lambda: str(uuid.uuid1()),
}
CACHE: Dict[str, Any] = {}
CACHE = {}
class RouteStringGenerator:
@@ -147,7 +147,6 @@ def app(request):
for target, method_name in TouchUp._registry:
CACHE[method_name] = getattr(target, method_name)
app = Sanic(slugify.sub("-", request.node.name))
yield app
for target, method_name in TouchUp._registry:
setattr(target, method_name, CACHE[method_name])
@@ -221,14 +220,3 @@ def sanic_ext(ext_instance): # noqa
yield sanic_ext
with suppress(KeyError):
del sys.modules["sanic_ext"]
@pytest.fixture
def urlopen():
urlopen = Mock()
urlopen.return_value = urlopen
urlopen.__enter__ = Mock(return_value=urlopen)
urlopen.__exit__ = Mock()
urlopen.read = Mock()
with patch("sanic.cli.inspector_client.urlopen", urlopen):
yield urlopen

View File

@@ -348,14 +348,12 @@ def test_app_registry_retrieval_from_multiple():
def test_get_app_does_not_exist():
with pytest.raises(
SanicException,
match=(
"Sanic app name 'does-not-exist' not found.\n"
match="Sanic app name 'does-not-exist' not found.\n"
"App instantiation must occur outside "
"if __name__ == '__main__' "
"block or by using an AppLoader.\nSee "
"https://sanic.dev/en/guide/deployment/app-loader.html"
" for more details."
),
" for more details.",
):
Sanic.get_app("does-not-exist")

View File

@@ -8,7 +8,7 @@ import uvicorn
from sanic import Sanic
from sanic.application.state import Mode
from sanic.asgi import ASGIApp, MockTransport
from sanic.asgi import MockTransport
from sanic.exceptions import BadRequest, Forbidden, ServiceUnavailable
from sanic.request import Request
from sanic.response import json, text
@@ -16,12 +16,6 @@ from sanic.server.websockets.connection import WebSocketConnection
from sanic.signals import RESERVED_NAMESPACES
try:
from unittest.mock import AsyncMock
except ImportError:
from tests.asyncmock import AsyncMock # type: ignore
@pytest.fixture
def message_stack():
return deque()
@@ -564,39 +558,3 @@ async def test_asgi_serve_location(app):
_, response = await app.asgi_client.get("/")
assert response.text == "http://<ASGI>"
@pytest.mark.asyncio
async def test_error_on_lifespan_exception_start(app, caplog):
@app.before_server_start
async def before_server_start(_):
1 / 0
recv = AsyncMock(return_value={"type": "lifespan.startup"})
send = AsyncMock()
app.asgi = True
with caplog.at_level(logging.ERROR):
await ASGIApp.create(app, {"type": "lifespan"}, recv, send)
send.assert_awaited_once_with(
{"type": "lifespan.startup.failed", "message": "division by zero"}
)
@pytest.mark.asyncio
async def test_error_on_lifespan_exception_stop(app: Sanic):
@app.before_server_stop
async def before_server_stop(_):
1 / 0
recv = AsyncMock(return_value={"type": "lifespan.shutdown"})
send = AsyncMock()
app.asgi = True
await app._startup()
await ASGIApp.create(app, {"type": "lifespan"}, recv, send)
send.assert_awaited_once_with(
{"type": "lifespan.shutdown.failed", "message": "division by zero"}
)
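
For reference on the lifespan messages asserted in the tests above, this is the generic ASGI lifespan exchange from the server side, sketched independently of Sanic's ASGIApp: on an error during startup the server reports lifespan.startup.failed with the exception text, which is what the assertions check.

async def lifespan(scope, receive, send):
    # Generic ASGI lifespan protocol: acknowledge startup/shutdown, or report
    # failure with a message string (e.g. "division by zero" above).
    assert scope["type"] == "lifespan"
    while True:
        message = await receive()
        if message["type"] == "lifespan.startup":
            try:
                ...  # run before_server_start listeners here
            except Exception as e:
                await send({"type": "lifespan.startup.failed", "message": str(e)})
            else:
                await send({"type": "lifespan.startup.complete"})
        elif message["type"] == "lifespan.shutdown":
            try:
                ...  # run before_server_stop listeners here
            except Exception as e:
                await send({"type": "lifespan.shutdown.failed", "message": str(e)})
            else:
                await send({"type": "lifespan.shutdown.complete"})
            return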

View File

@@ -4,7 +4,6 @@ import sys
from pathlib import Path
from typing import List, Optional, Tuple
from unittest.mock import patch
import pytest
@@ -12,7 +11,6 @@ from sanic_routing import __version__ as __routing_version__
from sanic import __version__
from sanic.__main__ import main
from sanic.cli.inspector_client import InspectorClient
@pytest.fixture(scope="module", autouse=True)
@@ -119,13 +117,7 @@ def test_error_with_path_as_instance_without_simple_arg(caplog):
),
)
def test_tls_options(cmd: Tuple[str, ...], caplog):
command = [
"fake.server.app",
*cmd,
"--port=9999",
"--debug",
"--single-process",
]
command = ["fake.server.app", *cmd, "--port=9999", "--debug"]
lines = capture(command, caplog)
assert "Goin' Fast @ https://127.0.0.1:9999" in lines
@@ -294,50 +286,3 @@ def test_noisy_exceptions(cmd: str, expected: bool, caplog):
info = read_app_info(lines)
assert info["noisy_exceptions"] is expected
def test_inspector_inspect(urlopen, caplog, capsys):
urlopen.read.return_value = json.dumps(
{
"result": {
"info": {
"packages": ["foo"],
},
"extra": {
"more": "data",
},
"workers": {"Worker-Name": {"some": "state"}},
}
}
).encode()
with patch("sys.argv", ["sanic", "inspect"]):
capture(["inspect"], caplog)
captured = capsys.readouterr()
assert "Inspecting @ http://localhost:6457" in captured.out
assert "Worker-Name" in captured.out
assert captured.err == ""
@pytest.mark.parametrize(
"command,params",
(
(["reload"], {"zero_downtime": False}),
(["reload", "--zero-downtime"], {"zero_downtime": True}),
(["shutdown"], {}),
(["scale", "9"], {"replicas": 9}),
(["foo", "--bar=something"], {"bar": "something"}),
(["foo", "--bar"], {"bar": True}),
(["foo", "--no-bar"], {"bar": False}),
(["foo", "positional"], {"args": ["positional"]}),
(
["foo", "positional", "--bar=something"],
{"args": ["positional"], "bar": "something"},
),
),
)
def test_inspector_command(command, params):
with patch.object(InspectorClient, "request") as client:
with patch("sys.argv", ["sanic", "inspect", *command]):
main()
client.assert_called_once_with(command[0], **params)

View File

@@ -39,7 +39,7 @@ def test_logo_true(app, caplog):
with patch("sys.stdout.isatty") as isatty:
isatty.return_value = True
with caplog.at_level(logging.DEBUG):
app.make_coffee(single_process=True)
app.make_coffee()
# Only in the regular logo
assert " ▄███ █████ ██ " not in caplog.text

View File

@@ -14,7 +14,7 @@ from pytest import MonkeyPatch
from sanic import Sanic
from sanic.config import DEFAULT_CONFIG, Config
from sanic.constants import LocalCertCreator
from sanic.constants import LocalCertCreator, RestartOrder
from sanic.exceptions import PyFileError
@@ -436,3 +436,19 @@ def test_convert_local_cert_creator(passed, expected):
app = Sanic("Test")
assert app.config.LOCAL_CERT_CREATOR is expected
del os.environ["SANIC_LOCAL_CERT_CREATOR"]
@pytest.mark.parametrize(
"passed,expected",
(
("shutdown_first", RestartOrder.SHUTDOWN_FIRST),
("startup_first", RestartOrder.STARTUP_FIRST),
("SHUTDOWN_FIRST", RestartOrder.SHUTDOWN_FIRST),
("STARTUP_FIRST", RestartOrder.STARTUP_FIRST),
),
)
def test_convert_restart_order(passed, expected):
os.environ["SANIC_RESTART_ORDER"] = passed
app = Sanic("Test")
assert app.config.RESTART_ORDER is expected
del os.environ["SANIC_RESTART_ORDER"]
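
A minimal sketch of the case-insensitive string-to-enum conversion these parametrized cases expect from the config layer; convert_restart_order and the local enum are illustrative, not the actual Sanic converter.

from enum import IntEnum, auto


class RestartOrder(IntEnum):  # local stand-in
    SHUTDOWN_FIRST = auto()
    STARTUP_FIRST = auto()


def convert_restart_order(value: str) -> RestartOrder:
    # "shutdown_first", "STARTUP_FIRST", etc. all map to the matching member.
    return RestartOrder[value.upper()]


assert convert_restart_order("startup_first") is RestartOrder.STARTUP_FIRST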

View File

@@ -2,6 +2,7 @@ import asyncio
import sys
from threading import Event
from unittest.mock import Mock
import pytest
@@ -74,7 +75,7 @@ def test_create_named_task(app):
app.stop()
app.run(single_process=True)
app.run()
def test_named_task_called(app):

View File

@@ -3,7 +3,7 @@ from functools import partial
import pytest
from sanic import Sanic
from sanic.middleware import Middleware, MiddlewareLocation
from sanic.middleware import Middleware
from sanic.response import json
@@ -40,86 +40,6 @@ def reset_middleware():
Middleware.reset_count()
def test_add_register_priority(app: Sanic):
def foo(*_):
...
app.register_middleware(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.register_middleware(foo, attach_to="response", priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore
def test_add_register_named_priority(app: Sanic):
def foo(*_):
...
app.register_named_middleware(foo, route_names=["foo"], priority=999)
assert len(app.named_request_middleware) == 1
assert len(app.named_response_middleware) == 0
assert app.named_request_middleware["foo"][0].priority == 999 # type: ignore
app.register_named_middleware(
foo, attach_to="response", route_names=["foo"], priority=999
)
assert len(app.named_request_middleware) == 1
assert len(app.named_response_middleware) == 1
assert app.named_response_middleware["foo"][0].priority == 999 # type: ignore
def test_add_decorator_priority(app: Sanic):
def foo(*_):
...
app.middleware(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.middleware(foo, attach_to="response", priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore
def test_add_convenience_priority(app: Sanic):
def foo(*_):
...
app.on_request(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.on_response(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore
def test_add_conflicting_priority(app: Sanic):
def foo(*_):
...
middleware = Middleware(foo, MiddlewareLocation.REQUEST, priority=998)
app.register_middleware(middleware=middleware, priority=999)
assert app.request_middleware[0].priority == 999 # type: ignore
assert middleware.priority == 998
def test_add_conflicting_priority_named(app: Sanic):
def foo(*_):
...
middleware = Middleware(foo, MiddlewareLocation.REQUEST, priority=998)
app.register_named_middleware(
middleware=middleware, route_names=["foo"], priority=999
)
assert app.named_request_middleware["foo"][0].priority == 999 # type: ignore
assert middleware.priority == 998
@pytest.mark.parametrize(
"expected,priorities",
PRIORITY_TEST_CASES,

View File

@@ -3,7 +3,6 @@ import multiprocessing
import pickle
import random
import signal
import sys
from asyncio import sleep
@@ -12,7 +11,6 @@ import pytest
from sanic_testing.testing import HOST, PORT
from sanic import Blueprint, text
from sanic.compat import use_context
from sanic.log import logger
from sanic.server.socket import configure_socket
@@ -22,10 +20,6 @@ from sanic.server.socket import configure_socket
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_multiprocessing(app):
"""Tests that the number of children we produce is correct"""
# Selects a number at random so we can spot check
@@ -43,7 +37,6 @@ def test_multiprocessing(app):
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(2)
with use_context("fork"):
app.run(HOST, 4120, workers=num_workers, debug=True)
assert len(process_list) == num_workers + 1
@@ -143,10 +136,6 @@ def test_multiprocessing_legacy_unix(app):
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform",
)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_multiprocessing_with_blueprint(app):
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
@@ -166,7 +155,6 @@ def test_multiprocessing_with_blueprint(app):
bp = Blueprint("test_text")
app.blueprint(bp)
with use_context("fork"):
app.run(HOST, 4121, workers=num_workers, debug=True)
assert len(process_list) == num_workers + 1
@@ -225,10 +213,6 @@ def test_pickle_app_with_static(app, protocol):
up_p_app.run(single_process=True)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_main_process_event(app, caplog):
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
@@ -251,7 +235,6 @@ def test_main_process_event(app, caplog):
def main_process_stop2(app, loop):
logger.info("main_process_stop")
with use_context("fork"):
with caplog.at_level(logging.INFO):
app.run(HOST, PORT, workers=num_workers)

View File

@@ -1,5 +1,8 @@
import asyncio
from contextlib import closing
from socket import socket
import pytest
from sanic import Sanic
@@ -620,4 +623,6 @@ def test_streaming_echo():
res = await read_chunk()
assert res == None
app.run(access_log=False, single_process=True)
# Use random port for tests
with closing(socket()) as sock:
app.run(access_log=False)

View File

@@ -1293,24 +1293,6 @@ async def test_request_string_representation_asgi(app):
"------sanic--\r\n",
"filename_\u00A0_test",
),
# Umlaut using NFC normalization (Windows, Linux, Android)
(
"------sanic\r\n"
'content-disposition: form-data; filename*="utf-8\'\'filename_%C3%A4_test"; name="test"\r\n'
"\r\n"
"OK\r\n"
"------sanic--\r\n",
"filename_\u00E4_test",
),
# Umlaut using NFD normalization (MacOS client)
(
"------sanic\r\n"
'content-disposition: form-data; filename*="utf-8\'\'filename_a%CC%88_test"; name="test"\r\n'
"\r\n"
"OK\r\n"
"------sanic--\r\n",
"filename_\u00E4_test", # Sanic should normalize to NFC
),
],
)
def test_request_multipart_files(app, payload, filename):
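
The two umlaut cases above differ only in Unicode normalization: macOS clients tend to send the decomposed (NFD) form, other platforms the composed (NFC) form, and the expectation is that both decode to the same NFC filename. A small standard-library sketch of that equivalence:

import unicodedata
from urllib.parse import unquote

nfc = unquote("filename_%C3%A4_test")   # precomposed U+00E4 (NFC)
nfd = unquote("filename_a%CC%88_test")  # "a" + combining diaeresis U+0308 (NFD)

assert nfc != nfd                                 # different code point sequences
assert unicodedata.normalize("NFC", nfd) == nfc   # same text once normalized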

View File

@@ -803,21 +803,6 @@ def test_static_add_route(app, strict_slashes):
assert response.text == "OK2"
@pytest.mark.parametrize("unquote", [True, False, None])
def test_unquote_add_route(app, unquote):
async def handler1(_, foo):
return text(foo)
app.add_route(handler1, "/<foo>", unquote=unquote)
value = "" if unquote else r"%E5%95%8A"
_, response = app.test_client.get("/啊")
assert response.text == value
_, response = app.test_client.get(r"/%E5%95%8A")
assert response.text == value
def test_dynamic_add_route(app):
results = []

View File

@@ -7,7 +7,7 @@ import pytest
from sanic_routing.exceptions import NotFound
from sanic import Blueprint, Sanic, empty
from sanic import Blueprint
from sanic.exceptions import InvalidSignal, SanicException
@@ -20,31 +20,6 @@ def test_add_signal(app):
assert len(app.signal_router.routes) == 1
def test_add_signal_method_handler(app):
counter = 0
class TestSanic(Sanic):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_signal(
self.after_routing_signal_handler, "http.routing.after"
)
def after_routing_signal_handler(self, *args, **kwargs):
nonlocal counter
counter += 1
app = TestSanic("Test")
assert len(app.signal_router.routes) == 1
@app.route("/")
async def handler(_):
return empty()
app.test_client.get("/")
assert counter == 1
def test_add_signal_decorator(app):
@app.signal("foo.bar.baz")
def sync_signal(*_):
@@ -314,10 +289,10 @@ async def test_dispatch_signal_triggers_event_on_bp(app):
waiter = bp.event("foo.bar.baz")
assert isawaitable(waiter)
fut = do_wait()
fut = asyncio.ensure_future(do_wait())
for signal in signal_group:
signal.ctx.event.set()
await asyncio.gather(fut)
await fut
assert bp_counter == 1

View File

@@ -2,7 +2,6 @@ import logging
import os
import ssl
import subprocess
import sys
from contextlib import contextmanager
from multiprocessing import Event
@@ -18,7 +17,6 @@ import sanic.http.tls.creators
from sanic import Sanic
from sanic.application.constants import Mode
from sanic.compat import use_context
from sanic.constants import LocalCertCreator
from sanic.exceptions import SanicException
from sanic.helpers import _default
@@ -428,12 +426,7 @@ def test_logger_vhosts(caplog):
app.stop()
with caplog.at_level(logging.INFO):
app.run(
host="127.0.0.1",
port=42102,
ssl=[localhost_dir, sanic_dir],
single_process=True,
)
app.run(host="127.0.0.1", port=42102, ssl=[localhost_dir, sanic_dir])
logmsg = [
m for s, l, m in caplog.record_tuples if m.startswith("Certificate")
@@ -649,10 +642,6 @@ def test_sanic_ssl_context_create():
assert isinstance(sanic_context, SanicSSLContext)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_ssl_in_multiprocess_mode(app: Sanic, caplog):
ssl_dict = {"cert": localhost_cert, "key": localhost_key}
@@ -668,7 +657,6 @@ def test_ssl_in_multiprocess_mode(app: Sanic, caplog):
app.stop()
assert not event.is_set()
with use_context("fork"):
with caplog.at_level(logging.INFO):
app.run(ssl=ssl_dict)
assert event.is_set()

View File

@@ -1,7 +1,6 @@
# import asyncio
import logging
import os
import sys
from asyncio import AbstractEventLoop, sleep
from string import ascii_lowercase
@@ -13,7 +12,6 @@ import pytest
from pytest import LogCaptureFixture
from sanic import Sanic
from sanic.compat import use_context
from sanic.request import Request
from sanic.response import text
@@ -176,9 +174,7 @@ def handler(request: Request):
async def client(app: Sanic, loop: AbstractEventLoop):
try:
transport = httpx.AsyncHTTPTransport(uds=SOCKPATH)
async with httpx.AsyncClient(transport=transport) as client:
async with httpx.AsyncClient(uds=SOCKPATH) as client:
r = await client.get("http://myhost.invalid/")
assert r.status_code == 200
assert r.text == os.path.abspath(SOCKPATH)
@@ -187,12 +183,7 @@ async def client(app: Sanic, loop: AbstractEventLoop):
app.stop()
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_unix_connection_multiple_workers():
with use_context("fork"):
app_multi = Sanic(name="test")
app_multi.get("/")(handler)
app_multi.listener("after_server_start")(client)

View File

@@ -1,20 +1,14 @@
try: # no cov
from ujson import dumps
except ModuleNotFoundError: # no cov
from json import dumps # type: ignore
import json
from datetime import datetime
from logging import ERROR, INFO
from socket import AF_INET, SOCK_STREAM, timeout
from unittest.mock import Mock, patch
from urllib.error import URLError
import pytest
from sanic_testing import TestManager
from sanic.cli.inspector_client import InspectorClient
from sanic.helpers import Default
from sanic.log import Colors
from sanic.worker.inspector import Inspector
from sanic.worker.inspector import Inspector, inspect
DATA = {
@@ -26,90 +20,121 @@ DATA = {
},
"workers": {"Worker-Name": {"some": "state"}},
}
FULL_SERIALIZED = dumps({"result": DATA})
OUT_SERIALIZED = dumps(DATA)
SERIALIZED = json.dumps(DATA)
class FooInspector(Inspector):
async def foo(self, bar):
return f"bar is {bar}"
def test_inspector_stop():
inspector = Inspector(Mock(), {}, {}, "", 1)
assert inspector.run is True
inspector.stop()
assert inspector.run is False
@pytest.fixture
def publisher():
publisher = Mock()
return publisher
@patch("sanic.worker.inspector.sys.stdout.write")
@patch("sanic.worker.inspector.socket")
@pytest.mark.parametrize("command", ("foo", "raw", "pretty"))
def test_send_inspect(socket: Mock, write: Mock, command: str):
socket.return_value = socket
socket.__enter__.return_value = socket
socket.recv.return_value = SERIALIZED.encode()
inspect("localhost", 9999, command)
socket.sendall.assert_called_once_with(command.encode())
socket.recv.assert_called_once_with(4096)
socket.connect.assert_called_once_with(("localhost", 9999))
socket.assert_called_once_with(AF_INET, SOCK_STREAM)
@pytest.fixture
def inspector(publisher):
inspector = FooInspector(
publisher, {}, {}, "localhost", 9999, "", Default(), Default()
)
inspector(False)
return inspector
@pytest.fixture
def http_client(inspector):
manager = TestManager(inspector.app)
return manager.test_client
@pytest.mark.parametrize("command", ("info",))
@patch("sanic.cli.inspector_client.sys.stdout.write")
def test_send_inspect(write, urlopen, command: str):
urlopen.read.return_value = FULL_SERIALIZED.encode()
InspectorClient("localhost", 9999, False, False, None).do(command)
if command == "raw":
write.assert_called_once_with(SERIALIZED)
elif command == "pretty":
write.assert_called()
write.reset_mock()
InspectorClient("localhost", 9999, False, True, None).do(command)
write.assert_called_with(OUT_SERIALIZED + "\n")
else:
write.assert_not_called()
@patch("sanic.cli.inspector_client.sys")
def test_send_inspect_conn_refused(sys: Mock, urlopen):
urlopen.side_effect = URLError("")
InspectorClient("localhost", 9999, False, False, None).do("info")
@patch("sanic.worker.inspector.sys")
@patch("sanic.worker.inspector.socket")
def test_send_inspect_conn_refused(socket: Mock, sys: Mock, caplog):
with caplog.at_level(INFO):
socket.return_value = socket
socket.__enter__.return_value = socket
socket.connect.side_effect = ConnectionRefusedError()
inspect("localhost", 9999, "foo")
socket.close.assert_called_once()
sys.exit.assert_called_once_with(1)
message = (
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}http://localhost:9999{Colors.END}\n"
f"{Colors.YELLOW}('localhost', 9999){Colors.END}\n"
"Either the application is not running, or it did not start "
"an inspector instance.\n<urlopen error >\n"
"an inspector instance."
)
sys.exit.assert_called_once_with(1)
sys.stderr.write.assert_called_once_with(message)
assert ("sanic.error", ERROR, message) in caplog.record_tuples
def test_run_inspector_reload(publisher, http_client):
_, response = http_client.post("/reload")
assert response.status == 200
publisher.send.assert_called_once_with("__ALL_PROCESSES__:")
@patch("sanic.worker.inspector.configure_socket")
@pytest.mark.parametrize("action", (b"reload", b"shutdown", b"foo"))
def test_run_inspector(configure_socket: Mock, action: bytes):
sock = Mock()
conn = Mock()
conn.recv.return_value = action
configure_socket.return_value = sock
inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
inspector.reload = Mock() # type: ignore
inspector.shutdown = Mock() # type: ignore
inspector.state_to_json = Mock(return_value="foo") # type: ignore
def accept():
inspector.run = False
return conn, ...
sock.accept = accept
inspector()
configure_socket.assert_called_once_with(
{"host": "localhost", "port": 9999, "unix": None, "backlog": 1}
)
conn.recv.assert_called_with(64)
if action == b"reload":
conn.send.assert_called_with(b"\n")
inspector.reload.assert_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_not_called()
elif action == b"shutdown":
conn.send.assert_called_with(b"\n")
inspector.reload.assert_not_called()
inspector.shutdown.assert_called()
inspector.state_to_json.assert_not_called()
else:
conn.send.assert_called_with(b'"foo"')
inspector.reload.assert_not_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_called()
def test_run_inspector_reload_zero_downtime(publisher, http_client):
_, response = http_client.post("/reload", json={"zero_downtime": True})
assert response.status == 200
publisher.send.assert_called_once_with("__ALL_PROCESSES__::STARTUP_FIRST")
@patch("sanic.worker.inspector.configure_socket")
def test_accept_timeout(configure_socket: Mock):
sock = Mock()
configure_socket.return_value = sock
inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
inspector.reload = Mock() # type: ignore
inspector.shutdown = Mock() # type: ignore
inspector.state_to_json = Mock(return_value="foo") # type: ignore
def accept():
inspector.run = False
raise timeout
def test_run_inspector_shutdown(publisher, http_client):
_, response = http_client.post("/shutdown")
assert response.status == 200
publisher.send.assert_called_once_with("__TERMINATE__")
sock.accept = accept
inspector()
def test_run_inspector_scale(publisher, http_client):
_, response = http_client.post("/scale", json={"replicas": 4})
assert response.status == 200
publisher.send.assert_called_once_with("__SCALE__:4")
def test_run_inspector_arbitrary(http_client):
_, response = http_client.post("/foo", json={"bar": 99})
assert response.status == 200
assert response.json == {"meta": {"action": "foo"}, "result": "bar is 99"}
inspector.reload.assert_not_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_not_called()
def test_state_to_json():
@@ -117,10 +142,8 @@ def test_state_to_json():
now_iso = now.isoformat()
app_info = {"app": "hello"}
worker_state = {"Test": {"now": now, "nested": {"foo": now}}}
inspector = Inspector(
Mock(), app_info, worker_state, "", 0, "", Default(), Default()
)
state = inspector._state_to_json()
inspector = Inspector(Mock(), app_info, worker_state, "", 0)
state = inspector.state_to_json()
assert state == {
"info": app_info,
@@ -128,14 +151,17 @@ def test_state_to_json():
}
def test_run_inspector_authentication():
inspector = Inspector(
Mock(), {}, {}, "", 0, "super-secret", Default(), Default()
)(False)
manager = TestManager(inspector.app)
_, response = manager.test_client.get("/")
assert response.status == 401
_, response = manager.test_client.get(
"/", headers={"Authorization": "Bearer super-secret"}
)
assert response.status == 200
def test_reload():
publisher = Mock()
inspector = Inspector(publisher, {}, {}, "", 0)
inspector.reload()
publisher.send.assert_called_once_with("__ALL_PROCESSES__:")
def test_shutdown():
publisher = Mock()
inspector = Inspector(publisher, {}, {}, "", 0)
inspector.shutdown()
publisher.send.assert_called_once_with("__TERMINATE__")
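
The tests above drive the inspector through Sanic's test client; as a rough sketch of how the same HTTP endpoints could be hit from the outside, assuming an inspector listening on localhost:6457 with no auth token (the default port shown in the CLI test earlier), here is a plain urllib client. The helper name and defaults are assumptions; only the route names and JSON bodies come from the tests.

import json
from urllib.request import Request, urlopen


def inspector_post(action: str, host: str = "localhost", port: int = 6457, **params):
    # POST /<action> with a JSON body, e.g. /reload {"zero_downtime": true}
    # or /scale {"replicas": 4}, matching the routes exercised above.
    req = Request(
        f"http://{host}:{port}/{action}",
        data=json.dumps(params).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urlopen(req) as resp:  # assumes the inspector is actually running
        return json.loads(resp.read())


# Example (assumption: a running app with the inspector enabled):
# inspector_post("reload", zero_downtime=True)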

View File

@@ -1,21 +1,12 @@
from logging import ERROR, INFO
from signal import SIGINT
from signal import SIGINT, SIGKILL
from unittest.mock import Mock, call, patch
import pytest
from sanic.compat import OS_IS_WINDOWS
from sanic.exceptions import ServerKilled
from sanic.worker.constants import RestartOrder
from sanic.worker.manager import WorkerManager
if not OS_IS_WINDOWS:
from signal import SIGKILL
else:
SIGKILL = SIGINT
def fake_serve():
...
@@ -23,7 +14,14 @@ def fake_serve():
def test_manager_no_workers():
message = "Cannot serve with no workers"
with pytest.raises(RuntimeError, match=message):
WorkerManager(0, fake_serve, {}, Mock(), (Mock(), Mock()), {})
WorkerManager(
0,
fake_serve,
{},
Mock(),
(Mock(), Mock()),
{},
)
@patch("sanic.worker.process.os")
@@ -32,8 +30,17 @@ def test_terminate(os_mock: Mock):
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
assert manager.terminated is False
manager.terminate()
assert manager.terminated is True
os_mock.kill.assert_called_once_with(1234, SIGINT)
@@ -44,7 +51,14 @@ def test_shutown(os_mock: Mock):
process.is_alive.return_value = True
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
manager.shutdown()
os_mock.kill.assert_called_once_with(1234, SIGINT)
@@ -55,36 +69,32 @@ def test_kill(os_mock: Mock):
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
with pytest.raises(ServerKilled):
manager.kill()
os_mock.kill.assert_called_once_with(1234, SIGKILL)
@patch("sanic.worker.process.os")
@patch("sanic.worker.manager.os")
def test_shutdown_signal_send_kill(
manager_os_mock: Mock, process_os_mock: Mock
):
process = Mock()
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
assert manager._shutting_down is False
manager.shutdown_signal(SIGINT, None)
assert manager._shutting_down is True
process_os_mock.kill.assert_called_once_with(1234, SIGINT)
manager.shutdown_signal(SIGINT, None)
manager_os_mock.kill.assert_called_once_with(1234, SIGKILL)
def test_restart_all():
p1 = Mock()
p2 = Mock()
context = Mock()
context.Process.side_effect = [p1, p2, p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
assert len(list(manager.transient_processes))
manager.restart()
p1.terminate.assert_called_once()
@@ -119,187 +129,91 @@ def test_restart_all():
)
@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_all(zero_downtime):
def test_monitor_all():
p1 = Mock()
p2 = Mock()
sub = Mock()
incoming = (
"__ALL_PROCESSES__::STARTUP_FIRST"
if zero_downtime
else "__ALL_PROCESSES__:"
)
sub.recv.side_effect = [incoming, ""]
sub.recv.side_effect = ["__ALL_PROCESSES__:", ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()
restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=None,
reloaded_files="",
restart_order=restart_order,
process_names=None, reloaded_files=""
)
@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_all_with_files(zero_downtime):
def test_monitor_all_with_files():
p1 = Mock()
p2 = Mock()
sub = Mock()
incoming = (
"__ALL_PROCESSES__:foo,bar:STARTUP_FIRST"
if zero_downtime
else "__ALL_PROCESSES__:foo,bar"
)
sub.recv.side_effect = [incoming, ""]
sub.recv.side_effect = ["__ALL_PROCESSES__:foo,bar", ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()
restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=None,
reloaded_files="foo,bar",
restart_order=restart_order,
process_names=None, reloaded_files="foo,bar"
)
@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_one_process(zero_downtime):
def test_monitor_one_process():
p1 = Mock()
p1.name = "Testing"
p2 = Mock()
sub = Mock()
incoming = (
f"{p1.name}:foo,bar:STARTUP_FIRST"
if zero_downtime
else f"{p1.name}:foo,bar"
)
sub.recv.side_effect = [incoming, ""]
sub.recv.side_effect = [f"{p1.name}:foo,bar", ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()
restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=[p1.name],
reloaded_files="foo,bar",
restart_order=restart_order,
process_names=[p1.name], reloaded_files="foo,bar"
)
def test_shutdown_signal():
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, Mock(), (pub, Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
Mock(),
(pub, Mock()),
{},
)
manager.shutdown = Mock() # type: ignore
manager.shutdown_signal(SIGINT, None)
pub.send.assert_called_with(None)
manager.shutdown.assert_called_once_with()
def test_shutdown_servers(caplog):
p1 = Mock()
p1.pid = 1234
context = Mock()
context.Process.side_effect = [p1]
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, context, (pub, Mock()), {})
with patch("os.kill") as kill:
with caplog.at_level(ERROR):
manager.shutdown_server()
kill.assert_called_once_with(1234, SIGINT)
kill.reset_mock()
assert not caplog.record_tuples
manager.shutdown_server()
kill.assert_not_called()
assert (
"sanic.error",
ERROR,
"Server shutdown failed because a server was not found.",
) in caplog.record_tuples
def test_shutdown_servers_named():
p1 = Mock()
p1.pid = 1234
p2 = Mock()
p2.pid = 6543
context = Mock()
context.Process.side_effect = [p1, p2]
pub = Mock()
manager = WorkerManager(2, fake_serve, {}, context, (pub, Mock()), {})
with patch("os.kill") as kill:
with pytest.raises(KeyError):
manager.shutdown_server("foo")
manager.shutdown_server("Server-1")
kill.assert_called_once_with(6543, SIGINT)
def test_scale(caplog):
p1 = Mock()
p1.pid = 1234
p2 = Mock()
p2.pid = 3456
p3 = Mock()
p3.pid = 5678
context = Mock()
context.Process.side_effect = [p1, p2, p3]
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, context, (pub, Mock()), {})
assert len(manager.transient) == 1
manager.scale(3)
assert len(manager.transient) == 3
with patch("os.kill") as kill:
manager.scale(2)
assert len(manager.transient) == 2
manager.scale(1)
assert len(manager.transient) == 1
assert kill.call_count == 2
with caplog.at_level(INFO):
manager.scale(1)
assert (
"sanic.root",
INFO,
"No change needed. There are already 1 workers.",
) in caplog.record_tuples
with pytest.raises(ValueError, match=r"Cannot scale to 0 workers\."):
manager.scale(0)
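
A rough sketch of the scale-up / scale-down behavior this test expects, with a plain list standing in for the manager's transient-worker bookkeeping; the helper and its callables are illustrative only.

def scale(workers: list, num: int, make_worker, stop_worker) -> None:
    # Grow by creating new workers, shrink by stopping the most recent ones,
    # refuse zero, and do nothing when the count already matches.
    if num <= 0:
        raise ValueError("Cannot scale to 0 workers.")
    while len(workers) < num:
        workers.append(make_worker())
    while len(workers) > num:
        stop_worker(workers.pop())


workers: list = ["w0"]
scale(workers, 3, make_worker=lambda: object(), stop_worker=lambda w: None)
assert len(workers) == 3
scale(workers, 1, make_worker=lambda: object(), stop_worker=lambda w: None)
assert len(workers) == 1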

View File

@@ -1,5 +1,3 @@
import sys
from multiprocessing import Event
from os import environ, getpid
from typing import Any, Dict, Type, Union
@@ -8,7 +6,6 @@ from unittest.mock import Mock
import pytest
from sanic import Sanic
from sanic.compat import use_context
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.state import WorkerState
@@ -31,10 +28,6 @@ def m(monitor_publisher, worker_state):
del environ["SANIC_WORKER_NAME"]
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_has_multiplexer_default(app: Sanic):
event = Event()
@@ -48,7 +41,6 @@ def test_has_multiplexer_default(app: Sanic):
app.shared_ctx.event.set()
app.stop()
with use_context("fork"):
app.run()
assert event.is_set()
@@ -98,17 +90,17 @@ def test_ack(worker_state: Dict[str, Any], m: WorkerMultiplexer):
def test_restart_self(monitor_publisher: Mock, m: WorkerMultiplexer):
m.restart()
monitor_publisher.send.assert_called_once_with("Test:")
monitor_publisher.send.assert_called_once_with("Test")
def test_restart_foo(monitor_publisher: Mock, m: WorkerMultiplexer):
m.restart("foo")
monitor_publisher.send.assert_called_once_with("foo:")
monitor_publisher.send.assert_called_once_with("foo")
def test_reload_alias(monitor_publisher: Mock, m: WorkerMultiplexer):
m.reload()
monitor_publisher.send.assert_called_once_with("Test:")
monitor_publisher.send.assert_called_once_with("Test")
def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
@@ -116,11 +108,6 @@ def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
monitor_publisher.send.assert_called_once_with("__TERMINATE__")
def test_scale(monitor_publisher: Mock, m: WorkerMultiplexer):
m.scale(99)
monitor_publisher.send.assert_called_once_with("__SCALE__:99")
def test_properties(
monitor_publisher: Mock, worker_state: Dict[str, Any], m: WorkerMultiplexer
):
@@ -135,20 +122,10 @@ def test_properties(
@pytest.mark.parametrize(
"params,expected",
(
({}, "Test:"),
({"name": "foo"}, "foo:"),
({}, "Test"),
({"name": "foo"}, "foo"),
({"all_workers": True}, "__ALL_PROCESSES__:"),
({"zero_downtime": True}, "Test::STARTUP_FIRST"),
({"name": "foo", "all_workers": True}, ValueError),
({"name": "foo", "zero_downtime": True}, "foo::STARTUP_FIRST"),
(
{"all_workers": True, "zero_downtime": True},
"__ALL_PROCESSES__::STARTUP_FIRST",
),
(
{"name": "foo", "all_workers": True, "zero_downtime": True},
ValueError,
),
),
)
def test_restart_params(

View File

@@ -5,15 +5,14 @@ import threading
from asyncio import Event
from logging import DEBUG
from pathlib import Path
from time import sleep
from unittest.mock import Mock
import pytest
from sanic.app import Sanic
from sanic.worker.constants import ProcessState, RestartOrder
from sanic.constants import RestartOrder
from sanic.worker.loader import AppLoader
from sanic.worker.process import WorkerProcess
from sanic.worker.process import ProcessState, WorkerProcess
from sanic.worker.reloader import Reloader
@@ -77,7 +76,7 @@ def test_iter_files():
"order,expected",
(
(
RestartOrder.SHUTDOWN_FIRST,
"shutdown_first",
[
"Restarting a process",
"Begin restart termination",
@@ -85,7 +84,7 @@ def test_iter_files():
],
),
(
RestartOrder.STARTUP_FIRST,
"startup_first",
[
"Restarting a process",
"Starting a process",
@@ -97,6 +96,7 @@ def test_iter_files():
),
)
def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
current_process = Mock()
worker_process = WorkerProcess(
lambda **_: current_process,
@@ -104,6 +104,7 @@ def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
lambda **_: ...,
{},
{},
RestartOrder[order.upper()],
)
def start(self):
@@ -114,7 +115,7 @@ def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
monkeypatch.setattr(threading.Thread, "start", start)
with caplog.at_level(DEBUG):
worker_process.restart(restart_order=order)
worker_process.restart()
ansi = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
@@ -129,32 +130,6 @@ def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
monkeypatch.setattr(threading.Thread, "start", orig)
def test_reload_delayed(monkeypatch):
WorkerProcess.THRESHOLD = 1
current_process = Mock()
worker_process = WorkerProcess(
lambda **_: current_process,
"Test",
lambda **_: ...,
{},
{},
)
def start(self):
sleep(0.2)
self._target()
orig = threading.Thread.start
monkeypatch.setattr(threading.Thread, "start", start)
message = "Worker Test failed to come ack within 0.1 seconds"
with pytest.raises(TimeoutError, match=message):
worker_process.restart(restart_order=RestartOrder.STARTUP_FIRST)
monkeypatch.setattr(threading.Thread, "start", orig)
def test_reloader_triggers_start_stop_listeners(
app: Sanic, app_loader: AppLoader
):

View File

@@ -1,25 +0,0 @@
from unittest.mock import patch
import pytest
from sanic import Sanic
@pytest.mark.parametrize(
"start_method,platform,expected",
(
(None, "linux", "spawn"),
(None, "other", "spawn"),
("fork", "linux", "fork"),
("fork", "other", "fork"),
("forkserver", "linux", "forkserver"),
("forkserver", "other", "forkserver"),
("spawn", "linux", "spawn"),
("spawn", "other", "spawn"),
),
)
def test_get_context(start_method, platform, expected):
if start_method:
Sanic.start_method = start_method
with patch("sys.platform", platform):
assert Sanic._get_startup_method() == expected

View File

@@ -8,7 +8,6 @@ import pytest
from sanic.app import Sanic
from sanic.worker.loader import AppLoader
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.process import Worker, WorkerProcess
from sanic.worker.serve import worker_serve
@@ -41,9 +40,7 @@ def test_config_app(mock_app: Mock):
def test_bad_process(mock_app: Mock, caplog):
environ["SANIC_WORKER_NAME"] = (
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL + "-FOO"
)
environ["SANIC_WORKER_NAME"] = "FOO"
message = "No restart publisher found in worker process"
with pytest.raises(RuntimeError, match=message):
@@ -61,9 +58,7 @@ def test_bad_process(mock_app: Mock, caplog):
def test_has_multiplexer(app: Sanic):
environ["SANIC_WORKER_NAME"] = (
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL + "-FOO"
)
environ["SANIC_WORKER_NAME"] = "FOO"
Sanic.register_app(app)
with patch("sanic.worker.serve._serve_http_1"):
@@ -102,13 +97,12 @@ def test_serve_app_factory(wm: Mock, mock_app):
@patch("sanic.mixins.startup.WorkerManager")
@patch("sanic.mixins.startup.Inspector")
@pytest.mark.parametrize("config", (True, False))
def test_serve_with_inspector(
WorkerManager: Mock, mock_app: Mock, config: bool
Inspector: Mock, WorkerManager: Mock, mock_app: Mock, config: bool
):
Inspector = Mock()
mock_app.config.INSPECTOR = config
mock_app.inspector_class = Inspector
inspector = Mock()
Inspector.return_value = inspector
WorkerManager.return_value = WorkerManager

View File

@@ -1,11 +1,11 @@
[tox]
envlist = py37, py38, py39, py310, py311, pyNightly, pypy37, {py37,py38,py39,py310,py311,pyNightly,pypy37}-no-ext, lint, check, security, docs, type-checking
envlist = py37, py38, py39, py310, pyNightly, pypy37, {py37,py38,py39,py310,pyNightly,pypy37}-no-ext, lint, check, security, docs, type-checking
[testenv]
usedevelop = true
setenv =
{py37,py38,py39,py310,py311,pyNightly}-no-ext: SANIC_NO_UJSON=1
{py37,py38,py39,py310,py311,pyNightly}-no-ext: SANIC_NO_UVLOOP=1
{py37,py38,py39,py310,pyNightly}-no-ext: SANIC_NO_UJSON=1
{py37,py38,py39,py310,pyNightly}-no-ext: SANIC_NO_UVLOOP=1
extras = test, http3
deps =
httpx==0.23