Compare commits: accept-enh ... monitor-re

10 Commits

| Author | SHA1 | Date |
|---|---|---|
| | f83553be9e | |
| | 0c3527b8b2 | |
| | 232bbce1e0 | |
| | f034a31d29 | |
| | 3536c0af27 | |
| | 0e7ee94574 | |
| | 4e4e2b036b | |
| | 7f682cea02 | |
| | ae1669cd8f | |
| | 3c4c136090 | |
@@ -9,7 +9,6 @@ omit =
sanic/simple.py
sanic/utils.py
sanic/cli
sanic/pages

[html]
directory = coverage
.github/workflows/pr-bandit.yml (1 change, vendored)

@@ -20,7 +20,6 @@ jobs:
- { python-version: 3.8, tox-env: security}
- { python-version: 3.9, tox-env: security}
- { python-version: "3.10", tox-env: security}
- { python-version: "3.11", tox-env: security}
steps:
- name: Checkout the repository
uses: actions/checkout@v2
.github/workflows/pr-docs.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
config:
- {python-version: "3.10", tox-env: "docs"}
- {python-version: "3.8", tox-env: "docs"}
fail-fast: false
.github/workflows/pr-linter.yml (2 changes, vendored)

@@ -16,7 +16,7 @@ jobs:
matrix:
os: [ubuntu-latest]
config:
- { python-version: "3.10", tox-env: lint}
- { python-version: 3.8, tox-env: lint}
steps:
- name: Checkout the repository
uses: actions/checkout@v2
.github/workflows/pr-python311.yml (47 changes, vendored)

@@ -1,47 +0,0 @@
name: Python 3.11 Tests
on:
pull_request:
branches:
- main
- "*LTS"
types: [opened, synchronize, reopened, ready_for_review]

jobs:
testPy311:
if: github.event.pull_request.draft == false
name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
# os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest]
config:
- {
python-version: "3.11",
tox-env: py311,
ignore-error-flake: "false",
command-timeout: "0",
}
- {
python-version: "3.11",
tox-env: py311-no-ext,
ignore-error-flake: "true",
command-timeout: "600000",
}
steps:
- name: Checkout the Repository
uses: actions/checkout@v2
id: checkout-branch

- name: Run Unit Tests
uses: harshanarayana/custom-actions@main
with:
python-version: ${{ matrix.config.python-version }}
test-infra-tool: tox
test-infra-version: latest
action: tests
test-additional-args: "-e=${{ matrix.config.tox-env }},-vv=''"
experimental-ignore-error: "${{ matrix.config.ignore-error-flake }}"
command-timeout: "${{ matrix.config.command-timeout }}"
test-failure-retry: "3"
.github/workflows/pr-type-check.yml (1 change, vendored)

@@ -20,7 +20,6 @@ jobs:
- { python-version: 3.8, tox-env: type-checking}
- { python-version: 3.9, tox-env: type-checking}
- { python-version: "3.10", tox-env: type-checking}
- { python-version: "3.11", tox-env: type-checking}
steps:
- name: Checkout the repository
uses: actions/checkout@v2
.github/workflows/pr-windows.yml (1 change, vendored)

@@ -19,7 +19,6 @@ jobs:
- { python-version: 3.8, tox-env: py38-no-ext }
- { python-version: 3.9, tox-env: py39-no-ext }
- { python-version: "3.10", tox-env: py310-no-ext }
- { python-version: "3.11", tox-env: py310-no-ext }
- { python-version: pypy-3.7, tox-env: pypy37-no-ext }

steps:
.github/workflows/publish-images.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
strategy:
fail-fast: true
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.7", "3.8", "3.9", "3.10"]

steps:
- name: Checkout repository
.github/workflows/publish-package.yml (2 changes, vendored)

@@ -11,7 +11,7 @@ jobs:
strategy:
fail-fast: true
matrix:
python-version: ["3.10"]
python-version: ["3.8"]

steps:
- name: Checkout Repository
@@ -313,8 +313,8 @@ Version 21.3.0
`#2074 <https://github.com/sanic-org/sanic/pull/2074>`_
Performance adjustments in ``handle_request_``

Version 20.12.3
---------------
Version 20.12.3 🔷
------------------

`Current LTS version`

@@ -350,8 +350,8 @@ Version 19.12.5
`#2027 <https://github.com/sanic-org/sanic/pull/2027>`_
Remove old chardet requirement, add in hard multidict requirement

Version 20.12.0
---------------
Version 20.12.0 🔹
-----------------

**Features**
@@ -7,15 +7,14 @@ Sanic releases long term support release once a year in December. LTS releases r

| Version | LTS | Supported |
| ------- | ------------- | ----------------------- |
| 22.12 | until 2024-12 | :white_check_mark: |
| 22.9 | | :x: |
| 22.9 | | :white_check_mark: |
| 22.6 | | :x: |
| 22.3 | | :x: |
| 21.12 | until 2023-12 | :ballot_box_with_check: |
| 21.12 | until 2023-12 | :white_check_mark: |
| 21.9 | | :x: |
| 21.6 | | :x: |
| 21.3 | | :x: |
| 20.12 | | :x: |
| 20.12 | until 2022-12 | :ballot_box_with_check: |
| 20.9 | | :x: |
| 20.6 | | :x: |
| 20.3 | | :x: |
@@ -17,8 +17,7 @@ ignore:
- "sanic/compat.py"
- "sanic/simple.py"
- "sanic/utils.py"
- "sanic/cli/"
- "sanic/pages/"
- "sanic/cli"
- ".github/"
- "changelogs/"
- "docker/"
@@ -1,7 +1,6 @@
📜 Changelog
============

.. mdinclude:: ./releases/22/22.12.md
.. mdinclude:: ./releases/22/22.9.md
.. mdinclude:: ./releases/22/22.6.md
.. mdinclude:: ./releases/22/22.3.md
@@ -1,55 +0,0 @@
## Version 22.12.0 🔶

_Current version_

### Features

- [#2569](https://github.com/sanic-org/sanic/pull/2569) Add `JSONResponse` class with some convenient methods when updating a response object
- [#2598](https://github.com/sanic-org/sanic/pull/2598) Change `uvloop` requirement to `>=0.15.0`
- [#2609](https://github.com/sanic-org/sanic/pull/2609) Add compatibility with `websockets` v11.0
- [#2610](https://github.com/sanic-org/sanic/pull/2610) Kill server early on worker error
  - Raise deadlock timeout to 30s
- [#2617](https://github.com/sanic-org/sanic/pull/2617) Scale number of running server workers
- [#2621](https://github.com/sanic-org/sanic/pull/2621) [#2634](https://github.com/sanic-org/sanic/pull/2634) Send `SIGKILL` on subsequent `ctrl+c` to force worker exit
- [#2622](https://github.com/sanic-org/sanic/pull/2622) Add API to restart all workers from the multiplexer
- [#2624](https://github.com/sanic-org/sanic/pull/2624) Default to `spawn` for all subprocesses unless specifically set:
  ```python
  from sanic import Sanic

  Sanic.start_method = "fork"
  ```
- [#2625](https://github.com/sanic-org/sanic/pull/2625) Filename normalisation of form-data/multipart file uploads
- [#2626](https://github.com/sanic-org/sanic/pull/2626) Move to HTTP Inspector:
  - Remote access to inspect running Sanic instances
  - TLS support for encrypted calls to Inspector
  - Authentication to Inspector with API key
  - Ability to extend Inspector with custom commands
- [#2632](https://github.com/sanic-org/sanic/pull/2632) Control order of restart operations
- [#2633](https://github.com/sanic-org/sanic/pull/2633) Move reload interval to class variable
- [#2636](https://github.com/sanic-org/sanic/pull/2636) Add `priority` to `register_middleware` method
- [#2639](https://github.com/sanic-org/sanic/pull/2639) Add `unquote` to `add_route` method
- [#2640](https://github.com/sanic-org/sanic/pull/2640) ASGI websockets to receive `text` or `bytes`


### Bugfixes

- [#2607](https://github.com/sanic-org/sanic/pull/2607) Force socket shutdown before close to allow rebinding
- [#2590](https://github.com/sanic-org/sanic/pull/2590) Use actual `StrEnum` in Python 3.11+
- [#2615](https://github.com/sanic-org/sanic/pull/2615) Ensure middleware executes only once per request timeout
- [#2627](https://github.com/sanic-org/sanic/pull/2627) Crash ASGI application on lifespan failure
- [#2635](https://github.com/sanic-org/sanic/pull/2635) Resolve error with low-level server creation on Windows


### Deprecations and Removals

- [#2608](https://github.com/sanic-org/sanic/pull/2608) [#2630](https://github.com/sanic-org/sanic/pull/2630) Signal conditions and triggers saved on `signal.extra`
- [#2626](https://github.com/sanic-org/sanic/pull/2626) Move to HTTP Inspector
  - 🚨 *BREAKING CHANGE*: Moves the Inspector to a Sanic app from a simple TCP socket with a custom protocol
  - *DEPRECATE*: The `--inspect*` commands have been deprecated in favor of `inspect ...` commands
- [#2628](https://github.com/sanic-org/sanic/pull/2628) Replace deprecated `distutils.strtobool`


### Developer infrastructure

- [#2612](https://github.com/sanic-org/sanic/pull/2612) Add CI testing for Python 3.11
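To make the Inspector change concrete: per the `InspectorClient` code later in this diff, the Inspector is now a plain HTTP service, so it can be reached without the CLI. A minimal sketch; the host, port, API key, and replica count here are illustrative assumptions, not values from this PR:

```python
import json
from urllib.request import Request, urlopen

# Defaults mirror the INSPECTOR_HOST / INSPECTOR_PORT values in the config diff.
base = "http://localhost:6457"
headers = {
    "content-type": "application/json",
    # Only needed when INSPECTOR_API_KEY is configured.
    "authorization": "Bearer my-api-key",
}

# GET / returns general state; POST /<action> runs a command such as
# "reload", "shutdown", or "scale" (keyword arguments travel as a JSON body).
req = Request(
    f"{base}/scale",
    data=json.dumps({"replicas": 4}).encode(),
    headers=headers,
)
with urlopen(req) as resp:
    print(json.loads(resp.read()).get("result"))
```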
@@ -1,33 +1,6 @@
## Version 22.9.1
## Version 22.9.0 🔶

### Features

- [#2585](https://github.com/sanic-org/sanic/pull/2585) Improved error message when no applications have been registered


### Bugfixes

- [#2578](https://github.com/sanic-org/sanic/pull/2578) Add certificate loader for in process certificate creation
- [#2591](https://github.com/sanic-org/sanic/pull/2591) Do not use sentinel identity for `spawn` compatibility
- [#2592](https://github.com/sanic-org/sanic/pull/2592) Fix properties in nested blueprint groups
- [#2595](https://github.com/sanic-org/sanic/pull/2595) Introduce sleep interval on new worker reloader


### Deprecations and Removals


### Developer infrastructure

- [#2588](https://github.com/sanic-org/sanic/pull/2588) Markdown templates on issue forms


### Improved Documentation

- [#2556](https://github.com/sanic-org/sanic/pull/2556) v22.9 documentation
- [#2582](https://github.com/sanic-org/sanic/pull/2582) Cleanup documentation on Windows support


## Version 22.9.0
_Current version_

### Features
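As a concrete illustration of the nested blueprint group fix referenced in #2592 above, this is the kind of structure involved; the names and prefixes are illustrative only:

```python
from sanic import Blueprint

bp1 = Blueprint("bp1", url_prefix="/bp1")
bp2 = Blueprint("bp2", url_prefix="/bp2")

# A group of blueprints nested inside another group; #2592 concerns how
# properties propagate through this nesting.
group = Blueprint.group(bp1, bp2, url_prefix="/group")
api = Blueprint.group(group, url_prefix="/api", version=1)
```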
@@ -22,7 +22,5 @@ module = [
"httptools.*",
"trustme.*",
"sanic_routing.*",
"aioquic.*",
"html5tagger.*",
]
ignore_missing_imports = true
@@ -1 +1 @@
__version__ = "22.12.0"
__version__ = "22.12.0a0"
sanic/app.py (77 changes)
@@ -61,7 +61,7 @@ from sanic.exceptions import (
URLBuildError,
)
from sanic.handlers import ErrorHandler
from sanic.helpers import Default, _default
from sanic.helpers import Default
from sanic.http import Stage
from sanic.log import (
LOGGING_CONFIG_DEFAULTS,
@@ -69,10 +69,8 @@ from sanic.log import (
error_logger,
logger,
)
from sanic.middleware import Middleware, MiddlewareLocation
from sanic.mixins.listeners import ListenerEvent
from sanic.mixins.startup import StartupMixin
from sanic.mixins.static import StaticHandleMixin
from sanic.models.futures import (
FutureException,
FutureListener,
@@ -80,6 +78,7 @@ from sanic.models.futures import (
FutureRegistry,
FutureRoute,
FutureSignal,
FutureStatic,
)
from sanic.models.handler_types import ListenerType, MiddlewareType
from sanic.models.handler_types import Sanic as SanicVar
@@ -106,7 +105,7 @@ if OS_IS_WINDOWS: # no cov
enable_windows_color_support()


class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
"""
The main application instance
"""
@@ -141,7 +140,6 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
"configure_logging",
"ctx",
"error_handler",
"inspector_class",
"go_fast",
"listeners",
"multiplexer",
@@ -164,7 +162,7 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):

def __init__(
self,
name: Optional[str] = None,
name: str = None,
config: Optional[Config] = None,
ctx: Optional[Any] = None,
router: Optional[Router] = None,
@@ -178,7 +176,6 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
dumps: Optional[Callable[..., AnyStr]] = None,
loads: Optional[Callable[..., Any]] = None,
inspector: bool = False,
inspector_class: Optional[Type[Inspector]] = None,
) -> None:
super().__init__(name=name)
# logging
@@ -214,7 +211,6 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
self.configure_logging: bool = configure_logging
self.ctx: Any = ctx or SimpleNamespace()
self.error_handler: ErrorHandler = error_handler or ErrorHandler()
self.inspector_class: Type[Inspector] = inspector_class or Inspector
self.listeners: Dict[str, List[ListenerType[Any]]] = defaultdict(list)
self.named_request_middleware: Dict[str, Deque[MiddlewareType]] = {}
self.named_response_middleware: Dict[str, Deque[MiddlewareType]] = {}
@@ -295,12 +291,8 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
return listener

def register_middleware(
self,
middleware: Union[MiddlewareType, Middleware],
attach_to: str = "request",
*,
priority: Union[Default, int] = _default,
) -> Union[MiddlewareType, Middleware]:
self, middleware: MiddlewareType, attach_to: str = "request"
) -> MiddlewareType:
"""
Register an application level middleware that will be attached
to all the API URLs registered under this application.
@@ -316,37 +308,19 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
**response** - Invoke before the response is returned back
:return: decorated method
"""
retval = middleware
location = MiddlewareLocation[attach_to.upper()]

if not isinstance(middleware, Middleware):
middleware = Middleware(
middleware,
location=location,
priority=priority if isinstance(priority, int) else 0,
)
elif middleware.priority != priority and isinstance(priority, int):
middleware = Middleware(
middleware.func,
location=middleware.location,
priority=priority,
)

if location is MiddlewareLocation.REQUEST:
if attach_to == "request":
if middleware not in self.request_middleware:
self.request_middleware.append(middleware)
if location is MiddlewareLocation.RESPONSE:
if attach_to == "response":
if middleware not in self.response_middleware:
self.response_middleware.appendleft(middleware)
return retval
return middleware

def register_named_middleware(
self,
middleware: MiddlewareType,
route_names: Iterable[str],
attach_to: str = "request",
*,
priority: Union[Default, int] = _default,
):
"""
Method for attaching middleware to specific routes. This is mainly an
@@ -360,35 +334,19 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
defaults to "request"
:type attach_to: str, optional
"""
retval = middleware
location = MiddlewareLocation[attach_to.upper()]

if not isinstance(middleware, Middleware):
middleware = Middleware(
middleware,
location=location,
priority=priority if isinstance(priority, int) else 0,
)
elif middleware.priority != priority and isinstance(priority, int):
middleware = Middleware(
middleware.func,
location=middleware.location,
priority=priority,
)

if location is MiddlewareLocation.REQUEST:
if attach_to == "request":
for _rn in route_names:
if _rn not in self.named_request_middleware:
self.named_request_middleware[_rn] = deque()
if middleware not in self.named_request_middleware[_rn]:
self.named_request_middleware[_rn].append(middleware)
if location is MiddlewareLocation.RESPONSE:
if attach_to == "response":
for _rn in route_names:
if _rn not in self.named_response_middleware:
self.named_response_middleware[_rn] = deque()
if middleware not in self.named_response_middleware[_rn]:
self.named_response_middleware[_rn].appendleft(middleware)
return retval
return middleware

def _apply_exception_handler(
self,
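A short usage sketch of the keyword-only `priority` argument added in the hunks above; the middleware functions are hypothetical, and higher-priority middleware is intended to run earlier within its location:

```python
from sanic import Request, Sanic

app = Sanic("ExampleApp")


async def check_auth(request: Request):
    ...  # hypothetical request middleware


async def stamp_headers(request: Request, response):
    ...  # hypothetical response middleware


# Plain callables are wrapped in Middleware objects with the given priority.
app.register_middleware(check_auth, "request", priority=99)
app.register_middleware(stamp_headers, "response", priority=10)
```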
@@ -441,6 +399,9 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):

return routes

def _apply_static(self, static: FutureStatic) -> Route:
return self._register_static(static)

def _apply_middleware(
self,
middleware: FutureMiddleware,
@@ -887,11 +848,11 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
Union[
BaseHTTPResponse,
Coroutine[Any, Any, Optional[BaseHTTPResponse]],
ResponseStream,
]
] = None
run_middleware = True
try:

await self.dispatch(
"http.routing.before",
inline=True,
@@ -923,6 +884,7 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
and request.stream.request_body
and not route.extra.ignore_body
):

if hasattr(handler, "is_stream"):
# Streaming handler: lift the size limit
request.stream.request_max_size = float("inf")
@@ -996,7 +958,7 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
...
await response.send(end_stream=True)
elif isinstance(response, ResponseStream):
resp = await response(request)
resp = await response(request) # type: ignore
await self.dispatch(
"http.lifecycle.response",
inline=True,
@@ -1005,7 +967,7 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):
"response": resp,
},
)
await response.eof()
await response.eof() # type: ignore
else:
if not hasattr(handler, "is_websocket"):
raise ServerError(
@@ -1569,7 +1531,6 @@ class Sanic(StaticHandleMixin, BaseSanic, StartupMixin, metaclass=TouchUpMeta):

self.state.is_started = True

def ack(self):
if hasattr(self, "multiplexer"):
self.multiplexer.ack()
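The `handle_request` hunks above dispatch lifecycle signals such as `http.lifecycle.response` with the request and response in the signal context. A hedged sketch of hooking one of them from application code (the handler name is illustrative):

```python
from sanic import Sanic

app = Sanic("ExampleApp")


# The dispatch call above passes {"request": ..., "response": ...} as context,
# so a signal handler can accept them as keyword arguments.
@app.signal("http.lifecycle.response")
async def log_response(request, response):
    print(request.path, response.status)
```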
@@ -40,8 +40,6 @@ FULL_COLOR_LOGO = """
|
||||
|
||||
""" # noqa
|
||||
|
||||
SVG_LOGO = """<svg id=logo alt=Sanic viewBox="0 0 964 279"><path d="M107 222c9-2 10-20 1-22s-20-2-30-2-17 7-16 14 6 10 15 10h30zm115-1c16-2 30-11 35-23s6-24 2-33-6-14-15-20-24-11-38-10c-7 3-10 13-5 19s17-1 24 4 15 14 13 24-5 15-14 18-50 0-74 0h-17c-6 4-10 15-4 20s16 2 23 3zM251 83q9-1 9-7 0-15-10-16h-13c-10 6-10 20 0 22zM147 60c-4 0-10 3-11 11s5 13 10 12 42 0 67 0c5-3 7-10 6-15s-4-8-9-8zm-33 1c-8 0-16 0-24 3s-20 10-25 20-6 24-4 36 15 22 26 27 78 8 94 3c4-4 4-12 0-18s-69 8-93-10c-8-7-9-23 0-30s12-10 20-10 12 2 16-3 1-15-5-18z" fill="#ff0d68"/><path d="M676 74c0-14-18-9-20 0s0 30 0 39 20 9 20 2zm-297-10c-12 2-15 12-23 23l-41 58H340l22-30c8-12 23-13 30-4s20 24 24 38-10 10-17 10l-68 2q-17 1-48 30c-7 6-10 20 0 24s15-8 20-13 20 -20 58-21h50 c20 2 33 9 52 30 8 10 24-4 16-13L384 65q-3-2-5-1zm131 0c-10 1-12 12-11 20v96c1 10-3 23 5 32s20-5 17-15c0-23-3-46 2-67 5-12 22-14 32-5l103 87c7 5 19 1 18-9v-64c-3-10-20-9-21 2s-20 22-30 13l-97-80c-5-4-10-10-18-10zM701 76v128c2 10 15 12 20 4s0-102 0-124s-20-18-20-7z M850 63c-35 0-69-2-86 15s-20 60-13 66 13 8 16 0 1-10 1-27 12-26 20-32 66-5 85-5 31 4 31-10-18-7-54-7M764 159c-6-2-15-2-16 12s19 37 33 43 23 8 25-4-4-11-11-14q-9-3-22-18c-4-7-3-16-10-19zM828 196c-4 0-8 1-10 5s-4 12 0 15 8 2 12 2h60c5 0 10-2 12-6 3-7-1-16-8-16z" fill="#e1e1e1"/></svg>""" # noqa
|
||||
|
||||
ansi_pattern = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
|
||||
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ from sanic.compat import Header
|
||||
from sanic.exceptions import ServerError
|
||||
from sanic.helpers import Default
|
||||
from sanic.http import Stage
|
||||
from sanic.log import error_logger, logger
|
||||
from sanic.log import logger
|
||||
from sanic.models.asgi import ASGIReceive, ASGIScope, ASGISend, MockTransport
|
||||
from sanic.request import Request
|
||||
from sanic.response import BaseHTTPResponse
|
||||
@@ -85,27 +85,13 @@ class Lifespan:
|
||||
) -> None:
|
||||
message = await receive()
|
||||
if message["type"] == "lifespan.startup":
|
||||
try:
|
||||
await self.startup()
|
||||
except Exception as e:
|
||||
error_logger.exception(e)
|
||||
await send(
|
||||
{"type": "lifespan.startup.failed", "message": str(e)}
|
||||
)
|
||||
else:
|
||||
await send({"type": "lifespan.startup.complete"})
|
||||
await self.startup()
|
||||
await send({"type": "lifespan.startup.complete"})
|
||||
|
||||
message = await receive()
|
||||
if message["type"] == "lifespan.shutdown":
|
||||
try:
|
||||
await self.shutdown()
|
||||
except Exception as e:
|
||||
error_logger.exception(e)
|
||||
await send(
|
||||
{"type": "lifespan.shutdown.failed", "message": str(e)}
|
||||
)
|
||||
else:
|
||||
await send({"type": "lifespan.shutdown.complete"})
|
||||
await self.shutdown()
|
||||
await send({"type": "lifespan.shutdown.complete"})
|
||||
|
||||
|
||||
class ASGIApp:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import re
|
||||
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from sanic.base.meta import SanicMeta
|
||||
from sanic.exceptions import SanicException
|
||||
@@ -9,7 +9,6 @@ from sanic.mixins.listeners import ListenerMixin
|
||||
from sanic.mixins.middleware import MiddlewareMixin
|
||||
from sanic.mixins.routes import RouteMixin
|
||||
from sanic.mixins.signals import SignalMixin
|
||||
from sanic.mixins.static import StaticMixin
|
||||
|
||||
|
||||
VALID_NAME = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_\-]*$")
|
||||
@@ -17,7 +16,6 @@ VALID_NAME = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_\-]*$")
|
||||
|
||||
class BaseSanic(
|
||||
RouteMixin,
|
||||
StaticMixin,
|
||||
MiddlewareMixin,
|
||||
ListenerMixin,
|
||||
ExceptionMixin,
|
||||
@@ -26,9 +24,7 @@ class BaseSanic(
|
||||
):
|
||||
__slots__ = ("name",)
|
||||
|
||||
def __init__(
|
||||
self, name: Optional[str] = None, *args: Any, **kwargs: Any
|
||||
) -> None:
|
||||
def __init__(self, name: str = None, *args: Any, **kwargs: Any) -> None:
|
||||
class_name = self.__class__.__name__
|
||||
|
||||
if name is None:
|
||||
|
||||
@@ -304,6 +304,9 @@ class Blueprint(BaseSanic):
|
||||
|
||||
# Routes
|
||||
for future in self._future_routes:
|
||||
# attach the blueprint name to the handler so that it can be
|
||||
# prefixed properly in the router
|
||||
future.handler.__blueprintname__ = self.name
|
||||
# Prepend the blueprint URI prefix if available
|
||||
uri = self._setup_uri(future.uri, url_prefix)
|
||||
|
||||
@@ -439,7 +442,7 @@ class Blueprint(BaseSanic):
|
||||
events.add(signal.ctx.event)
|
||||
|
||||
return asyncio.wait(
|
||||
[asyncio.create_task(event.wait()) for event in events],
|
||||
[event.wait() for event in events],
|
||||
return_when=asyncio.FIRST_COMPLETED,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
sanic/cli/app.py (116 changes)
@@ -3,21 +3,23 @@ import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from argparse import Namespace
|
||||
from argparse import ArgumentParser, RawTextHelpFormatter
|
||||
from functools import partial
|
||||
from textwrap import indent
|
||||
from typing import List, Union, cast
|
||||
from typing import Any, List, Union
|
||||
|
||||
from sanic.app import Sanic
|
||||
from sanic.application.logo import get_logo
|
||||
from sanic.cli.arguments import Group
|
||||
from sanic.cli.base import SanicArgumentParser, SanicHelpFormatter
|
||||
from sanic.cli.inspector import make_inspector_parser
|
||||
from sanic.cli.inspector_client import InspectorClient
|
||||
from sanic.log import Colors, error_logger
|
||||
from sanic.log import error_logger
|
||||
from sanic.worker.inspector import inspect
|
||||
from sanic.worker.loader import AppLoader
|
||||
|
||||
|
||||
class SanicArgumentParser(ArgumentParser):
|
||||
...
|
||||
|
||||
|
||||
class SanicCLI:
|
||||
DESCRIPTION = indent(
|
||||
f"""
|
||||
@@ -44,7 +46,7 @@ Or, a path to a directory to run as a simple HTTP server:
|
||||
self.parser = SanicArgumentParser(
|
||||
prog="sanic",
|
||||
description=self.DESCRIPTION,
|
||||
formatter_class=lambda prog: SanicHelpFormatter(
|
||||
formatter_class=lambda prog: RawTextHelpFormatter(
|
||||
prog,
|
||||
max_help_position=36 if width > 96 else 24,
|
||||
indent_increment=4,
|
||||
@@ -56,27 +58,16 @@ Or, a path to a directory to run as a simple HTTP server:
|
||||
self.main_process = (
|
||||
os.environ.get("SANIC_RELOADER_PROCESS", "") != "true"
|
||||
)
|
||||
self.args: Namespace = Namespace()
|
||||
self.args: List[Any] = []
|
||||
self.groups: List[Group] = []
|
||||
self.inspecting = False
|
||||
|
||||
def attach(self):
|
||||
if len(sys.argv) > 1 and sys.argv[1] == "inspect":
|
||||
self.inspecting = True
|
||||
self.parser.description = get_logo(True)
|
||||
make_inspector_parser(self.parser)
|
||||
return
|
||||
|
||||
for group in Group._registry:
|
||||
instance = group.create(self.parser)
|
||||
instance.attach()
|
||||
self.groups.append(instance)
|
||||
|
||||
def run(self, parse_args=None):
|
||||
if self.inspecting:
|
||||
self._inspector()
|
||||
return
|
||||
|
||||
legacy_version = False
|
||||
if not parse_args:
|
||||
# This is to provide backwards compat -v to display version
|
||||
@@ -95,21 +86,36 @@ Or, a path to a directory to run as a simple HTTP server:
|
||||
self.args = self.parser.parse_args(args=parse_args)
|
||||
self._precheck()
|
||||
app_loader = AppLoader(
|
||||
self.args.module, self.args.factory, self.args.simple, self.args
|
||||
self.args.module,
|
||||
self.args.factory,
|
||||
self.args.simple,
|
||||
self.args,
|
||||
)
|
||||
|
||||
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
|
||||
self._inspector_legacy(app_loader)
|
||||
return
|
||||
|
||||
try:
|
||||
app = self._get_app(app_loader)
|
||||
kwargs = self._build_run_kwargs()
|
||||
except ValueError as e:
|
||||
error_logger.exception(f"Failed to run app: {e}")
|
||||
else:
|
||||
for http_version in self.args.http:
|
||||
app.prepare(**kwargs, version=http_version)
|
||||
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
|
||||
os.environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"
|
||||
else:
|
||||
for http_version in self.args.http:
|
||||
app.prepare(**kwargs, version=http_version)
|
||||
|
||||
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
|
||||
action = self.args.trigger or (
|
||||
"raw" if self.args.inspect_raw else "pretty"
|
||||
)
|
||||
inspect(
|
||||
app.config.INSPECTOR_HOST,
|
||||
app.config.INSPECTOR_PORT,
|
||||
action,
|
||||
)
|
||||
del os.environ["SANIC_IGNORE_PRODUCTION_WARNING"]
|
||||
return
|
||||
|
||||
if self.args.single:
|
||||
serve = Sanic.serve_single
|
||||
elif self.args.legacy:
|
||||
@@ -118,64 +124,6 @@ Or, a path to a directory to run as a simple HTTP server:
|
||||
serve = partial(Sanic.serve, app_loader=app_loader)
|
||||
serve(app)
|
||||
|
||||
def _inspector_legacy(self, app_loader: AppLoader):
|
||||
host = port = None
|
||||
module = cast(str, self.args.module)
|
||||
if ":" in module:
|
||||
maybe_host, maybe_port = module.rsplit(":", 1)
|
||||
if maybe_port.isnumeric():
|
||||
host, port = maybe_host, int(maybe_port)
|
||||
if not host:
|
||||
app = self._get_app(app_loader)
|
||||
host, port = app.config.INSPECTOR_HOST, app.config.INSPECTOR_PORT
|
||||
|
||||
action = self.args.trigger or "info"
|
||||
|
||||
InspectorClient(
|
||||
str(host), int(port or 6457), False, self.args.inspect_raw, ""
|
||||
).do(action)
|
||||
sys.stdout.write(
|
||||
f"\n{Colors.BOLD}{Colors.YELLOW}WARNING:{Colors.END} "
|
||||
"You are using the legacy CLI command that will be removed in "
|
||||
f"{Colors.RED}v23.3{Colors.END}. See "
|
||||
"https://sanic.dev/en/guide/release-notes/v22.12.html"
|
||||
"#deprecations-and-removals or checkout the new "
|
||||
"style commands:\n\n\t"
|
||||
f"{Colors.YELLOW}sanic inspect --help{Colors.END}\n"
|
||||
)
|
||||
|
||||
def _inspector(self):
|
||||
args = sys.argv[2:]
|
||||
self.args, unknown = self.parser.parse_known_args(args=args)
|
||||
if unknown:
|
||||
for arg in unknown:
|
||||
if arg.startswith("--"):
|
||||
try:
|
||||
key, value = arg.split("=")
|
||||
key = key.lstrip("-")
|
||||
except ValueError:
|
||||
value = False if arg.startswith("--no-") else True
|
||||
key = (
|
||||
arg.replace("--no-", "")
|
||||
.lstrip("-")
|
||||
.replace("-", "_")
|
||||
)
|
||||
setattr(self.args, key, value)
|
||||
|
||||
kwargs = {**self.args.__dict__}
|
||||
host = kwargs.pop("host")
|
||||
port = kwargs.pop("port")
|
||||
secure = kwargs.pop("secure")
|
||||
raw = kwargs.pop("raw")
|
||||
action = kwargs.pop("action") or "info"
|
||||
api_key = kwargs.pop("api_key")
|
||||
positional = kwargs.pop("positional", None)
|
||||
if action == "<custom>" and positional:
|
||||
action = positional[0]
|
||||
if len(positional) > 1:
|
||||
kwargs["args"] = positional[1:]
|
||||
InspectorClient(host, port, secure, raw, api_key).do(action, **kwargs)
|
||||
|
||||
def _precheck(self):
|
||||
# Custom TLS mismatch handling for better diagnostics
|
||||
if self.main_process and (
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
from argparse import (
|
||||
SUPPRESS,
|
||||
Action,
|
||||
ArgumentParser,
|
||||
RawTextHelpFormatter,
|
||||
_SubParsersAction,
|
||||
)
|
||||
from typing import Any
|
||||
|
||||
|
||||
class SanicArgumentParser(ArgumentParser):
|
||||
def _check_value(self, action: Action, value: Any) -> None:
|
||||
if isinstance(action, SanicSubParsersAction):
|
||||
return
|
||||
super()._check_value(action, value)
|
||||
|
||||
|
||||
class SanicHelpFormatter(RawTextHelpFormatter):
|
||||
def add_usage(self, usage, actions, groups, prefix=None):
|
||||
if not usage:
|
||||
usage = SUPPRESS
|
||||
# Add one linebreak, but not two
|
||||
self.add_text("\x1b[1A")
|
||||
super().add_usage(usage, actions, groups, prefix)
|
||||
|
||||
|
||||
class SanicSubParsersAction(_SubParsersAction):
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
self._name_parser_map
|
||||
parser_name = values[0]
|
||||
if parser_name not in self._name_parser_map:
|
||||
self._name_parser_map[parser_name] = parser
|
||||
values = ["<custom>", *values]
|
||||
|
||||
super().__call__(parser, namespace, values, option_string)
|
||||
@@ -1,105 +0,0 @@
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from sanic.application.logo import get_logo
|
||||
from sanic.cli.base import SanicHelpFormatter, SanicSubParsersAction
|
||||
|
||||
|
||||
def _add_shared(parser: ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
"--host",
|
||||
"-H",
|
||||
default="localhost",
|
||||
help="Inspector host address [default 127.0.0.1]",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--port",
|
||||
"-p",
|
||||
default=6457,
|
||||
type=int,
|
||||
help="Inspector port [default 6457]",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--secure",
|
||||
"-s",
|
||||
action="store_true",
|
||||
help="Whether to access the Inspector via TLS encryption",
|
||||
)
|
||||
parser.add_argument("--api-key", "-k", help="Inspector authentication key")
|
||||
parser.add_argument(
|
||||
"--raw",
|
||||
action="store_true",
|
||||
help="Whether to output the raw response information",
|
||||
)
|
||||
|
||||
|
||||
class InspectorSubParser(ArgumentParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
_add_shared(self)
|
||||
if not self.description:
|
||||
self.description = ""
|
||||
self.description = get_logo(True) + self.description
|
||||
|
||||
|
||||
def make_inspector_parser(parser: ArgumentParser) -> None:
|
||||
_add_shared(parser)
|
||||
subparsers = parser.add_subparsers(
|
||||
action=SanicSubParsersAction,
|
||||
dest="action",
|
||||
description=(
|
||||
"Run one or none of the below subcommands. Using inspect without "
|
||||
"a subcommand will fetch general information about the state "
|
||||
"of the application instance.\n\n"
|
||||
"Or, you can optionally follow inspect with a subcommand. "
|
||||
"If you have created a custom "
|
||||
"Inspector instance, then you can run custom commands. See "
|
||||
"https://sanic.dev/en/guide/deployment/inspector.html "
|
||||
"for more details."
|
||||
),
|
||||
title=" Subcommands",
|
||||
parser_class=InspectorSubParser,
|
||||
)
|
||||
reloader = subparsers.add_parser(
|
||||
"reload",
|
||||
help="Trigger a reload of the server workers",
|
||||
formatter_class=SanicHelpFormatter,
|
||||
)
|
||||
reloader.add_argument(
|
||||
"--zero-downtime",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Whether to wait for the new process to be online before "
|
||||
"terminating the old"
|
||||
),
|
||||
)
|
||||
subparsers.add_parser(
|
||||
"shutdown",
|
||||
help="Shutdown the application and all processes",
|
||||
formatter_class=SanicHelpFormatter,
|
||||
)
|
||||
scale = subparsers.add_parser(
|
||||
"scale",
|
||||
help="Scale the number of workers",
|
||||
formatter_class=SanicHelpFormatter,
|
||||
)
|
||||
scale.add_argument(
|
||||
"replicas",
|
||||
type=int,
|
||||
help="Number of workers requested",
|
||||
)
|
||||
|
||||
custom = subparsers.add_parser(
|
||||
"<custom>",
|
||||
help="Run a custom command",
|
||||
description=(
|
||||
"keyword arguments:\n When running a custom command, you can "
|
||||
"add keyword arguments by appending them to your command\n\n"
|
||||
"\tsanic inspect foo --one=1 --two=2"
|
||||
),
|
||||
formatter_class=SanicHelpFormatter,
|
||||
)
|
||||
custom.add_argument(
|
||||
"positional",
|
||||
nargs="*",
|
||||
help="Add one or more non-keyword args to your custom command",
|
||||
)
|
||||
@@ -1,119 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
from http.client import RemoteDisconnected
|
||||
from textwrap import indent
|
||||
from typing import Any, Dict, Optional
|
||||
from urllib.error import URLError
|
||||
from urllib.request import Request as URequest
|
||||
from urllib.request import urlopen
|
||||
|
||||
from sanic.application.logo import get_logo
|
||||
from sanic.application.motd import MOTDTTY
|
||||
from sanic.log import Colors
|
||||
|
||||
|
||||
try: # no cov
|
||||
from ujson import dumps, loads
|
||||
except ModuleNotFoundError: # no cov
|
||||
from json import dumps, loads # type: ignore
|
||||
|
||||
|
||||
class InspectorClient:
|
||||
def __init__(
|
||||
self,
|
||||
host: str,
|
||||
port: int,
|
||||
secure: bool,
|
||||
raw: bool,
|
||||
api_key: Optional[str],
|
||||
) -> None:
|
||||
self.scheme = "https" if secure else "http"
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.raw = raw
|
||||
self.api_key = api_key
|
||||
|
||||
for scheme in ("http", "https"):
|
||||
full = f"{scheme}://"
|
||||
if self.host.startswith(full):
|
||||
self.scheme = scheme
|
||||
self.host = self.host[len(full) :] # noqa E203
|
||||
|
||||
def do(self, action: str, **kwargs: Any) -> None:
|
||||
if action == "info":
|
||||
self.info()
|
||||
return
|
||||
result = self.request(action, **kwargs).get("result")
|
||||
if result:
|
||||
out = (
|
||||
dumps(result)
|
||||
if isinstance(result, (list, dict))
|
||||
else str(result)
|
||||
)
|
||||
sys.stdout.write(out + "\n")
|
||||
|
||||
def info(self) -> None:
|
||||
out = sys.stdout.write
|
||||
response = self.request("", "GET")
|
||||
if self.raw or not response:
|
||||
return
|
||||
data = response["result"]
|
||||
display = data.pop("info")
|
||||
extra = display.pop("extra", {})
|
||||
display["packages"] = ", ".join(display["packages"])
|
||||
MOTDTTY(get_logo(), self.base_url, display, extra).display(
|
||||
version=False,
|
||||
action="Inspecting",
|
||||
out=out,
|
||||
)
|
||||
for name, info in data["workers"].items():
|
||||
info = "\n".join(
|
||||
f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
|
||||
for key, value in info.items()
|
||||
)
|
||||
out(
|
||||
"\n"
|
||||
+ indent(
|
||||
"\n".join(
|
||||
[
|
||||
f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
|
||||
info,
|
||||
]
|
||||
),
|
||||
" ",
|
||||
)
|
||||
+ "\n"
|
||||
)
|
||||
|
||||
def request(self, action: str, method: str = "POST", **kwargs: Any) -> Any:
|
||||
url = f"{self.base_url}/{action}"
|
||||
params: Dict[str, Any] = {"method": method, "headers": {}}
|
||||
if kwargs:
|
||||
params["data"] = dumps(kwargs).encode()
|
||||
params["headers"]["content-type"] = "application/json"
|
||||
if self.api_key:
|
||||
params["headers"]["authorization"] = f"Bearer {self.api_key}"
|
||||
request = URequest(url, **params)
|
||||
|
||||
try:
|
||||
with urlopen(request) as response: # nosec B310
|
||||
raw = response.read()
|
||||
loaded = loads(raw)
|
||||
if self.raw:
|
||||
sys.stdout.write(dumps(loaded.get("result")) + "\n")
|
||||
return {}
|
||||
return loaded
|
||||
except (URLError, RemoteDisconnected) as e:
|
||||
sys.stderr.write(
|
||||
f"{Colors.RED}Could not connect to inspector at: "
|
||||
f"{Colors.YELLOW}{self.base_url}{Colors.END}\n"
|
||||
"Either the application is not running, or it did not start "
|
||||
f"an inspector instance.\n{e}\n"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
@property
|
||||
def base_url(self):
|
||||
return f"{self.scheme}://{self.host}:{self.port}"
|
||||
@@ -3,23 +3,11 @@ import os
|
||||
import signal
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
from enum import Enum
|
||||
from typing import Awaitable, Union
|
||||
from typing import Awaitable
|
||||
|
||||
from multidict import CIMultiDict # type: ignore
|
||||
|
||||
from sanic.helpers import Default
|
||||
|
||||
|
||||
if sys.version_info < (3, 8): # no cov
|
||||
StartMethod = Union[Default, str]
|
||||
else: # no cov
|
||||
from typing import Literal
|
||||
|
||||
StartMethod = Union[
|
||||
Default, Literal["fork"], Literal["forkserver"], Literal["spawn"]
|
||||
]
|
||||
|
||||
OS_IS_WINDOWS = os.name == "nt"
|
||||
UVLOOP_INSTALLED = False
|
||||
@@ -31,6 +19,7 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
# Python 3.11 changed the way Enum formatting works for mixed-in types.
|
||||
if sys.version_info < (3, 11, 0):
|
||||
|
||||
@@ -56,16 +45,6 @@ class UpperStrEnum(StrEnum):
|
||||
return self.value
|
||||
|
||||
|
||||
@contextmanager
|
||||
def use_context(method: StartMethod):
|
||||
from sanic import Sanic
|
||||
|
||||
orig = Sanic.start_method
|
||||
Sanic.start_method = method
|
||||
yield
|
||||
Sanic.start_method = orig
|
||||
|
||||
|
||||
def enable_windows_color_support():
|
||||
import ctypes
|
||||
|
||||
|
||||
@@ -2,14 +2,13 @@ from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
from abc import ABCMeta
|
||||
from inspect import getmembers, isclass, isdatadescriptor
|
||||
from os import environ
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Dict, Optional, Sequence, Union
|
||||
from warnings import filterwarnings
|
||||
|
||||
from sanic.constants import LocalCertCreator
|
||||
from sanic.constants import LocalCertCreator, RestartOrder
|
||||
from sanic.errorpages import DEFAULT_FORMAT, check_error_format
|
||||
from sanic.helpers import Default, _default
|
||||
from sanic.http import Http
|
||||
@@ -47,9 +46,6 @@ DEFAULT_CONFIG = {
|
||||
"INSPECTOR": False,
|
||||
"INSPECTOR_HOST": "localhost",
|
||||
"INSPECTOR_PORT": 6457,
|
||||
"INSPECTOR_TLS_KEY": _default,
|
||||
"INSPECTOR_TLS_CERT": _default,
|
||||
"INSPECTOR_API_KEY": "",
|
||||
"KEEP_ALIVE_TIMEOUT": 5, # 5 seconds
|
||||
"KEEP_ALIVE": True,
|
||||
"LOCAL_CERT_CREATOR": LocalCertCreator.AUTO,
|
||||
@@ -67,6 +63,7 @@ DEFAULT_CONFIG = {
|
||||
"REQUEST_MAX_SIZE": 100000000, # 100 megabytes
|
||||
"REQUEST_TIMEOUT": 60, # 60 seconds
|
||||
"RESPONSE_TIMEOUT": 60, # 60 seconds
|
||||
"RESTART_ORDER": RestartOrder.SHUTDOWN_FIRST,
|
||||
"TLS_CERT_PASSWORD": "",
|
||||
"TOUCHUP": _default,
|
||||
"USE_UVLOOP": _default,
|
||||
@@ -76,7 +73,7 @@ DEFAULT_CONFIG = {
|
||||
}
|
||||
|
||||
|
||||
class DescriptorMeta(ABCMeta):
|
||||
class DescriptorMeta(type):
|
||||
def __init__(cls, *_):
|
||||
cls.__setters__ = {name for name, _ in getmembers(cls, cls._is_setter)}
|
||||
|
||||
@@ -97,9 +94,6 @@ class Config(dict, metaclass=DescriptorMeta):
|
||||
INSPECTOR: bool
|
||||
INSPECTOR_HOST: str
|
||||
INSPECTOR_PORT: int
|
||||
INSPECTOR_TLS_KEY: Union[Path, str, Default]
|
||||
INSPECTOR_TLS_CERT: Union[Path, str, Default]
|
||||
INSPECTOR_API_KEY: str
|
||||
KEEP_ALIVE_TIMEOUT: int
|
||||
KEEP_ALIVE: bool
|
||||
LOCAL_CERT_CREATOR: Union[str, LocalCertCreator]
|
||||
@@ -117,6 +111,7 @@ class Config(dict, metaclass=DescriptorMeta):
|
||||
REQUEST_MAX_SIZE: int
|
||||
REQUEST_TIMEOUT: int
|
||||
RESPONSE_TIMEOUT: int
|
||||
RESTART_ORDER: Union[str, RestartOrder]
|
||||
SERVER_NAME: str
|
||||
TLS_CERT_PASSWORD: str
|
||||
TOUCHUP: Union[Default, bool]
|
||||
@@ -127,9 +122,7 @@ class Config(dict, metaclass=DescriptorMeta):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
defaults: Optional[
|
||||
Dict[str, Union[str, bool, int, float, None]]
|
||||
] = None,
|
||||
defaults: Dict[str, Union[str, bool, int, float, None]] = None,
|
||||
env_prefix: Optional[str] = SANIC_PREFIX,
|
||||
keep_alive: Optional[bool] = None,
|
||||
*,
|
||||
@@ -203,6 +196,10 @@ class Config(dict, metaclass=DescriptorMeta):
|
||||
self.LOCAL_CERT_CREATOR = LocalCertCreator[
|
||||
self.LOCAL_CERT_CREATOR.upper()
|
||||
]
|
||||
elif attr == "RESTART_ORDER" and not isinstance(
|
||||
self.RESTART_ORDER, RestartOrder
|
||||
):
|
||||
self.RESTART_ORDER = RestartOrder[self.RESTART_ORDER.upper()]
|
||||
elif attr == "DEPRECATION_FILTER":
|
||||
self._configure_warnings()
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from sanic.compat import UpperStrEnum
|
||||
|
||||
|
||||
class HTTPMethod(UpperStrEnum):
|
||||
|
||||
GET = auto()
|
||||
POST = auto()
|
||||
PUT = auto()
|
||||
@@ -14,11 +15,18 @@ class HTTPMethod(UpperStrEnum):
|
||||
|
||||
|
||||
class LocalCertCreator(UpperStrEnum):
|
||||
|
||||
AUTO = auto()
|
||||
TRUSTME = auto()
|
||||
MKCERT = auto()
|
||||
|
||||
|
||||
class RestartOrder(UpperStrEnum):
|
||||
|
||||
SHUTDOWN_FIRST = auto()
|
||||
STARTUP_FIRST = auto()
|
||||
|
||||
|
||||
HTTP_METHODS = tuple(HTTPMethod.__members__.values())
|
||||
SAFE_HTTP_METHODS = (HTTPMethod.GET, HTTPMethod.HEAD, HTTPMethod.OPTIONS)
|
||||
IDEMPOTENT_HTTP_METHODS = (
|
||||
|
||||
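The `RestartOrder` enum introduced in the constants hunk above pairs with the `RESTART_ORDER` setting added in the `sanic/config.py` hunks earlier in this diff. A minimal sketch of setting it; per that Config code, a plain string is upper-cased and coerced to the enum, with `SHUTDOWN_FIRST` as the default:

```python
from sanic import Sanic

app = Sanic("ExampleApp")

# Equivalent to RestartOrder.STARTUP_FIRST after the Config coercion shown above.
app.config.RESTART_ORDER = "startup_first"
```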
@@ -12,7 +12,6 @@ Setting ``app.config.FALLBACK_ERROR_FORMAT = "auto"`` will enable a switch that
|
||||
will attempt to provide an appropriate response format based upon the
|
||||
request type.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import typing as t
|
||||
@@ -22,7 +21,8 @@ from traceback import extract_tb
|
||||
|
||||
from sanic.exceptions import BadRequest, SanicException
|
||||
from sanic.helpers import STATUS_CODES
|
||||
from sanic.response import html, json, text
|
||||
from sanic.request import Request
|
||||
from sanic.response import HTTPResponse, html, json, text
|
||||
|
||||
|
||||
dumps: t.Callable[..., str]
|
||||
@@ -33,8 +33,6 @@ try:
|
||||
except ImportError: # noqa
|
||||
from json import dumps
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from sanic import HTTPResponse, Request
|
||||
|
||||
DEFAULT_FORMAT = "auto"
|
||||
FALLBACK_TEXT = (
|
||||
@@ -406,13 +404,16 @@ CONTENT_TYPE_BY_RENDERERS = {
|
||||
v: k for k, v in RENDERERS_BY_CONTENT_TYPE.items()
|
||||
}
|
||||
|
||||
# Handler source code is checked for which response types it returns with the
|
||||
# route error_format="auto" (default) to determine which format to use.
|
||||
RESPONSE_MAPPING = {
|
||||
"empty": "html",
|
||||
"json": "json",
|
||||
"text": "text",
|
||||
"raw": "text",
|
||||
"html": "html",
|
||||
"JSONResponse": "json",
|
||||
"file": "html",
|
||||
"file_stream": "text",
|
||||
"stream": "text",
|
||||
"redirect": "html",
|
||||
"text/plain": "text",
|
||||
"text/html": "html",
|
||||
"application/json": "json",
|
||||
|
||||
@@ -3,6 +3,11 @@ from __future__ import annotations
|
||||
from typing import Dict, List, Optional, Tuple, Type
|
||||
|
||||
from sanic.errorpages import BaseRenderer, TextRenderer, exception_response
|
||||
from sanic.exceptions import (
|
||||
HeaderNotFound,
|
||||
InvalidRangeType,
|
||||
RangeNotSatisfiable,
|
||||
)
|
||||
from sanic.log import deprecation, error_logger
|
||||
from sanic.models.handler_types import RouteHandler
|
||||
from sanic.response import text
|
||||
@@ -18,6 +23,7 @@ class ErrorHandler:
|
||||
by the developers to perform a wide range of tasks from recording the error
|
||||
stats to reporting them to an external service that can be used for
|
||||
realtime alerting system.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -30,6 +36,14 @@ class ErrorHandler:
|
||||
self.debug = False
|
||||
self.base = base
|
||||
|
||||
@classmethod
|
||||
def finalize(cls, *args, **kwargs):
|
||||
deprecation(
|
||||
"ErrorHandler.finalize is deprecated and no longer needed. "
|
||||
"Please remove update your code to remove it. ",
|
||||
22.12,
|
||||
)
|
||||
|
||||
def _full_lookup(self, exception, route_name: Optional[str] = None):
|
||||
return self.lookup(exception, route_name)
|
||||
|
||||
@@ -190,3 +204,74 @@ class ErrorHandler:
|
||||
error_logger.exception(
|
||||
"Exception occurred while handling uri: %s", url
|
||||
)
|
||||
|
||||
|
||||
class ContentRangeHandler:
|
||||
"""
|
||||
A mechanism to parse and process the incoming request headers to
|
||||
extract the content range information.
|
||||
|
||||
:param request: Incoming api request
|
||||
:param stats: Stats related to the content
|
||||
|
||||
:type request: :class:`sanic.request.Request`
|
||||
:type stats: :class:`posix.stat_result`
|
||||
|
||||
:ivar start: Content Range start
|
||||
:ivar end: Content Range end
|
||||
:ivar size: Length of the content
|
||||
:ivar total: Total size identified by the :class:`posix.stat_result`
|
||||
instance
|
||||
:ivar ContentRangeHandler.headers: Content range header ``dict``
|
||||
"""
|
||||
|
||||
__slots__ = ("start", "end", "size", "total", "headers")
|
||||
|
||||
def __init__(self, request, stats):
|
||||
self.total = stats.st_size
|
||||
_range = request.headers.getone("range", None)
|
||||
if _range is None:
|
||||
raise HeaderNotFound("Range Header Not Found")
|
||||
unit, _, value = tuple(map(str.strip, _range.partition("=")))
|
||||
if unit != "bytes":
|
||||
raise InvalidRangeType(
|
||||
"%s is not a valid Range Type" % (unit,), self
|
||||
)
|
||||
start_b, _, end_b = tuple(map(str.strip, value.partition("-")))
|
||||
try:
|
||||
self.start = int(start_b) if start_b else None
|
||||
except ValueError:
|
||||
raise RangeNotSatisfiable(
|
||||
"'%s' is invalid for Content Range" % (start_b,), self
|
||||
)
|
||||
try:
|
||||
self.end = int(end_b) if end_b else None
|
||||
except ValueError:
|
||||
raise RangeNotSatisfiable(
|
||||
"'%s' is invalid for Content Range" % (end_b,), self
|
||||
)
|
||||
if self.end is None:
|
||||
if self.start is None:
|
||||
raise RangeNotSatisfiable(
|
||||
"Invalid for Content Range parameters", self
|
||||
)
|
||||
else:
|
||||
# this case represents `Content-Range: bytes 5-`
|
||||
self.end = self.total - 1
|
||||
else:
|
||||
if self.start is None:
|
||||
# this case represents `Content-Range: bytes -5`
|
||||
self.start = self.total - self.end
|
||||
self.end = self.total - 1
|
||||
if self.start >= self.end:
|
||||
raise RangeNotSatisfiable(
|
||||
"Invalid for Content Range parameters", self
|
||||
)
|
||||
self.size = self.end - self.start + 1
|
||||
self.headers = {
|
||||
"Content-Range": "bytes %s-%s/%s"
|
||||
% (self.start, self.end, self.total)
|
||||
}
|
||||
|
||||
def __bool__(self):
|
||||
return self.size > 0
|
||||
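To make the `ContentRangeHandler` parsing above concrete, a worked example of the arithmetic for a 1000-byte file; the values are illustrative:

```python
total = 1000  # stats.st_size

# "Range: bytes=200-499"
start, end = 200, 499
size = end - start + 1                           # 300
content_range = f"bytes {start}-{end}/{total}"   # "bytes 200-499/1000"
assert size == 300

# "Range: bytes=200-" (open-ended): end becomes total - 1 = 999
# "Range: bytes=-200" (suffix): start becomes total - 200 = 800, end 999
```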
@@ -1,10 +0,0 @@
|
||||
from .content_range import ContentRangeHandler
|
||||
from .directory import DirectoryHandler
|
||||
from .error import ErrorHandler
|
||||
|
||||
|
||||
__all__ = (
|
||||
"ContentRangeHandler",
|
||||
"DirectoryHandler",
|
||||
"ErrorHandler",
|
||||
)
|
||||
@@ -1,78 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from sanic.exceptions import (
|
||||
HeaderNotFound,
|
||||
InvalidRangeType,
|
||||
RangeNotSatisfiable,
|
||||
)
|
||||
|
||||
|
||||
class ContentRangeHandler:
|
||||
"""
|
||||
A mechanism to parse and process the incoming request headers to
|
||||
extract the content range information.
|
||||
|
||||
:param request: Incoming api request
|
||||
:param stats: Stats related to the content
|
||||
|
||||
:type request: :class:`sanic.request.Request`
|
||||
:type stats: :class:`posix.stat_result`
|
||||
|
||||
:ivar start: Content Range start
|
||||
:ivar end: Content Range end
|
||||
:ivar size: Length of the content
|
||||
:ivar total: Total size identified by the :class:`posix.stat_result`
|
||||
instance
|
||||
:ivar ContentRangeHandler.headers: Content range header ``dict``
|
||||
"""
|
||||
|
||||
__slots__ = ("start", "end", "size", "total", "headers")
|
||||
|
||||
def __init__(self, request, stats):
|
||||
self.total = stats.st_size
|
||||
_range = request.headers.getone("range", None)
|
||||
if _range is None:
|
||||
raise HeaderNotFound("Range Header Not Found")
|
||||
unit, _, value = tuple(map(str.strip, _range.partition("=")))
|
||||
if unit != "bytes":
|
||||
raise InvalidRangeType(
|
||||
"%s is not a valid Range Type" % (unit,), self
|
||||
)
|
||||
start_b, _, end_b = tuple(map(str.strip, value.partition("-")))
|
||||
try:
|
||||
self.start = int(start_b) if start_b else None
|
||||
except ValueError:
|
||||
raise RangeNotSatisfiable(
|
||||
"'%s' is invalid for Content Range" % (start_b,), self
|
||||
)
|
||||
try:
|
||||
self.end = int(end_b) if end_b else None
|
||||
except ValueError:
|
||||
raise RangeNotSatisfiable(
|
||||
"'%s' is invalid for Content Range" % (end_b,), self
|
||||
)
|
||||
if self.end is None:
|
||||
if self.start is None:
|
||||
raise RangeNotSatisfiable(
|
||||
"Invalid for Content Range parameters", self
|
||||
)
|
||||
else:
|
||||
# this case represents `Content-Range: bytes 5-`
|
||||
self.end = self.total - 1
|
||||
else:
|
||||
if self.start is None:
|
||||
# this case represents `Content-Range: bytes -5`
|
||||
self.start = self.total - self.end
|
||||
self.end = self.total - 1
|
||||
if self.start >= self.end:
|
||||
raise RangeNotSatisfiable(
|
||||
"Invalid for Content Range parameters", self
|
||||
)
|
||||
self.size = self.end - self.start + 1
|
||||
self.headers = {
|
||||
"Content-Range": "bytes %s-%s/%s"
|
||||
% (self.start, self.end, self.total)
|
||||
}
|
||||
|
||||
def __bool__(self):
|
||||
return self.size > 0
|
||||
@@ -1,84 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
from pathlib import Path
|
||||
from stat import S_ISDIR
|
||||
from typing import Dict, Iterable, Optional, Sequence, Union, cast
|
||||
|
||||
from sanic.exceptions import NotFound
|
||||
from sanic.pages.directory_page import DirectoryPage, FileInfo
|
||||
from sanic.request import Request
|
||||
from sanic.response import file, html, redirect
|
||||
|
||||
|
||||
class DirectoryHandler:
|
||||
def __init__(
|
||||
self,
|
||||
uri: str,
|
||||
directory: Path,
|
||||
directory_view: bool = False,
|
||||
index: Optional[Union[str, Sequence[str]]] = None,
|
||||
) -> None:
|
||||
if isinstance(index, str):
|
||||
index = [index]
|
||||
elif index is None:
|
||||
index = []
|
||||
self.base = uri.strip("/")
|
||||
self.directory = directory
|
||||
self.directory_view = directory_view
|
||||
self.index = tuple(index)
|
||||
|
||||
async def handle(self, request: Request, path: str):
|
||||
current = path.strip("/")[len(self.base) :].strip("/") # noqa: E203
|
||||
for file_name in self.index:
|
||||
index_file = self.directory / current / file_name
|
||||
if index_file.is_file():
|
||||
return await file(index_file)
|
||||
|
||||
if self.directory_view:
|
||||
return self._index(
|
||||
self.directory / current, path, request.app.debug
|
||||
)
|
||||
|
||||
if self.index:
|
||||
raise NotFound("File not found")
|
||||
|
||||
raise IsADirectoryError(f"{self.directory.as_posix()} is a directory")
|
||||
|
||||
def _index(self, location: Path, path: str, debug: bool):
|
||||
# Remove empty path elements, append slash
|
||||
if "//" in path or not path.endswith("/"):
|
||||
return redirect(
|
||||
"/" + "".join([f"{p}/" for p in path.split("/") if p])
|
||||
)
|
||||
|
||||
# Render file browser
|
||||
page = DirectoryPage(self._iter_files(location), path, debug)
|
||||
return html(page.render())
|
||||
|
||||
def _prepare_file(self, path: Path) -> Dict[str, Union[int, str]]:
|
||||
stat = path.stat()
|
||||
modified = (
|
||||
datetime.fromtimestamp(stat.st_mtime)
|
||||
.isoformat()[:19]
|
||||
.replace("T", " ")
|
||||
)
|
||||
is_dir = S_ISDIR(stat.st_mode)
|
||||
icon = "📁" if is_dir else "📄"
|
||||
file_name = path.name
|
||||
if is_dir:
|
||||
file_name += "/"
|
||||
return {
|
||||
"priority": is_dir * -1,
|
||||
"file_name": file_name,
|
||||
"icon": icon,
|
||||
"file_access": modified,
|
||||
"file_size": stat.st_size,
|
||||
}
|
||||
|
||||
def _iter_files(self, location: Path) -> Iterable[FileInfo]:
|
||||
prepared = [self._prepare_file(f) for f in location.iterdir()]
|
||||
for item in sorted(prepared, key=itemgetter("priority", "file_name")):
|
||||
del item["priority"]
|
||||
yield cast(FileInfo, item)
|
||||
sanic/headers.py (296 changes)
@@ -35,96 +35,141 @@ _host_re = re.compile(
|
||||
|
||||
def parse_arg_as_accept(f):
|
||||
def func(self, other, *args, **kwargs):
|
||||
if not isinstance(other, MediaType) and other:
|
||||
other = MediaType._parse(other)
|
||||
if not isinstance(other, Accept) and other:
|
||||
other = Accept.parse(other)
|
||||
return f(self, other, *args, **kwargs)
|
||||
|
||||
return func
|
||||
|
||||
|
||||
class MediaType:
|
||||
"""A media type, as used in the Accept header."""
|
||||
class MediaType(str):
|
||||
def __new__(cls, value: str):
|
||||
return str.__new__(cls, value)
|
||||
|
||||
def __init__(self, value: str) -> None:
|
||||
self.value = value
|
||||
self.is_wildcard = self.check_if_wildcard(value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if self.is_wildcard:
|
||||
return True
|
||||
|
||||
if self.match(other):
|
||||
return True
|
||||
|
||||
other_is_wildcard = (
|
||||
other.is_wildcard
|
||||
if isinstance(other, MediaType)
|
||||
else self.check_if_wildcard(other)
|
||||
)
|
||||
|
||||
return other_is_wildcard
|
||||
|
||||
def match(self, other):
|
||||
other_value = other.value if isinstance(other, MediaType) else other
|
||||
return self.value == other_value
|
||||
|
||||
@staticmethod
|
||||
def check_if_wildcard(value):
|
||||
return value == "*"
|
||||
|
||||
|
||||
class Accept(str):
|
||||
def __new__(cls, value: str, *args, **kwargs):
|
||||
return str.__new__(cls, value)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
type_: str,
|
||||
subtype: str,
|
||||
**params: str,
|
||||
value: str,
|
||||
type_: MediaType,
|
||||
subtype: MediaType,
|
||||
*,
|
||||
q: str = "1.0",
|
||||
**kwargs: str,
|
||||
):
|
||||
qvalue = float(q)
|
||||
if qvalue > 1 or qvalue < 0:
|
||||
raise InvalidHeader(
|
||||
f"Accept header qvalue must be between 0 and 1, not: {qvalue}"
|
||||
)
|
||||
self.value = value
|
||||
self.type_ = type_
|
||||
self.subtype = subtype
|
||||
self.q = float(params.get("q", "1.0"))
|
||||
self.params = params
|
||||
self.mime = f"{type_}/{subtype}"
|
||||
self.qvalue = qvalue
|
||||
self.params = kwargs
|
||||
|
||||
def __repr__(self):
|
||||
return self.mime + "".join(f";{k}={v}" for k, v in self.params.items())
|
||||
def _compare(self, other, method):
|
||||
try:
|
||||
return method(self.qvalue, other.qvalue)
|
||||
except (AttributeError, TypeError):
|
||||
return NotImplemented
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Check for mime (str or MediaType) identical type/subtype."""
|
||||
if isinstance(other, str):
|
||||
return self.mime == other
|
||||
if isinstance(other, MediaType):
|
||||
return self.mime == other.mime
|
||||
return NotImplemented
|
||||
@parse_arg_as_accept
|
||||
def __lt__(self, other: Union[str, Accept]):
|
||||
return self._compare(other, lambda s, o: s < o)
|
||||
|
||||
@parse_arg_as_accept
|
||||
def __le__(self, other: Union[str, Accept]):
|
||||
return self._compare(other, lambda s, o: s <= o)
|
||||
|
||||
@parse_arg_as_accept
|
||||
def __eq__(self, other: Union[str, Accept]): # type: ignore
|
||||
return self._compare(other, lambda s, o: s == o)
|
||||
|
||||
@parse_arg_as_accept
|
||||
def __ge__(self, other: Union[str, Accept]):
|
||||
return self._compare(other, lambda s, o: s >= o)
|
||||
|
||||
@parse_arg_as_accept
|
||||
def __gt__(self, other: Union[str, Accept]):
|
||||
return self._compare(other, lambda s, o: s > o)
|
||||
|
||||
@parse_arg_as_accept
|
||||
def __ne__(self, other: Union[str, Accept]): # type: ignore
|
||||
return self._compare(other, lambda s, o: s != o)
|
||||
|
||||
@parse_arg_as_accept
|
||||
def match(
|
||||
self,
|
||||
mime: str,
|
||||
allow_type_wildcard=True,
|
||||
allow_subtype_wildcard=True,
|
||||
) -> Optional[MediaType]:
|
||||
"""Check if this media type matches the given mime type/subtype.
|
||||
|
||||
Wildcards are supported both ways on both type and subtype.
|
||||
|
||||
Note: Use the `==` operator instead to check for literal matches
|
||||
without expanding wildcards.
|
||||
|
||||
@param mime: A type/subtype string to match.
|
||||
@return `self` if the media types are compatible, else `None`
|
||||
"""
|
||||
mt = MediaType._parse(mime)
|
||||
return (
|
||||
self
|
||||
if (
|
||||
# Subtype match
|
||||
(self.subtype in (mt.subtype, "*") or mt.subtype == "*")
|
||||
# Type match
|
||||
and (self.type_ in (mt.type_, "*") or mt.type_ == "*")
|
||||
# Allow disabling wildcards (backwards compatibility with tests)
|
||||
and (
|
||||
allow_type_wildcard
|
||||
or self.type_ != "*"
|
||||
and mt.type_ != "*"
|
||||
)
|
||||
and (
|
||||
allow_subtype_wildcard
|
||||
or self.subtype != "*"
|
||||
and mt.subtype != "*"
|
||||
)
|
||||
other,
|
||||
*,
|
||||
allow_type_wildcard: bool = True,
|
||||
allow_subtype_wildcard: bool = True,
|
||||
) -> bool:
|
||||
type_match = (
|
||||
self.type_ == other.type_
|
||||
if allow_type_wildcard
|
||||
else (
|
||||
self.type_.match(other.type_)
|
||||
and not self.type_.is_wildcard
|
||||
and not other.type_.is_wildcard
|
||||
)
|
||||
)
|
||||
subtype_match = (
|
||||
self.subtype == other.subtype
|
||||
if allow_subtype_wildcard
|
||||
else (
|
||||
self.subtype.match(other.subtype)
|
||||
and not self.subtype.is_wildcard
|
||||
and not other.subtype.is_wildcard
|
||||
)
|
||||
else None
|
||||
)
|
||||
|
||||
@property
|
||||
def has_wildcard(self) -> bool:
|
||||
"""Return True if this media type has a wildcard in it."""
|
||||
return "*" in (self.subtype, self.type_)
|
||||
|
||||
@property
|
||||
def is_wildcard(self) -> bool:
|
||||
"""Return True if this is the wildcard `*/*`"""
|
||||
return self.type_ == "*" and self.subtype == "*"
|
||||
return type_match and subtype_match
|
||||
|
||||
@classmethod
|
||||
def _parse(cls, mime_with_params: str) -> MediaType:
|
||||
mtype = mime_with_params.strip()
|
||||
def parse(cls, raw: str) -> Accept:
|
||||
invalid = False
|
||||
mtype = raw.strip()
|
||||
|
||||
media, *raw_params = mtype.split(";")
|
||||
type_, subtype = media.split("/", 1)
|
||||
if not type_ or not subtype:
|
||||
raise ValueError(f"Invalid media type: {mtype}")
|
||||
try:
|
||||
media, *raw_params = mtype.split(";")
|
||||
type_, subtype = media.split("/")
|
||||
except ValueError:
|
||||
invalid = True
|
||||
|
||||
if invalid or not type_ or not subtype:
|
||||
raise InvalidHeader(f"Header contains invalid Accept value: {raw}")
|
||||
|
||||
params = dict(
|
||||
[
|
||||
@@ -133,79 +178,28 @@ class MediaType:
|
||||
]
|
||||
)
|
||||
|
||||
return cls(type_.lstrip(), subtype.rstrip(), **params)
|
||||
return cls(mtype, MediaType(type_), MediaType(subtype), **params)
|
||||
|
||||
|
||||
class Matched(str):
|
||||
"""A matching result of a MIME string against a MediaType."""
|
||||
class AcceptContainer(list):
|
||||
def __contains__(self, o: object) -> bool:
|
||||
return any(item.match(o) for item in self)
|
||||
|
||||
def __new__(cls, mime: str, m: Optional[MediaType]):
|
||||
return super().__new__(cls, mime)
|
||||
|
||||
def __init__(self, mime: str, m: Optional[MediaType]):
|
||||
self.m = m
|
||||
|
||||
def __repr__(self):
|
||||
return f"<{self} matched {self.m}>" if self else "<no match>"
|
||||
|
||||
|
||||
class AcceptList(list):
|
||||
"""A list of media types, as used in the Accept header.
|
||||
|
||||
The Accept header entries are listed in order of preference, starting
|
||||
with the most preferred. This class is a list of `MediaType` objects,
which also encapsulate the q value and any other parameters.
|
||||
|
||||
Two separate methods are provided for searching the list:
|
||||
- 'match' for finding the most preferred match (wildcards supported)
|
||||
- operator 'in' for checking explicit matches (wildcards as literals)
|
||||
"""
|
||||
|
||||
def match(self, *mimes: str) -> Matched:
|
||||
"""Find a media type accepted by the client.
|
||||
|
||||
This method can be used to find which of the media types requested by
|
||||
the client is most preferred against the ones given as arguments.
|
||||
|
||||
The ordering of preference is set by:
|
||||
1. The q values on the Accept header, and those being equal,
|
||||
2. The order of the arguments (first is most preferred), and
|
||||
3. The first matching entry on the Accept header.
|
||||
|
||||
Wildcards are matched both ways. A match is usually found, as the
|
||||
Accept headers typically include `*/*`, in particular if the header
|
||||
is missing, is not manually set, or if the client is a browser.
|
||||
|
||||
Note: the returned object behaves as a string of the mime argument
|
||||
that matched, and is empty/falsy if no match was found. The matched
|
||||
header entry `MediaType` or `None` is available as the `m` attribute.
|
||||
|
||||
@param mimes: Any MIME types to search for in order of preference.
|
||||
@return A match object with the mime string and the MediaType object.
|
||||
"""
|
||||
l = sorted(
|
||||
[
|
||||
(-acc.q, i, j, mime, acc) # Sort by -q, i, j
|
||||
for j, acc in enumerate(self)
|
||||
for i, mime in enumerate(mimes)
|
||||
if acc.match(mime)
|
||||
]
|
||||
def match(
|
||||
self,
|
||||
o: object,
|
||||
*,
|
||||
allow_type_wildcard: bool = True,
|
||||
allow_subtype_wildcard: bool = True,
|
||||
) -> bool:
|
||||
return any(
|
||||
item.match(
|
||||
o,
|
||||
allow_type_wildcard=allow_type_wildcard,
|
||||
allow_subtype_wildcard=allow_subtype_wildcard,
|
||||
)
|
||||
for item in self
|
||||
)
|
||||
return Matched(*(l[0][3:] if l else ("", None)))
|
||||
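A minimal sketch of the matching behaviour documented above, using the parse_accept/AcceptList/Matched names introduced in this diff (header and mime values are illustrative):

acceptable = parse_accept("text/html, application/json;q=0.9, */*;q=0.1")

best = acceptable.match("application/json", "text/html")
str(best)   # "text/html" -- its Accept entry carries the higher q value (1.0)
best.m      # the MediaType entry from the Accept header that matched
bool(acceptable.match("application/msword"))   # True here, via the */* entry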
|
||||
|
||||
def parse_accept(accept: str) -> AcceptList:
|
||||
"""Parse an Accept header and order the acceptable media types in
|
||||
accorsing to RFC 7231, s. 5.3.2
|
||||
https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
|
||||
"""
|
||||
if not accept:
|
||||
return AcceptList()
|
||||
try:
|
||||
a = [MediaType._parse(mtype) for mtype in accept.split(",")]
|
||||
return AcceptList(sorted(a, key=lambda mtype: -mtype.q))
|
||||
except ValueError:
|
||||
raise InvalidHeader(f"Invalid header value in Accept: {accept}")
|
||||
|
||||
|
||||
def parse_content_header(value: str) -> Tuple[str, Options]:
|
||||
@@ -374,6 +368,34 @@ def format_http1_response(status: int, headers: HeaderBytesIterable) -> bytes:
|
||||
return ret
|
||||
|
||||
|
||||
def _sort_accept_value(accept: Accept):
|
||||
return (
|
||||
accept.qvalue,
|
||||
len(accept.params),
|
||||
accept.subtype != "*",
|
||||
accept.type_ != "*",
|
||||
)
|
||||
|
||||
|
||||
def parse_accept(accept: str) -> AcceptContainer:
|
||||
"""Parse an Accept header and order the acceptable media types in
|
||||
accorsing to RFC 7231, s. 5.3.2
|
||||
https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
|
||||
"""
|
||||
media_types = accept.split(",")
|
||||
accept_list: List[Accept] = []
|
||||
|
||||
for mtype in media_types:
|
||||
if not mtype:
|
||||
continue
|
||||
|
||||
accept_list.append(Accept.parse(mtype))
|
||||
|
||||
return AcceptContainer(
|
||||
sorted(accept_list, key=_sort_accept_value, reverse=True)
|
||||
)
|
||||
|
||||
|
||||
def parse_credentials(
|
||||
header: Optional[str],
|
||||
prefixes: Union[List, Tuple, Set] = None,
|
||||
|
||||
@@ -71,6 +71,7 @@ class Http(Stream, metaclass=TouchUpMeta):
|
||||
"request_body",
|
||||
"request_bytes",
|
||||
"request_bytes_left",
|
||||
"request_max_size",
|
||||
"response",
|
||||
"response_func",
|
||||
"response_size",
|
||||
|
||||
@@ -19,7 +19,7 @@ class Stream:
|
||||
request_max_size: Union[int, float]
|
||||
|
||||
__touchup__: Tuple[str, ...] = tuple()
|
||||
__slots__ = ("request_max_size",)
|
||||
__slots__ = ()
|
||||
|
||||
def respond(
|
||||
self, response: BaseHTTPResponse
|
||||
|
||||
@@ -24,15 +24,13 @@ def create_context(
|
||||
certfile: Optional[str] = None,
|
||||
keyfile: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
purpose: ssl.Purpose = ssl.Purpose.CLIENT_AUTH,
|
||||
) -> ssl.SSLContext:
|
||||
"""Create a context with secure crypto and HTTP/1.1 in protocols."""
|
||||
context = ssl.create_default_context(purpose=purpose)
|
||||
context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
|
||||
context.minimum_version = ssl.TLSVersion.TLSv1_2
|
||||
context.set_ciphers(":".join(CIPHERS_TLS12))
|
||||
context.set_alpn_protocols(["http/1.1"])
|
||||
if purpose is ssl.Purpose.CLIENT_AUTH:
|
||||
context.sni_callback = server_name_callback
|
||||
context.sni_callback = server_name_callback
|
||||
if certfile and keyfile:
|
||||
context.load_cert_chain(certfile, keyfile, password)
|
||||
return context
|
||||
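A short, hedged example of calling the helper above; the certificate paths are placeholders, not files from the repository:

import ssl

context = create_context(
    certfile="./certs/fullchain.pem",  # placeholder path
    keyfile="./certs/privkey.pem",     # placeholder path
)
assert context.minimum_version == ssl.TLSVersion.TLSv1_2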
|
||||
@@ -126,6 +126,7 @@ class CertCreator(ABC):
|
||||
local_tls_key,
|
||||
local_tls_cert,
|
||||
) -> CertCreator:
|
||||
|
||||
creator: Optional[CertCreator] = None
|
||||
|
||||
cert_creator_options: Tuple[
|
||||
|
||||
16
sanic/log.py
@@ -1,22 +1,10 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, Any, Dict
|
||||
from typing import Any, Dict
|
||||
from warnings import warn
|
||||
|
||||
from sanic.compat import is_atty
|
||||
|
||||
|
||||
# Python 3.11 changed the way Enum formatting works for mixed-in types.
|
||||
if sys.version_info < (3, 11, 0):
|
||||
|
||||
class StrEnum(str, Enum):
|
||||
pass
|
||||
|
||||
else:
|
||||
if not TYPE_CHECKING:
|
||||
from enum import StrEnum
|
||||
from sanic.compat import StrEnum, is_atty
|
||||
|
||||
|
||||
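A small illustration of why the shim above exists; the enum is a generic example, not code from the repository:

# On Python 3.11+, formatting a (str, Enum) mixin yields "Color.RED",
# while StrEnum keeps the plain value, so behaviour stays consistent.
class Color(StrEnum):
    RED = "red"

assert f"{Color.RED}" == "red"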
LOGGING_CONFIG_DEFAULTS: Dict[str, Any] = dict( # no cov
|
||||
|
||||
@@ -32,9 +32,6 @@ class Middleware:
|
||||
def __call__(self, *args, **kwargs):
|
||||
return self.func(*args, **kwargs)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.func)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"{self.__class__.__name__}("
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
from typing import Optional
|
||||
|
||||
from sanic.base.meta import SanicMeta
|
||||
|
||||
|
||||
class BaseMixin(metaclass=SanicMeta):
|
||||
name: str
|
||||
strict_slashes: Optional[bool]
|
||||
|
||||
def _generate_name(self, *objects) -> str:
|
||||
name = None
|
||||
|
||||
for obj in objects:
|
||||
if obj:
|
||||
if isinstance(obj, str):
|
||||
name = obj
|
||||
break
|
||||
|
||||
try:
|
||||
name = obj.name
|
||||
except AttributeError:
|
||||
try:
|
||||
name = obj.__name__
|
||||
except AttributeError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
if not name: # noqa
|
||||
raise ValueError("Could not generate a name for handler")
|
||||
|
||||
if not name.startswith(f"{self.name}."):
|
||||
name = f"{self.name}.{name}"
|
||||
|
||||
return name
|
||||
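As a rough illustration of the name resolution above, assuming a blueprint named "bp" (the route and handler are made up):

from sanic import Blueprint

bp = Blueprint("bp")

@bp.get("/user")
async def get_user(request):
    ...

# With self.name == "bp", _generate_name(None, get_user) falls through to
# the handler's __name__ and prefixes it, producing "bp.get_user".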
@@ -1,6 +1,11 @@
|
||||
from ast import NodeVisitor, Return, parse
|
||||
from contextlib import suppress
|
||||
from email.utils import formatdate
|
||||
from functools import partial, wraps
|
||||
from inspect import getsource, signature
|
||||
from mimetypes import guess_type
|
||||
from os import path
|
||||
from pathlib import Path, PurePath
|
||||
from textwrap import dedent
|
||||
from typing import (
|
||||
Any,
|
||||
@@ -14,15 +19,20 @@ from typing import (
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from urllib.parse import unquote
|
||||
|
||||
from sanic_routing.route import Route
|
||||
|
||||
from sanic.base.meta import SanicMeta
|
||||
from sanic.constants import HTTP_METHODS
|
||||
from sanic.compat import stat_async
|
||||
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE, HTTP_METHODS
|
||||
from sanic.errorpages import RESPONSE_MAPPING
|
||||
from sanic.mixins.base import BaseMixin
|
||||
from sanic.exceptions import FileNotFound, HeaderNotFound, RangeNotSatisfiable
|
||||
from sanic.handlers import ContentRangeHandler
|
||||
from sanic.log import error_logger
|
||||
from sanic.models.futures import FutureRoute, FutureStatic
|
||||
from sanic.models.handler_types import RouteHandler
|
||||
from sanic.response import HTTPResponse, file, file_stream, validate_file
|
||||
from sanic.types import HashableDict
|
||||
|
||||
|
||||
@@ -31,14 +41,20 @@ RouteWrapper = Callable[
|
||||
]
|
||||
|
||||
|
||||
class RouteMixin(BaseMixin, metaclass=SanicMeta):
|
||||
class RouteMixin(metaclass=SanicMeta):
|
||||
name: str
|
||||
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
self._future_routes: Set[FutureRoute] = set()
|
||||
self._future_statics: Set[FutureStatic] = set()
|
||||
self.strict_slashes: Optional[bool] = False
|
||||
|
||||
def _apply_route(self, route: FutureRoute) -> List[Route]:
|
||||
raise NotImplementedError # noqa
|
||||
|
||||
def _apply_static(self, static: FutureStatic) -> Route:
|
||||
raise NotImplementedError # noqa
|
||||
|
||||
def route(
|
||||
self,
|
||||
uri: str,
|
||||
@@ -202,7 +218,6 @@ class RouteMixin(BaseMixin, metaclass=SanicMeta):
|
||||
stream: bool = False,
|
||||
version_prefix: str = "/v",
|
||||
error_format: Optional[str] = None,
|
||||
unquote: bool = False,
|
||||
**ctx_kwargs: Any,
|
||||
) -> RouteHandler:
|
||||
"""A helper method to register class instance or
|
||||
@@ -249,7 +264,6 @@ class RouteMixin(BaseMixin, metaclass=SanicMeta):
|
||||
name=name,
|
||||
version_prefix=version_prefix,
|
||||
error_format=error_format,
|
||||
unquote=unquote,
|
||||
**ctx_kwargs,
|
||||
)(handler)
|
||||
return handler
|
||||
@@ -672,6 +686,324 @@ class RouteMixin(BaseMixin, metaclass=SanicMeta):
|
||||
**ctx_kwargs,
|
||||
)(handler)
|
||||
|
||||
def static(
|
||||
self,
|
||||
uri: str,
|
||||
file_or_directory: Union[str, bytes, PurePath],
|
||||
pattern: str = r"/?.+",
|
||||
use_modified_since: bool = True,
|
||||
use_content_range: bool = False,
|
||||
stream_large_files: bool = False,
|
||||
name: str = "static",
|
||||
host: Optional[str] = None,
|
||||
strict_slashes: Optional[bool] = None,
|
||||
content_type: Optional[bool] = None,
|
||||
apply: bool = True,
|
||||
resource_type: Optional[str] = None,
|
||||
):
|
||||
"""
|
||||
Register a root to serve files from. The input can either be a
|
||||
file or a directory. This method provides a simple way to set up the
:class:`Route` necessary to serve the static files.
|
||||
|
||||
:param uri: URL path to be used for serving static content
|
||||
:param file_or_directory: Path for the Static file/directory with
|
||||
static files
|
||||
:param pattern: Regex Pattern identifying the valid static files
|
||||
:param use_modified_since: If true, send the file's modified time, and return
Not Modified if the browser's cached copy matches the server's
:param use_content_range: If true, process the header for range requests
and send the file part that is requested
|
||||
:param stream_large_files: If true, use the
|
||||
:func:`StreamingHTTPResponse.file_stream` handler rather
|
||||
than the :func:`HTTPResponse.file` handler to send the file.
|
||||
If this is an integer, this represents the threshold size to
|
||||
switch to :func:`StreamingHTTPResponse.file_stream`
|
||||
:param name: user defined name used for url_for
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`Sanic` to check if the request
|
||||
URLs need to terminate with a */*
|
||||
:param content_type: user defined content type for header
|
||||
:return: routes registered on the router
|
||||
:rtype: List[sanic.router.Route]
|
||||
"""
|
||||
|
||||
name = self._generate_name(name)
|
||||
|
||||
if strict_slashes is None and self.strict_slashes is not None:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
if not isinstance(file_or_directory, (str, bytes, PurePath)):
|
||||
raise ValueError(
|
||||
f"Static route must be a valid path, not {file_or_directory}"
|
||||
)
|
||||
|
||||
static = FutureStatic(
|
||||
uri,
|
||||
file_or_directory,
|
||||
pattern,
|
||||
use_modified_since,
|
||||
use_content_range,
|
||||
stream_large_files,
|
||||
name,
|
||||
host,
|
||||
strict_slashes,
|
||||
content_type,
|
||||
resource_type,
|
||||
)
|
||||
self._future_statics.add(static)
|
||||
|
||||
if apply:
|
||||
self._apply_static(static)
|
||||
|
||||
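A hedged example of the stream_large_files threshold documented above, assuming an existing Sanic app (path and size are arbitrary):

# Files of 1 MiB or larger are sent with file_stream(); smaller files
# still go through file().
app.static("/media", "./media", stream_large_files=1024 * 1024)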
def _generate_name(self, *objects) -> str:
|
||||
name = None
|
||||
|
||||
for obj in objects:
|
||||
if obj:
|
||||
if isinstance(obj, str):
|
||||
name = obj
|
||||
break
|
||||
|
||||
try:
|
||||
name = obj.name
|
||||
except AttributeError:
|
||||
try:
|
||||
name = obj.__name__
|
||||
except AttributeError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
if not name: # noqa
|
||||
raise ValueError("Could not generate a name for handler")
|
||||
|
||||
if not name.startswith(f"{self.name}."):
|
||||
name = f"{self.name}.{name}"
|
||||
|
||||
return name
|
||||
|
||||
async def _get_file_path(self, file_or_directory, __file_uri__, not_found):
|
||||
file_path_raw = Path(unquote(file_or_directory))
|
||||
root_path = file_path = file_path_raw.resolve()
|
||||
|
||||
if __file_uri__:
|
||||
# Strip any leading / from the URL to keep Python from
# treating the uri as an absolute path
|
||||
unquoted_file_uri = unquote(__file_uri__).lstrip("/")
|
||||
file_path_raw = Path(file_or_directory, unquoted_file_uri)
|
||||
file_path = file_path_raw.resolve()
|
||||
if (
|
||||
file_path < root_path and not file_path_raw.is_symlink()
|
||||
) or ".." in file_path_raw.parts:
|
||||
error_logger.exception(
|
||||
f"File not found: path={file_or_directory}, "
|
||||
f"relative_url={__file_uri__}"
|
||||
)
|
||||
raise not_found
|
||||
|
||||
try:
|
||||
file_path.relative_to(root_path)
|
||||
except ValueError:
|
||||
if not file_path_raw.is_symlink():
|
||||
error_logger.exception(
|
||||
f"File not found: path={file_or_directory}, "
|
||||
f"relative_url={__file_uri__}"
|
||||
)
|
||||
raise not_found
|
||||
return file_path
|
||||
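The relative_to() check above is what rejects path traversal; a standalone sketch of the same idea with illustrative paths:

from pathlib import Path

root_path = Path("./static").resolve()
requested = Path("./static", "../etc/passwd").resolve()

try:
    requested.relative_to(root_path)
except ValueError:
    # Resolved outside the served directory: the handler raises not_found.
    pass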
|
||||
async def _static_request_handler(
|
||||
self,
|
||||
file_or_directory,
|
||||
use_modified_since,
|
||||
use_content_range,
|
||||
stream_large_files,
|
||||
request,
|
||||
content_type=None,
|
||||
__file_uri__=None,
|
||||
):
|
||||
not_found = FileNotFound(
|
||||
"File not found",
|
||||
path=file_or_directory,
|
||||
relative_url=__file_uri__,
|
||||
)
|
||||
|
||||
# Merge served directory and requested file if provided
|
||||
file_path = await self._get_file_path(
|
||||
file_or_directory, __file_uri__, not_found
|
||||
)
|
||||
|
||||
try:
|
||||
headers = {}
|
||||
# Check if the client has been sent this file before
|
||||
# and it has not been modified since
|
||||
stats = None
|
||||
if use_modified_since:
|
||||
stats = await stat_async(file_path)
|
||||
modified_since = stats.st_mtime
|
||||
response = await validate_file(request.headers, modified_since)
|
||||
if response:
|
||||
return response
|
||||
headers["Last-Modified"] = formatdate(
|
||||
modified_since, usegmt=True
|
||||
)
|
||||
_range = None
|
||||
if use_content_range:
|
||||
_range = None
|
||||
if not stats:
|
||||
stats = await stat_async(file_path)
|
||||
headers["Accept-Ranges"] = "bytes"
|
||||
headers["Content-Length"] = str(stats.st_size)
|
||||
if request.method != "HEAD":
|
||||
try:
|
||||
_range = ContentRangeHandler(request, stats)
|
||||
except HeaderNotFound:
|
||||
pass
|
||||
else:
|
||||
del headers["Content-Length"]
|
||||
headers.update(_range.headers)
|
||||
|
||||
if "content-type" not in headers:
|
||||
content_type = (
|
||||
content_type
|
||||
or guess_type(file_path)[0]
|
||||
or DEFAULT_HTTP_CONTENT_TYPE
|
||||
)
|
||||
|
||||
if "charset=" not in content_type and (
|
||||
content_type.startswith("text/")
|
||||
or content_type == "application/javascript"
|
||||
):
|
||||
content_type += "; charset=utf-8"
|
||||
|
||||
headers["Content-Type"] = content_type
|
||||
|
||||
if request.method == "HEAD":
|
||||
return HTTPResponse(headers=headers)
|
||||
else:
|
||||
if stream_large_files:
|
||||
if type(stream_large_files) == int:
|
||||
threshold = stream_large_files
|
||||
else:
|
||||
threshold = 1024 * 1024
|
||||
|
||||
if not stats:
|
||||
stats = await stat_async(file_path)
|
||||
if stats.st_size >= threshold:
|
||||
return await file_stream(
|
||||
file_path, headers=headers, _range=_range
|
||||
)
|
||||
return await file(file_path, headers=headers, _range=_range)
|
||||
except RangeNotSatisfiable:
|
||||
raise
|
||||
except FileNotFoundError:
|
||||
raise not_found
|
||||
except Exception:
|
||||
error_logger.exception(
|
||||
f"Exception in static request handler: "
|
||||
f"path={file_or_directory}, "
|
||||
f"relative_url={__file_uri__}"
|
||||
)
|
||||
raise
|
||||
|
||||
def _register_static(
|
||||
self,
|
||||
static: FutureStatic,
|
||||
):
|
||||
# TODO: Though sanic is not a file server, I feel like we should
|
||||
# at least make a good effort here. Modified-since is nice, but
|
||||
# we could also look into etags, expires, and caching
|
||||
"""
|
||||
Register a static directory handler with Sanic by adding a route to the
|
||||
router and registering a handler.
|
||||
|
||||
:param app: Sanic
|
||||
:param file_or_directory: File or directory path to serve from
|
||||
:type file_or_directory: Union[str,bytes,Path]
|
||||
:param uri: URL to serve from
|
||||
:type uri: str
|
||||
:param pattern: regular expression used to match files in the URL
|
||||
:param use_modified_since: If true, send the file's modified time, and return
Not Modified if the browser's cached copy matches the server's
:param use_content_range: If true, process the header for range requests
and send the file part that is requested
|
||||
:param stream_large_files: If true, use the file_stream() handler
|
||||
rather than the file() handler to send the file
|
||||
If this is an integer, this represents the
|
||||
threshold size to switch to file_stream()
|
||||
:param name: user defined name used for url_for
|
||||
:type name: str
|
||||
:param content_type: user defined content type for header
|
||||
:return: registered static routes
|
||||
:rtype: List[sanic.router.Route]
|
||||
"""
|
||||
|
||||
if isinstance(static.file_or_directory, bytes):
|
||||
file_or_directory = static.file_or_directory.decode("utf-8")
|
||||
elif isinstance(static.file_or_directory, PurePath):
|
||||
file_or_directory = str(static.file_or_directory)
|
||||
elif not isinstance(static.file_or_directory, str):
|
||||
raise ValueError("Invalid file path string.")
|
||||
else:
|
||||
file_or_directory = static.file_or_directory
|
||||
|
||||
uri = static.uri
|
||||
name = static.name
|
||||
# If we're not trying to match a file directly,
|
||||
# serve from the folder
|
||||
if not static.resource_type:
|
||||
if not path.isfile(file_or_directory):
|
||||
uri = uri.rstrip("/")
|
||||
uri += "/<__file_uri__:path>"
|
||||
elif static.resource_type == "dir":
|
||||
if path.isfile(file_or_directory):
|
||||
raise TypeError(
|
||||
"Resource type improperly identified as directory. "
|
||||
f"'{file_or_directory}'"
|
||||
)
|
||||
uri = uri.rstrip("/")
|
||||
uri += "/<__file_uri__:path>"
|
||||
elif static.resource_type == "file" and not path.isfile(
|
||||
file_or_directory
|
||||
):
|
||||
raise TypeError(
|
||||
"Resource type improperly identified as file. "
|
||||
f"'{file_or_directory}'"
|
||||
)
|
||||
elif static.resource_type != "file":
|
||||
raise ValueError(
|
||||
"The resource_type should be set to 'file' or 'dir'"
|
||||
)
|
||||
|
||||
# special prefix for static files
|
||||
# if not static.name.startswith("_static_"):
|
||||
# name = f"_static_{static.name}"
|
||||
|
||||
_handler = wraps(self._static_request_handler)(
|
||||
partial(
|
||||
self._static_request_handler,
|
||||
file_or_directory,
|
||||
static.use_modified_since,
|
||||
static.use_content_range,
|
||||
static.stream_large_files,
|
||||
content_type=static.content_type,
|
||||
)
|
||||
)
|
||||
|
||||
route, _ = self.route( # type: ignore
|
||||
uri=uri,
|
||||
methods=["GET", "HEAD"],
|
||||
name=name,
|
||||
host=static.host,
|
||||
strict_slashes=static.strict_slashes,
|
||||
static=True,
|
||||
)(_handler)
|
||||
|
||||
return route
|
||||
|
||||
def _determine_error_format(self, handler) -> str:
|
||||
with suppress(OSError, TypeError):
|
||||
src = dedent(getsource(handler))
|
||||
|
||||
@@ -20,7 +20,7 @@ class SignalMixin(metaclass=SanicMeta):
|
||||
event: Union[str, Enum],
|
||||
*,
|
||||
apply: bool = True,
|
||||
condition: Optional[Dict[str, Any]] = None,
|
||||
condition: Dict[str, Any] = None,
|
||||
exclusive: bool = True,
|
||||
) -> Callable[[SignalHandler], SignalHandler]:
|
||||
"""
|
||||
@@ -64,7 +64,7 @@ class SignalMixin(metaclass=SanicMeta):
|
||||
self,
|
||||
handler: Optional[Callable[..., Any]],
|
||||
event: str,
|
||||
condition: Optional[Dict[str, Any]] = None,
|
||||
condition: Dict[str, Any] = None,
|
||||
exclusive: bool = True,
|
||||
):
|
||||
if not handler:
|
||||
|
||||
@@ -27,7 +27,6 @@ from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Set,
|
||||
Tuple,
|
||||
@@ -41,9 +40,10 @@ from sanic.application.logo import get_logo
|
||||
from sanic.application.motd import MOTD
|
||||
from sanic.application.state import ApplicationServerInfo, Mode, ServerStage
|
||||
from sanic.base.meta import SanicMeta
|
||||
from sanic.compat import OS_IS_WINDOWS, StartMethod, is_atty
|
||||
from sanic.compat import OS_IS_WINDOWS, is_atty
|
||||
from sanic.constants import RestartOrder
|
||||
from sanic.exceptions import ServerKilled
|
||||
from sanic.helpers import Default, _default
|
||||
from sanic.helpers import Default
|
||||
from sanic.http.constants import HTTP
|
||||
from sanic.http.tls import get_ssl_context, process_to_context
|
||||
from sanic.http.tls.context import SanicSSLContext
|
||||
@@ -59,6 +59,7 @@ from sanic.server.protocols.http_protocol import HttpProtocol
|
||||
from sanic.server.protocols.websocket_protocol import WebSocketProtocol
|
||||
from sanic.server.runners import serve, serve_multiple, serve_single
|
||||
from sanic.server.socket import configure_socket, remove_unix_socket
|
||||
from sanic.worker.inspector import Inspector
|
||||
from sanic.worker.loader import AppLoader
|
||||
from sanic.worker.manager import WorkerManager
|
||||
from sanic.worker.multiplexer import WorkerMultiplexer
|
||||
@@ -88,7 +89,6 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
state: ApplicationState
|
||||
websocket_enabled: bool
|
||||
multiplexer: WorkerMultiplexer
|
||||
start_method: StartMethod = _default
|
||||
|
||||
def setup_loop(self):
|
||||
if not self.asgi:
|
||||
@@ -126,7 +126,7 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
register_sys_signals: bool = True,
|
||||
access_log: Optional[bool] = None,
|
||||
unix: Optional[str] = None,
|
||||
loop: Optional[AbstractEventLoop] = None,
|
||||
loop: AbstractEventLoop = None,
|
||||
reload_dir: Optional[Union[List[str], str]] = None,
|
||||
noisy_exceptions: Optional[bool] = None,
|
||||
motd: bool = True,
|
||||
@@ -225,7 +225,7 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
register_sys_signals: bool = True,
|
||||
access_log: Optional[bool] = None,
|
||||
unix: Optional[str] = None,
|
||||
loop: Optional[AbstractEventLoop] = None,
|
||||
loop: AbstractEventLoop = None,
|
||||
reload_dir: Optional[Union[List[str], str]] = None,
|
||||
noisy_exceptions: Optional[bool] = None,
|
||||
motd: bool = True,
|
||||
@@ -355,12 +355,12 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
debug: bool = False,
|
||||
ssl: Union[None, SSLContext, dict, str, list, tuple] = None,
|
||||
sock: Optional[socket] = None,
|
||||
protocol: Optional[Type[Protocol]] = None,
|
||||
protocol: Type[Protocol] = None,
|
||||
backlog: int = 100,
|
||||
access_log: Optional[bool] = None,
|
||||
unix: Optional[str] = None,
|
||||
return_asyncio_server: bool = False,
|
||||
asyncio_server_kwargs: Optional[Dict[str, Any]] = None,
|
||||
asyncio_server_kwargs: Dict[str, Any] = None,
|
||||
noisy_exceptions: Optional[bool] = None,
|
||||
) -> Optional[AsyncioServer]:
|
||||
"""
|
||||
@@ -481,7 +481,7 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
sock: Optional[socket] = None,
|
||||
unix: Optional[str] = None,
|
||||
workers: int = 1,
|
||||
loop: Optional[AbstractEventLoop] = None,
|
||||
loop: AbstractEventLoop = None,
|
||||
protocol: Type[Protocol] = HttpProtocol,
|
||||
backlog: int = 100,
|
||||
register_sys_signals: bool = True,
|
||||
@@ -692,18 +692,13 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
def should_auto_reload(cls) -> bool:
|
||||
return any(app.state.auto_reload for app in cls._app_registry.values())
|
||||
|
||||
@classmethod
|
||||
def _get_startup_method(cls) -> str:
|
||||
return (
|
||||
cls.start_method
|
||||
if not isinstance(cls.start_method, Default)
|
||||
else "spawn"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _get_context(cls) -> BaseContext:
|
||||
method = cls._get_startup_method()
|
||||
logger.debug("Creating multiprocessing context using '%s'", method)
|
||||
method = (
|
||||
"spawn"
|
||||
if "linux" not in sys.platform or cls.should_auto_reload()
|
||||
else "fork"
|
||||
)
|
||||
return get_context(method)
|
||||
|
||||
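A brief, hypothetical example of steering the context selection above; start_method is the class attribute this compare touches on one side only, so treat the assignment as unverified:

from sanic import Sanic

# By default _get_startup_method() resolves the Default sentinel to "spawn";
# opting into fork where the platform supports it:
Sanic.start_method = "fork"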
@classmethod
|
||||
@@ -769,7 +764,7 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
]
|
||||
primary_server_info.settings["run_multiple"] = True
|
||||
monitor_sub, monitor_pub = Pipe(True)
|
||||
worker_state: Mapping[str, Any] = sync_manager.dict()
|
||||
worker_state: Dict[str, Any] = sync_manager.dict()
|
||||
kwargs: Dict[str, Any] = {
|
||||
**primary_server_info.settings,
|
||||
"monitor_publisher": monitor_pub,
|
||||
@@ -820,12 +815,13 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
cls._get_context(),
|
||||
(monitor_pub, monitor_sub),
|
||||
worker_state,
|
||||
cast(RestartOrder, primary.config.RESTART_ORDER),
|
||||
)
|
||||
if cls.should_auto_reload():
|
||||
reload_dirs: Set[Path] = primary.state.reload_dirs.union(
|
||||
*(app.state.reload_dirs for app in apps)
|
||||
)
|
||||
reloader = Reloader(monitor_pub, 0, reload_dirs, app_loader)
|
||||
reloader = Reloader(monitor_pub, 1.0, reload_dirs, app_loader)
|
||||
manager.manage("Reloader", reloader, {}, transient=False)
|
||||
|
||||
inspector = None
|
||||
@@ -841,15 +837,12 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
"packages": [sanic_version, *packages],
|
||||
"extra": extra,
|
||||
}
|
||||
inspector = primary.inspector_class(
|
||||
inspector = Inspector(
|
||||
monitor_pub,
|
||||
app_info,
|
||||
worker_state,
|
||||
primary.config.INSPECTOR_HOST,
|
||||
primary.config.INSPECTOR_PORT,
|
||||
primary.config.INSPECTOR_API_KEY,
|
||||
primary.config.INSPECTOR_TLS_KEY,
|
||||
primary.config.INSPECTOR_TLS_CERT,
|
||||
)
|
||||
manager.manage("Inspector", inspector, {}, transient=False)
|
||||
|
||||
@@ -1109,6 +1102,7 @@ class StartupMixin(metaclass=SanicMeta):
|
||||
app: StartupMixin,
|
||||
server_info: ApplicationServerInfo,
|
||||
) -> None: # no cov
|
||||
|
||||
try:
|
||||
# We should never get to this point without a server
|
||||
# This is primarily to keep mypy happy
|
||||
|
||||
@@ -1,348 +0,0 @@
|
||||
from email.utils import formatdate
|
||||
from functools import partial, wraps
|
||||
from mimetypes import guess_type
|
||||
from os import PathLike, path
|
||||
from pathlib import Path, PurePath
|
||||
from typing import Optional, Sequence, Set, Union, cast
|
||||
from urllib.parse import unquote
|
||||
|
||||
from sanic_routing.route import Route
|
||||
|
||||
from sanic.base.meta import SanicMeta
|
||||
from sanic.compat import stat_async
|
||||
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE
|
||||
from sanic.exceptions import FileNotFound, HeaderNotFound, RangeNotSatisfiable
|
||||
from sanic.handlers import ContentRangeHandler
|
||||
from sanic.handlers.directory import DirectoryHandler
|
||||
from sanic.log import deprecation, error_logger
|
||||
from sanic.mixins.base import BaseMixin
|
||||
from sanic.models.futures import FutureStatic
|
||||
from sanic.request import Request
|
||||
from sanic.response import HTTPResponse, file, file_stream, validate_file
|
||||
|
||||
|
||||
class StaticMixin(BaseMixin, metaclass=SanicMeta):
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
self._future_statics: Set[FutureStatic] = set()
|
||||
|
||||
def _apply_static(self, static: FutureStatic) -> Route:
|
||||
raise NotImplementedError # noqa
|
||||
|
||||
def static(
|
||||
self,
|
||||
uri: str,
|
||||
file_or_directory: Union[PathLike, str, bytes],
|
||||
pattern: str = r"/?.+",
|
||||
use_modified_since: bool = True,
|
||||
use_content_range: bool = False,
|
||||
stream_large_files: Union[bool, int] = False,
|
||||
name: str = "static",
|
||||
host: Optional[str] = None,
|
||||
strict_slashes: Optional[bool] = None,
|
||||
content_type: Optional[str] = None,
|
||||
apply: bool = True,
|
||||
resource_type: Optional[str] = None,
|
||||
index: Optional[Union[str, Sequence[str]]] = None,
|
||||
directory_view: bool = False,
|
||||
directory_handler: Optional[DirectoryHandler] = None,
|
||||
):
|
||||
"""
|
||||
Register a root to serve files from. The input can either be a
|
||||
file or a directory. This method provides a simple way to set up the
:class:`Route` necessary to serve the static files.
|
||||
|
||||
:param uri: URL path to be used for serving static content
|
||||
:param file_or_directory: Path for the Static file/directory with
|
||||
static files
|
||||
:param pattern: Regex Pattern identifying the valid static files
|
||||
:param use_modified_since: If true, send the file's modified time, and return
Not Modified if the browser's cached copy matches the server's
:param use_content_range: If true, process the header for range requests
and send the file part that is requested
|
||||
:param stream_large_files: If true, use the
|
||||
:func:`StreamingHTTPResponse.file_stream` handler rather
|
||||
than the :func:`HTTPResponse.file` handler to send the file.
|
||||
If this is an integer, this represents the threshold size to
|
||||
switch to :func:`StreamingHTTPResponse.file_stream`
|
||||
:param name: user defined name used for url_for
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`Sanic` to check if the request
|
||||
URLs need to terminate with a */*
|
||||
:param content_type: user defined content type for header
|
||||
:param apply: If true, will register the route immediately
|
||||
:param resource_type: Explicitly declare a resource to be a "file" or a "dir"
|
||||
:param index: When exposing a directory, index is the name that
will be served as the default file. When multiple file names are
passed, they will be tried in order.
:param directory_view: Whether to fall back to showing the directory
viewer when exposing a directory
|
||||
:param directory_handler: An instance of :class:`DirectoryHandler`
|
||||
that can be used for explicitly controlling and subclassing the
|
||||
behavior of the default directory handler
|
||||
:return: routes registered on the router
|
||||
:rtype: List[sanic.router.Route]
|
||||
"""
|
||||
|
||||
name = self._generate_name(name)
|
||||
|
||||
if strict_slashes is None and self.strict_slashes is not None:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
if not isinstance(file_or_directory, (str, bytes, PurePath)):
|
||||
raise ValueError(
|
||||
f"Static route must be a valid path, not {file_or_directory}"
|
||||
)
|
||||
|
||||
if isinstance(file_or_directory, bytes):
|
||||
deprecation(
|
||||
"Serving a static directory with a bytes string is "
|
||||
"deprecated and will be removed in v22.9.",
|
||||
22.9,
|
||||
)
|
||||
file_or_directory = cast(str, file_or_directory.decode())
|
||||
file_or_directory = Path(file_or_directory)
|
||||
|
||||
if directory_handler and (directory_view or index):
|
||||
raise ValueError(
|
||||
"When explicitly setting directory_handler, you cannot "
|
||||
"set either directory_view or index. Instead, pass "
|
||||
"these arguments to your DirectoryHandler instance."
|
||||
)
|
||||
|
||||
if not directory_handler:
|
||||
directory_handler = DirectoryHandler(
|
||||
uri=uri,
|
||||
directory=file_or_directory,
|
||||
directory_view=directory_view,
|
||||
index=index,
|
||||
)
|
||||
|
||||
static = FutureStatic(
|
||||
uri,
|
||||
file_or_directory,
|
||||
pattern,
|
||||
use_modified_since,
|
||||
use_content_range,
|
||||
stream_large_files,
|
||||
name,
|
||||
host,
|
||||
strict_slashes,
|
||||
content_type,
|
||||
resource_type,
|
||||
directory_handler,
|
||||
)
|
||||
self._future_statics.add(static)
|
||||
|
||||
if apply:
|
||||
self._apply_static(static)
|
||||
|
||||
|
||||
class StaticHandleMixin(metaclass=SanicMeta):
|
||||
def _apply_static(self, static: FutureStatic) -> Route:
|
||||
return self._register_static(static)
|
||||
|
||||
def _register_static(
|
||||
self,
|
||||
static: FutureStatic,
|
||||
):
|
||||
# TODO: Though sanic is not a file server, I feel like we should
|
||||
# at least make a good effort here. Modified-since is nice, but
|
||||
# we could also look into etags, expires, and caching
|
||||
"""
|
||||
Register a static directory handler with Sanic by adding a route to the
|
||||
router and registering a handler.
|
||||
"""
|
||||
|
||||
if isinstance(static.file_or_directory, bytes):
|
||||
file_or_directory = static.file_or_directory.decode("utf-8")
|
||||
elif isinstance(static.file_or_directory, PurePath):
|
||||
file_or_directory = str(static.file_or_directory)
|
||||
elif not isinstance(static.file_or_directory, str):
|
||||
raise ValueError("Invalid file path string.")
|
||||
else:
|
||||
file_or_directory = static.file_or_directory
|
||||
|
||||
uri = static.uri
|
||||
name = static.name
|
||||
# If we're not trying to match a file directly,
|
||||
# serve from the folder
|
||||
if not static.resource_type:
|
||||
if not path.isfile(file_or_directory):
|
||||
uri = uri.rstrip("/")
|
||||
uri += "/<__file_uri__:path>"
|
||||
elif static.resource_type == "dir":
|
||||
if path.isfile(file_or_directory):
|
||||
raise TypeError(
|
||||
"Resource type improperly identified as directory. "
|
||||
f"'{file_or_directory}'"
|
||||
)
|
||||
uri = uri.rstrip("/")
|
||||
uri += "/<__file_uri__:path>"
|
||||
elif static.resource_type == "file" and not path.isfile(
|
||||
file_or_directory
|
||||
):
|
||||
raise TypeError(
|
||||
"Resource type improperly identified as file. "
|
||||
f"'{file_or_directory}'"
|
||||
)
|
||||
elif static.resource_type != "file":
|
||||
raise ValueError(
|
||||
"The resource_type should be set to 'file' or 'dir'"
|
||||
)
|
||||
|
||||
# special prefix for static files
|
||||
# if not static.name.startswith("_static_"):
|
||||
# name = f"_static_{static.name}"
|
||||
|
||||
_handler = wraps(self._static_request_handler)(
|
||||
partial(
|
||||
self._static_request_handler,
|
||||
file_or_directory=file_or_directory,
|
||||
use_modified_since=static.use_modified_since,
|
||||
use_content_range=static.use_content_range,
|
||||
stream_large_files=static.stream_large_files,
|
||||
content_type=static.content_type,
|
||||
directory_handler=static.directory_handler,
|
||||
)
|
||||
)
|
||||
|
||||
route, _ = self.route( # type: ignore
|
||||
uri=uri,
|
||||
methods=["GET", "HEAD"],
|
||||
name=name,
|
||||
host=static.host,
|
||||
strict_slashes=static.strict_slashes,
|
||||
static=True,
|
||||
)(_handler)
|
||||
|
||||
return route
|
||||
|
||||
async def _static_request_handler(
|
||||
self,
|
||||
request: Request,
|
||||
*,
|
||||
file_or_directory: PathLike,
|
||||
use_modified_since: bool,
|
||||
use_content_range: bool,
|
||||
stream_large_files: Union[bool, int],
|
||||
directory_handler: DirectoryHandler,
|
||||
content_type: Optional[str] = None,
|
||||
__file_uri__: Optional[str] = None,
|
||||
):
|
||||
not_found = FileNotFound(
|
||||
"File not found",
|
||||
path=file_or_directory,
|
||||
relative_url=__file_uri__,
|
||||
)
|
||||
|
||||
# Merge served directory and requested file if provided
|
||||
file_path = await self._get_file_path(
|
||||
file_or_directory, __file_uri__, not_found
|
||||
)
|
||||
|
||||
try:
|
||||
headers = {}
|
||||
# Check if the client has been sent this file before
|
||||
# and it has not been modified since
|
||||
stats = None
|
||||
if use_modified_since:
|
||||
stats = await stat_async(file_path)
|
||||
modified_since = stats.st_mtime
|
||||
response = await validate_file(request.headers, modified_since)
|
||||
if response:
|
||||
return response
|
||||
headers["Last-Modified"] = formatdate(
|
||||
modified_since, usegmt=True
|
||||
)
|
||||
_range = None
|
||||
if use_content_range:
|
||||
_range = None
|
||||
if not stats:
|
||||
stats = await stat_async(file_path)
|
||||
headers["Accept-Ranges"] = "bytes"
|
||||
headers["Content-Length"] = str(stats.st_size)
|
||||
if request.method != "HEAD":
|
||||
try:
|
||||
_range = ContentRangeHandler(request, stats)
|
||||
except HeaderNotFound:
|
||||
pass
|
||||
else:
|
||||
del headers["Content-Length"]
|
||||
headers.update(_range.headers)
|
||||
|
||||
if "content-type" not in headers:
|
||||
content_type = (
|
||||
content_type
|
||||
or guess_type(file_path)[0]
|
||||
or DEFAULT_HTTP_CONTENT_TYPE
|
||||
)
|
||||
|
||||
if "charset=" not in content_type and (
|
||||
content_type.startswith("text/")
|
||||
or content_type == "application/javascript"
|
||||
):
|
||||
content_type += "; charset=utf-8"
|
||||
|
||||
headers["Content-Type"] = content_type
|
||||
|
||||
if request.method == "HEAD":
|
||||
return HTTPResponse(headers=headers)
|
||||
else:
|
||||
if stream_large_files:
|
||||
if isinstance(stream_large_files, bool):
|
||||
threshold = 1024 * 1024
|
||||
else:
|
||||
threshold = stream_large_files
|
||||
|
||||
if not stats:
|
||||
stats = await stat_async(file_path)
|
||||
if stats.st_size >= threshold:
|
||||
return await file_stream(
|
||||
file_path, headers=headers, _range=_range
|
||||
)
|
||||
return await file(file_path, headers=headers, _range=_range)
|
||||
except (IsADirectoryError, PermissionError):
|
||||
return await directory_handler.handle(request, request.path)
|
||||
except RangeNotSatisfiable:
|
||||
raise
|
||||
except FileNotFoundError:
|
||||
raise not_found
|
||||
except Exception:
|
||||
error_logger.exception(
|
||||
"Exception in static request handler: "
|
||||
f"path={file_or_directory}, "
|
||||
f"relative_url={__file_uri__}"
|
||||
)
|
||||
raise
|
||||
|
||||
async def _get_file_path(self, file_or_directory, __file_uri__, not_found):
|
||||
file_path_raw = Path(unquote(file_or_directory))
|
||||
root_path = file_path = file_path_raw.resolve()
|
||||
|
||||
if __file_uri__:
|
||||
# Strip any leading / from the URL to keep Python from
# treating the uri as an absolute path
|
||||
unquoted_file_uri = unquote(__file_uri__).lstrip("/")
|
||||
file_path_raw = Path(file_or_directory, unquoted_file_uri)
|
||||
file_path = file_path_raw.resolve()
|
||||
if (
|
||||
file_path < root_path and not file_path_raw.is_symlink()
|
||||
) or ".." in file_path_raw.parts:
|
||||
error_logger.exception(
|
||||
f"File not found: path={file_or_directory}, "
|
||||
f"relative_url={__file_uri__}"
|
||||
)
|
||||
raise not_found
|
||||
|
||||
try:
|
||||
file_path.relative_to(root_path)
|
||||
except ValueError:
|
||||
if not file_path_raw.is_symlink():
|
||||
error_logger.exception(
|
||||
f"File not found: path={file_or_directory}, "
|
||||
f"relative_url={__file_uri__}"
|
||||
)
|
||||
raise not_found
|
||||
return file_path
|
||||
@@ -1,7 +1,6 @@
|
||||
from pathlib import Path
|
||||
from pathlib import PurePath
|
||||
from typing import Dict, Iterable, List, NamedTuple, Optional, Union
|
||||
|
||||
from sanic.handlers.directory import DirectoryHandler
|
||||
from sanic.models.handler_types import (
|
||||
ErrorMiddlewareType,
|
||||
ListenerType,
|
||||
@@ -47,17 +46,16 @@ class FutureException(NamedTuple):
|
||||
|
||||
class FutureStatic(NamedTuple):
|
||||
uri: str
|
||||
file_or_directory: Path
|
||||
file_or_directory: Union[str, bytes, PurePath]
|
||||
pattern: str
|
||||
use_modified_since: bool
|
||||
use_content_range: bool
|
||||
stream_large_files: Union[bool, int]
|
||||
stream_large_files: bool
|
||||
name: str
|
||||
host: Optional[str]
|
||||
strict_slashes: Optional[bool]
|
||||
content_type: Optional[str]
|
||||
content_type: Optional[bool]
|
||||
resource_type: Optional[str]
|
||||
directory_handler: DirectoryHandler
|
||||
|
||||
|
||||
class FutureSignal(NamedTuple):
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from html5tagger import HTML, Document
|
||||
|
||||
from sanic import __version__ as VERSION
|
||||
from sanic.application.logo import SVG_LOGO
|
||||
from sanic.pages.css import CSS
|
||||
|
||||
|
||||
class BasePage(ABC, metaclass=CSS): # no cov
|
||||
TITLE = "Unknown"
|
||||
CSS: str
|
||||
|
||||
def __init__(self, debug: bool = True) -> None:
|
||||
self.doc = Document(self.TITLE, lang="en")
|
||||
self.debug = debug
|
||||
|
||||
@property
|
||||
def style(self) -> str:
|
||||
return self.CSS
|
||||
|
||||
def render(self) -> str:
|
||||
self._head()
|
||||
self._body()
|
||||
self._foot()
|
||||
return str(self.doc)
|
||||
|
||||
def _head(self) -> None:
|
||||
self.doc.style(HTML(self.style))
|
||||
with self.doc.header:
|
||||
self.doc.div(self.TITLE)
|
||||
|
||||
def _foot(self) -> None:
|
||||
with self.doc.footer:
|
||||
self.doc.div("powered by")
|
||||
with self.doc.div:
|
||||
self._sanic_logo()
|
||||
if self.debug:
|
||||
self.doc.div(f"Version {VERSION}")
|
||||
|
||||
@abstractmethod
|
||||
def _body(self) -> None:
|
||||
...
|
||||
|
||||
def _sanic_logo(self) -> None:
|
||||
self.doc.a(
|
||||
HTML(SVG_LOGO),
|
||||
href="https://sanic.dev",
|
||||
target="_blank",
|
||||
referrerpolicy="no-referrer",
|
||||
)
|
||||
@@ -1,35 +0,0 @@
|
||||
from abc import ABCMeta
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
|
||||
CURRENT_DIR = Path(__file__).parent
|
||||
|
||||
|
||||
def _extract_style(maybe_style: Optional[str], name: str) -> str:
|
||||
if maybe_style is not None:
|
||||
maybe_path = Path(maybe_style)
|
||||
if maybe_path.exists():
|
||||
return maybe_path.read_text(encoding="UTF-8")
|
||||
return maybe_style
|
||||
maybe_path = CURRENT_DIR / "styles" / f"{name}.css"
|
||||
if maybe_path.exists():
|
||||
return maybe_path.read_text(encoding="UTF-8")
|
||||
return ""
|
||||
|
||||
|
||||
class CSS(ABCMeta):
|
||||
"""Cascade stylesheets, i.e. combine all ancestor styles"""
|
||||
|
||||
def __new__(cls, name, bases, attrs):
|
||||
Page = super().__new__(cls, name, bases, attrs)
|
||||
# Use a locally defined STYLE or the one from styles directory
|
||||
s = _extract_style(attrs.get("STYLE"), name)
|
||||
Page.STYLE = f"\n/* {name} */\n{s.strip()}\n" if s else ""
|
||||
# Combine with all ancestor styles
|
||||
Page.CSS = "".join(
|
||||
Class.STYLE
|
||||
for Class in reversed(Page.__mro__)
|
||||
if type(Class) is CSS
|
||||
)
|
||||
return Page
|
||||
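The metaclass above (present on only one side of this compare) cascades styles through the MRO; a small sketch of the effect with stand-in classes:

class Base(metaclass=CSS):
    STYLE = "body { margin: 0 }"

class Child(Base):
    STYLE = "table { width: 100% }"

# Child.CSS concatenates every ancestor's STYLE, ancestors first:
#   /* Base */  body { margin: 0 }
#   /* Child */ table { width: 100% }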
@@ -1,66 +0,0 @@
|
||||
import sys
|
||||
|
||||
from typing import Dict, Iterable
|
||||
|
||||
from html5tagger import E
|
||||
|
||||
from .base import BasePage
|
||||
|
||||
|
||||
if sys.version_info < (3, 8): # no cov
|
||||
FileInfo = Dict
|
||||
|
||||
else:
|
||||
from typing import TypedDict
|
||||
|
||||
class FileInfo(TypedDict):
|
||||
icon: str
|
||||
file_name: str
|
||||
file_access: str
|
||||
file_size: str
|
||||
|
||||
|
||||
class DirectoryPage(BasePage): # no cov
|
||||
TITLE = "Directory Viewer"
|
||||
|
||||
def __init__(
|
||||
self, files: Iterable[FileInfo], url: str, debug: bool
|
||||
) -> None:
|
||||
super().__init__(debug)
|
||||
self.files = files
|
||||
self.url = url
|
||||
|
||||
def _body(self) -> None:
|
||||
with self.doc.main:
|
||||
self._headline()
|
||||
files = list(self.files)
|
||||
if files:
|
||||
self._file_table(files)
|
||||
else:
|
||||
self.doc.p("The folder is empty.")
|
||||
|
||||
def _headline(self):
|
||||
"""Implement a heading with the current path, combined with
|
||||
breadcrumb links"""
|
||||
with self.doc.h1(id="breadcrumbs"):
|
||||
p = self.url.split("/")[:-1]
|
||||
|
||||
for i, part in enumerate(p):
|
||||
path = "/".join(p[: i + 1]) + "/"
|
||||
with self.doc.a(href=path):
|
||||
self.doc.span(part, class_="dir").span("/", class_="sep")
|
||||
|
||||
def _file_table(self, files: Iterable[FileInfo]):
|
||||
with self.doc.table(class_="autoindex container"):
|
||||
for f in files:
|
||||
self._file_row(**f)
|
||||
|
||||
def _file_row(
|
||||
self,
|
||||
icon: str,
|
||||
file_name: str,
|
||||
file_access: str,
|
||||
file_size: str,
|
||||
):
|
||||
first = E.span(icon, class_="icon").a(file_name, href=file_name)
|
||||
self.doc.tr.td(first).td(file_size).td(file_access)
|
||||
@@ -1,79 +0,0 @@
|
||||
html {
|
||||
font: 16px sans-serif;
|
||||
background: #eee;
|
||||
color: #111;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
body>* {
|
||||
padding: 1rem 2vw;
|
||||
}
|
||||
|
||||
@media (max-width: 1200px) {
|
||||
body>* {
|
||||
padding: 0.5rem 1.5vw;
|
||||
}
|
||||
|
||||
body {
|
||||
font-size: 1rem;
|
||||
}
|
||||
}
|
||||
|
||||
.container {
|
||||
min-width: 600px;
|
||||
max-width: 1600px;
|
||||
}
|
||||
|
||||
header {
|
||||
background: #111;
|
||||
color: #e1e1e1;
|
||||
border-bottom: 1px solid #272727;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
footer {
|
||||
text-align: center;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
font-size: 0.8rem;
|
||||
margin-top: 2rem;
|
||||
}
|
||||
|
||||
h1 {
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
a:visited {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
a {
|
||||
text-decoration: none;
|
||||
color: #88f;
|
||||
}
|
||||
|
||||
a:hover,
|
||||
a:focus {
|
||||
text-decoration: underline;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
#logo {
|
||||
height: 1.75rem;
|
||||
padding: 0 0.25rem;
|
||||
}
|
||||
|
||||
span.icon {
|
||||
margin-right: 1rem;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
html {
|
||||
background: #111;
|
||||
color: #ccc;
|
||||
}
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
#breadcrumbs>a:hover {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
#breadcrumbs>a .dir {
|
||||
padding: 0 0.25em;
|
||||
}
|
||||
|
||||
#breadcrumbs>a:first-child:hover::before,
|
||||
#breadcrumbs>a .dir:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
#breadcrumbs>a:first-child::before {
|
||||
content: "🏠";
|
||||
}
|
||||
|
||||
#breadcrumbs>a:last-child {
|
||||
color: #ff0d68;
|
||||
}
|
||||
|
||||
main a {
|
||||
color: inherit;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
table.autoindex {
|
||||
width: 100%;
|
||||
font-family: monospace;
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
table.autoindex tr {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
table.autoindex tr:hover {
|
||||
background-color: #ddd;
|
||||
}
|
||||
|
||||
table.autoindex td {
|
||||
margin: 0 0.5rem;
|
||||
}
|
||||
|
||||
table.autoindex td:first-child {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
table.autoindex td:nth-child(2) {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
table.autoindex td:last-child {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
table.autoindex tr:hover {
|
||||
background-color: #222;
|
||||
}
|
||||
}
|
||||
@@ -27,7 +27,6 @@ if TYPE_CHECKING:
|
||||
from sanic.app import Sanic
|
||||
|
||||
import email.utils
|
||||
import unicodedata
|
||||
import uuid
|
||||
|
||||
from collections import defaultdict
|
||||
@@ -47,7 +46,7 @@ from sanic.constants import (
|
||||
)
|
||||
from sanic.exceptions import BadRequest, BadURL, ServerError
|
||||
from sanic.headers import (
|
||||
AcceptList,
|
||||
AcceptContainer,
|
||||
Options,
|
||||
parse_accept,
|
||||
parse_content_header,
|
||||
@@ -146,6 +145,7 @@ class Request:
|
||||
head: bytes = b"",
|
||||
stream_id: int = 0,
|
||||
):
|
||||
|
||||
self.raw_url = url_bytes
|
||||
try:
|
||||
self._parsed_url = parse_url(url_bytes)
|
||||
@@ -167,7 +167,7 @@ class Request:
|
||||
self.conn_info: Optional[ConnInfo] = None
|
||||
self.ctx = SimpleNamespace()
|
||||
self.parsed_forwarded: Optional[Options] = None
|
||||
self.parsed_accept: Optional[AcceptList] = None
|
||||
self.parsed_accept: Optional[AcceptContainer] = None
|
||||
self.parsed_credentials: Optional[Credentials] = None
|
||||
self.parsed_json = None
|
||||
self.parsed_form: Optional[RequestParameters] = None
|
||||
@@ -499,7 +499,7 @@ class Request:
|
||||
return self.parsed_json
|
||||
|
||||
@property
|
||||
def accept(self) -> AcceptList:
|
||||
def accept(self) -> AcceptContainer:
|
||||
"""
|
||||
:return: The ``Accept`` header parsed
|
||||
:rtype: AcceptContainer
|
||||
@@ -1084,16 +1084,6 @@ def parse_multipart_form(body, boundary):
|
||||
form_parameters["filename*"]
|
||||
)
|
||||
file_name = unquote(value, encoding=encoding)
|
||||
|
||||
# Normalize to NFC (Apple MacOS/iOS send NFD)
|
||||
# Notes:
|
||||
# - No effect for Windows, Linux or Android clients which
|
||||
# already send NFC
|
||||
# - Python open() is tricky (creates files in NFC no matter
|
||||
# which form you use)
|
||||
if file_name is not None:
|
||||
file_name = unicodedata.normalize("NFC", file_name)
|
||||
|
||||
elif form_header_field == "content-type":
|
||||
content_type = form_header_value
|
||||
content_charset = form_parameters.get("charset", "utf-8")
|
||||
|
||||
@@ -94,6 +94,7 @@ def watchdog(sleep_interval, reload_dirs):

try:
while True:

changed = set()
for filename in itertools.chain(
_iter_module_files(),

@@ -1,11 +1,11 @@
import asyncio
import sys

from distutils.util import strtobool
from os import getenv

from sanic.compat import OS_IS_WINDOWS
from sanic.log import error_logger
from sanic.utils import str_to_bool


def try_use_uvloop() -> None:
@@ -35,7 +35,7 @@ def try_use_uvloop() -> None:
)
return

uvloop_install_removed = str_to_bool(getenv("SANIC_NO_UVLOOP", "no"))
uvloop_install_removed = strtobool(getenv("SANIC_NO_UVLOOP", "no"))
if uvloop_install_removed:
error_logger.info(
"You are requesting to run Sanic using uvloop, but the "

@@ -200,7 +200,7 @@ def _serve_http_1(
asyncio_server_kwargs = (
asyncio_server_kwargs if asyncio_server_kwargs else {}
)
if OS_IS_WINDOWS and sock:
if OS_IS_WINDOWS:
pid = os.getpid()
sock = sock.share(pid)
sock = socket.fromshare(sock)
@@ -229,7 +229,6 @@ def _serve_http_1(

loop.run_until_complete(app._startup())
loop.run_until_complete(app._server_event("init", "before"))
app.ack()

try:
http_server = loop.run_until_complete(server_coroutine)
@@ -307,7 +306,6 @@ def _serve_http_3(
server = AsyncioServer(app, loop, coro, [])
loop.run_until_complete(server.startup())
loop.run_until_complete(server.before_start())
app.ack()
loop.run_until_complete(server)
_setup_system_signals(app, run_multiple, register_sys_signals, loop)
loop.run_until_complete(server.after_start())

@@ -9,10 +9,8 @@ from typing import (
Union,
)

from sanic.exceptions import InvalidUsage


ASGIMessage = MutableMapping[str, Any]
ASIMessage = MutableMapping[str, Any]


class WebSocketConnection:
@@ -27,8 +25,8 @@ class WebSocketConnection:

def __init__(
self,
send: Callable[[ASGIMessage], Awaitable[None]],
receive: Callable[[], Awaitable[ASGIMessage]],
send: Callable[[ASIMessage], Awaitable[None]],
receive: Callable[[], Awaitable[ASIMessage]],
subprotocols: Optional[List[str]] = None,
) -> None:
self._send = send
@@ -45,17 +43,11 @@ class WebSocketConnection:

await self._send(message)

async def recv(self, *args, **kwargs) -> Optional[Union[str, bytes]]:
async def recv(self, *args, **kwargs) -> Optional[str]:
message = await self._receive()

if message["type"] == "websocket.receive":
try:
return message["text"]
except KeyError:
try:
return message["bytes"]
except KeyError:
raise InvalidUsage("Bad ASGI message received")
return message["text"]
elif message["type"] == "websocket.disconnect":
pass
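For reference, a hedged sketch of the text-or-bytes dispatch that the recv() change above performs on an ASGI websocket.receive message; the helper name here is illustrative only:

```python
from typing import Optional, Union

def extract_ws_payload(message: dict) -> Optional[Union[str, bytes]]:
    # websocket.receive messages carry either a "text" or a "bytes" field
    if message["type"] != "websocket.receive":
        return None
    if "text" in message and message["text"] is not None:
        return message["text"]
    return message.get("bytes")

assert extract_ws_payload({"type": "websocket.receive", "text": "hi"}) == "hi"
assert extract_ws_payload({"type": "websocket.receive", "bytes": b"hi"}) == b"hi"
```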
@@ -52,6 +52,7 @@ class WebsocketFrameAssembler:
paused: bool

def __init__(self, protocol) -> None:

self.protocol = protocol

self.read_mutex = asyncio.Lock()

@@ -686,6 +686,7 @@ class WebsocketImplProtocol:
:raises TypeError: for unsupported inputs
"""
async with self.conn_mutex:

if self.ws_proto.state in (CLOSED, CLOSING):
raise WebsocketClosed(
"Cannot write to websocket interface after it is closed."

@@ -154,7 +154,9 @@ class SignalRouter(BaseRouter):
try:
for signal in signals:
params.pop("__trigger__", None)
requirements = signal.extra.requirements
requirements = getattr(
signal.handler, "__requirements__", None
)
if (
(condition is None and signal.ctx.exclusive is False)
or (condition is None and not requirements)
@@ -217,13 +219,8 @@ class SignalRouter(BaseRouter):
if not trigger:
event = ".".join([*parts[:2], "<__trigger__>"])

try:
# Attaching __requirements__ and __trigger__ to the handler
# is deprecated and will be removed in v23.6.
handler.__requirements__ = condition # type: ignore
handler.__trigger__ = trigger # type: ignore
except AttributeError:
pass
handler.__requirements__ = condition # type: ignore
handler.__trigger__ = trigger # type: ignore

signal = super().add(
event,
@@ -235,7 +232,6 @@ class SignalRouter(BaseRouter):
signal.ctx.exclusive = exclusive
signal.ctx.trigger = trigger
signal.ctx.definition = event_definition
signal.extra.requirements = condition

return cast(Signal, signal)


@@ -2,6 +2,7 @@ from pathlib import Path

from sanic import Sanic
from sanic.exceptions import SanicException
from sanic.response import redirect


def create_simple_server(directory: Path):
@@ -11,8 +12,10 @@ def create_simple_server(directory: Path):
)

app = Sanic("SimpleServer")
app.static(
"/", directory, name="main", directory_view=True, index="index.html"
)
app.static("/", directory, name="main")

@app.get("/")
def index(_):
return redirect(app.url_for("main", filename="index.html"))

return app

@@ -11,6 +11,7 @@ class TouchUpMeta(SanicMeta):
methods = attrs.get("__touchup__")
attrs["__touched__"] = False
if methods:

for method in methods:
if method not in attrs:
raise SanicException(

@@ -75,6 +75,7 @@ def load_module_from_file_location(
location = location.decode(encoding)

if isinstance(location, Path) or "/" in location or "$" in location:

if not isinstance(location, Path):
# A) Check if location contains any environment variables
# in format ${some_env_var}.

@@ -1,18 +0,0 @@

from enum import IntEnum, auto

from sanic.compat import UpperStrEnum


class RestartOrder(UpperStrEnum):
SHUTDOWN_FIRST = auto()
STARTUP_FIRST = auto()


class ProcessState(IntEnum):
IDLE = auto()
RESTARTING = auto()
STARTING = auto()
STARTED = auto()
ACKED = auto()
JOINED = auto()
TERMINATED = auto()
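For reference, a minimal sketch of a case-insensitive restart-order lookup such as the SANIC_RESTART_ORDER handling exercised by test_convert_restart_order later in this diff; a plain Enum stands in for UpperStrEnum, so this is an assumption rather than Sanic's implementation:

```python
import os
from enum import Enum, auto

class RestartOrder(Enum):
    # Mirrors the two members shown in the hunk above
    SHUTDOWN_FIRST = auto()
    STARTUP_FIRST = auto()

def restart_order_from_env(default: str = "SHUTDOWN_FIRST") -> RestartOrder:
    raw = os.environ.get("SANIC_RESTART_ORDER", default)
    return RestartOrder[raw.upper()]  # accepts e.g. "startup_first"
```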
@@ -1,17 +1,23 @@
from __future__ import annotations
import sys

from datetime import datetime
from inspect import isawaitable
from multiprocessing.connection import Connection
from os import environ
from pathlib import Path
from typing import Any, Dict, Mapping, Union
from signal import SIGINT, SIGTERM
from signal import signal as signal_func
from socket import AF_INET, SOCK_STREAM, socket, timeout
from textwrap import indent
from typing import Any, Dict

from sanic.exceptions import Unauthorized
from sanic.helpers import Default
from sanic.log import logger
from sanic.request import Request
from sanic.response import json
from sanic.application.logo import get_logo
from sanic.application.motd import MOTDTTY
from sanic.log import Colors, error_logger, logger
from sanic.server.socket import configure_socket


try: # no cov
from ujson import dumps, loads
except ModuleNotFoundError: # no cov
from json import dumps, loads # type: ignore


class Inspector:
@@ -19,105 +25,118 @@ class Inspector:
self,
publisher: Connection,
app_info: Dict[str, Any],
worker_state: Mapping[str, Any],
worker_state: Dict[str, Any],
host: str,
port: int,
api_key: str,
tls_key: Union[Path, str, Default],
tls_cert: Union[Path, str, Default],
):
self._publisher = publisher
self.run = True
self.app_info = app_info
self.worker_state = worker_state
self.host = host
self.port = port
self.api_key = api_key
self.tls_key = tls_key
self.tls_cert = tls_cert

def __call__(self, run=True, **_) -> Inspector:
from sanic import Sanic

self.app = Sanic("Inspector")
self._setup()
if run:
self.app.run(
host=self.host,
port=self.port,
single_process=True,
ssl={"key": self.tls_key, "cert": self.tls_cert}
if not isinstance(self.tls_key, Default)
and not isinstance(self.tls_cert, Default)
else None,
)
return self

def _setup(self):
self.app.get("/")(self._info)
self.app.post("/<action:str>")(self._action)
if self.api_key:
self.app.on_request(self._authentication)
environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"

def _authentication(self, request: Request) -> None:
if request.token != self.api_key:
raise Unauthorized("Bad API key")

async def _action(self, request: Request, action: str):
logger.info("Incoming inspector action: %s", action)
output: Any = None
method = getattr(self, action, None)
if method:
kwargs = {}
if request.body:
kwargs = request.json
args = kwargs.pop("args", ())
output = method(*args, **kwargs)
if isawaitable(output):
output = await output

return await self._respond(request, output)

async def _info(self, request: Request):
return await self._respond(request, self._state_to_json())

async def _respond(self, request: Request, output: Any):
name = request.match_info.get("action", "info")
return json(
{"meta": {"action": name}, "result": output},
escape_forward_slashes=False,
def __call__(self) -> None:
sock = configure_socket(
{"host": self.host, "port": self.port, "unix": None, "backlog": 1}
)
assert sock
signal_func(SIGINT, self.stop)
signal_func(SIGTERM, self.stop)

def _state_to_json(self) -> Dict[str, Any]:
logger.info(f"Inspector started on: {sock.getsockname()}")
sock.settimeout(0.5)
try:
while self.run:
try:
conn, _ = sock.accept()
except timeout:
continue
else:
action = conn.recv(64)
if action == b"reload":
conn.send(b"\n")
self.reload()
elif action == b"shutdown":
conn.send(b"\n")
self.shutdown()
else:
data = dumps(self.state_to_json())
conn.send(data.encode())
conn.close()
finally:
logger.debug("Inspector closing")
sock.close()

def stop(self, *_):
self.run = False

def state_to_json(self):
output = {"info": self.app_info}
output["workers"] = self._make_safe(dict(self.worker_state))
output["workers"] = self.make_safe(dict(self.worker_state))
return output

def reload(self):
message = "__ALL_PROCESSES__:"
self._publisher.send(message)

def shutdown(self):
message = "__TERMINATE__"
self._publisher.send(message)

@staticmethod
def _make_safe(obj: Dict[str, Any]) -> Dict[str, Any]:
def make_safe(obj: Dict[str, Any]) -> Dict[str, Any]:
for key, value in obj.items():
if isinstance(value, dict):
obj[key] = Inspector._make_safe(value)
obj[key] = Inspector.make_safe(value)
elif isinstance(value, datetime):
obj[key] = value.isoformat()
return obj

def reload(self, zero_downtime: bool = False) -> None:
message = "__ALL_PROCESSES__:"
if zero_downtime:
message += ":STARTUP_FIRST"
self._publisher.send(message)

def scale(self, replicas) -> str:
num_workers = 1
if replicas:
num_workers = int(replicas)
log_msg = f"Scaling to {num_workers}"
logger.info(log_msg)
message = f"__SCALE__:{num_workers}"
self._publisher.send(message)
return log_msg

def shutdown(self) -> None:
message = "__TERMINATE__"
self._publisher.send(message)
def inspect(host: str, port: int, action: str):
out = sys.stdout.write
with socket(AF_INET, SOCK_STREAM) as sock:
try:
sock.connect((host, port))
except ConnectionRefusedError:
error_logger.error(
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}{(host, port)}{Colors.END}\n"
"Either the application is not running, or it did not start "
"an inspector instance."
)
sock.close()
sys.exit(1)
sock.sendall(action.encode())
data = sock.recv(4096)
if action == "raw":
out(data.decode())
elif action == "pretty":
loaded = loads(data)
display = loaded.pop("info")
extra = display.pop("extra", {})
display["packages"] = ", ".join(display["packages"])
MOTDTTY(get_logo(), f"{host}:{port}", display, extra).display(
version=False,
action="Inspecting",
out=out,
)
for name, info in loaded["workers"].items():
info = "\n".join(
f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
for key, value in info.items()
)
out(
"\n"
+ indent(
"\n".join(
[
f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
info,
]
),
"  ",
)
+ "\n"
)
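For reference, a hedged sketch of a client for the raw-socket inspector loop shown in this hunk, mirroring the inspect() helper above; the host and port values are placeholders:

```python
from socket import AF_INET, SOCK_STREAM, socket

def send_inspector_action(host: str, port: int, action: str) -> bytes:
    # The server replies with b"\n" for reload/shutdown, or JSON state otherwise
    with socket(AF_INET, SOCK_STREAM) as sock:
        sock.connect((host, port))
        sock.sendall(action.encode())
        return sock.recv(4096)

# Example (assumes an inspector is listening locally):
# send_inspector_action("127.0.0.1", 6457, "reload")
```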
@@ -1,16 +1,13 @@
import os

from contextlib import suppress
from itertools import count
from random import choice
from signal import SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from typing import Dict, List, Optional
from typing import List, Optional

from sanic.compat import OS_IS_WINDOWS
from sanic.constants import RestartOrder
from sanic.exceptions import ServerKilled
from sanic.log import error_logger, logger
from sanic.worker.constants import RestartOrder
from sanic.worker.process import ProcessState, Worker, WorkerProcess


@@ -21,7 +18,7 @@ else:


class WorkerManager:
THRESHOLD = WorkerProcess.THRESHOLD
THRESHOLD = 300 # == 30 seconds
MAIN_IDENT = "Sanic-Main"

def __init__(
@@ -32,69 +29,51 @@ class WorkerManager:
context,
monitor_pubsub,
worker_state,
restart_order: RestartOrder = RestartOrder.SHUTDOWN_FIRST,
):
self.num_server = number
self.context = context
self.transient: Dict[str, Worker] = {}
self.durable: Dict[str, Worker] = {}
self.transient: List[Worker] = []
self.durable: List[Worker] = []
self.monitor_publisher, self.monitor_subscriber = monitor_pubsub
self.worker_state = worker_state
self.worker_state[self.MAIN_IDENT] = {"pid": self.pid}
self._shutting_down = False
self._serve = serve
self._server_settings = server_settings
self._server_count = count()
self.terminated = False
self.restart_order = restart_order

if number == 0:
raise RuntimeError("Cannot serve with no workers")

for _ in range(number):
self.create_server()
for i in range(number):
self.manage(
f"{WorkerProcess.SERVER_LABEL}-{i}",
serve,
server_settings,
transient=True,
)

signal_func(SIGINT, self.shutdown_signal)
signal_func(SIGTERM, self.shutdown_signal)

def manage(self, ident, func, kwargs, transient=False) -> Worker:
def manage(self, ident, func, kwargs, transient=False):
container = self.transient if transient else self.durable
worker = Worker(ident, func, kwargs, self.context, self.worker_state)
container[worker.ident] = worker
return worker

def create_server(self) -> Worker:
server_number = next(self._server_count)
return self.manage(
f"{WorkerProcess.SERVER_LABEL}-{server_number}",
self._serve,
self._server_settings,
transient=True,
container.append(
Worker(
ident,
func,
kwargs,
self.context,
self.worker_state,
self.restart_order,
)
)

def shutdown_server(self, ident: Optional[str] = None) -> None:
if not ident:
servers = [
worker
for worker in self.transient.values()
if worker.ident.startswith(WorkerProcess.SERVER_LABEL)
]
if not servers:
error_logger.error(
"Server shutdown failed because a server was not found."
)
return
worker = choice(servers) # nosec B311
else:
worker = self.transient[ident]

for process in worker.processes:
process.terminate()

del self.transient[worker.ident]

def run(self):
self.start()
self.monitor()
self.join()
self.terminate()
# self.kill()

def start(self):
for process in self.processes:
@@ -116,41 +95,15 @@ class WorkerManager:
self.join()

def terminate(self):
if not self._shutting_down:
if not self.terminated:
for process in self.processes:
process.terminate()
self.terminated = True

def restart(
self,
process_names: Optional[List[str]] = None,
restart_order=RestartOrder.SHUTDOWN_FIRST,
**kwargs,
):
def restart(self, process_names: Optional[List[str]] = None, **kwargs):
for process in self.transient_processes:
if not process_names or process.name in process_names:
process.restart(restart_order=restart_order, **kwargs)

def scale(self, num_worker: int):
if num_worker <= 0:
raise ValueError("Cannot scale to 0 workers.")

change = num_worker - self.num_server
if change == 0:
logger.info(
f"No change needed. There are already {num_worker} workers."
)
return

logger.info(f"Scaling from {self.num_server} to {num_worker} workers")
if change > 0:
for _ in range(change):
worker = self.create_server()
for process in worker.processes:
process.start()
else:
for _ in range(abs(change)):
self.shutdown_server()
self.num_server = num_worker
process.restart(**kwargs)

def monitor(self):
self.wait_for_ack()
@@ -166,15 +119,7 @@ class WorkerManager:
elif message == "__TERMINATE__":
self.shutdown()
break
logger.debug(
"Incoming monitor message: %s",
message,
extra={"verbosity": 1},
)
split_message = message.split(":", 2)
if message.startswith("__SCALE__"):
self.scale(int(split_message[-1]))
continue
split_message = message.split(":", 1)
processes = split_message[0]
reloaded_files = (
split_message[1] if len(split_message) > 1 else None
@@ -184,15 +129,9 @@ class WorkerManager:
]
if "__ALL_PROCESSES__" in process_names:
process_names = None
order = (
RestartOrder.STARTUP_FIRST
if "STARTUP_FIRST" in split_message
else RestartOrder.SHUTDOWN_FIRST
)
self.restart(
process_names=process_names,
reloaded_files=reloaded_files,
restart_order=order,
)
self._sync_states()
except InterruptedError:
@@ -200,6 +139,12 @@ class WorkerManager:
raise
break

def _sync_states(self):
for process in self.processes:
state = self.worker_state[process.name].get("state")
if state and process.state.name != state:
process.set_state(ProcessState[state], True)

def wait_for_ack(self): # no cov
misses = 0
message = (
@@ -207,7 +152,7 @@ class WorkerManager:
"online in the allowed time. Sanic is shutting down to avoid a "
f"deadlock. The current threshold is {self.THRESHOLD / 10}s. "
"If this problem persists, please check out the documentation "
"https://sanic.dev/en/guide/deployment/manager.html#worker-ack."
"___."
)
while not self._all_workers_ack():
if self.monitor_subscriber.poll(0.1):
@@ -234,8 +179,8 @@ class WorkerManager:
self.kill()

@property
def workers(self) -> List[Worker]:
return list(self.transient.values()) + list(self.durable.values())
def workers(self):
return self.transient + self.durable

@property
def processes(self):
@@ -245,7 +190,7 @@ class WorkerManager:

@property
def transient_processes(self):
for worker in self.transient.values():
for worker in self.transient:
for process in worker.processes:
yield process

@@ -256,11 +201,6 @@ class WorkerManager:
raise ServerKilled

def shutdown_signal(self, signal, frame):
if self._shutting_down:
logger.info("Shutdown interrupted. Killing.")
with suppress(ServerKilled):
self.kill()

logger.info("Received signal %s. Shutting down.", Signals(signal).name)
self.monitor_publisher.send(None)
self.shutdown()
@@ -269,7 +209,6 @@ class WorkerManager:
for process in self.processes:
if process.is_alive():
process.terminate()
self._shutting_down = True

@property
def pid(self):
@@ -282,9 +221,3 @@ class WorkerManager:
if worker_state.get("server")
]
return all(acked) and len(acked) == self.num_server

def _sync_states(self):
for process in self.processes:
state = self.worker_state[process.name].get("state")
if state and process.state.name != state:
process.set_state(ProcessState[state], True)
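For reference, an illustrative parser for the control strings exchanged in the manager hunks above ("__TERMINATE__", "__SCALE__:<n>", "__ALL_PROCESSES__::STARTUP_FIRST"); the format is inferred from the hunks, not taken from the actual implementation:

```python
def parse_monitor_message(message: str) -> dict:
    if message == "__TERMINATE__":
        return {"action": "shutdown"}
    if message.startswith("__SCALE__"):
        return {"action": "scale", "replicas": int(message.split(":", 1)[1])}
    # Restart messages: "<process names>:<reloaded files>[:STARTUP_FIRST]"
    parts = message.split(":", 2)
    return {
        "action": "restart",
        "processes": None if parts[0] == "__ALL_PROCESSES__" else parts[0],
        "reloaded_files": parts[1] if len(parts) > 1 and parts[1] else None,
        "startup_first": "STARTUP_FIRST" in parts,
    }

assert parse_monitor_message("__SCALE__:4") == {"action": "scale", "replicas": 4}
```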
@@ -2,7 +2,6 @@ from multiprocessing.connection import Connection
from os import environ, getpid
from typing import Any, Dict

from sanic.log import Colors, logger
from sanic.worker.process import ProcessState
from sanic.worker.state import WorkerState

@@ -17,23 +16,12 @@ class WorkerMultiplexer:
self._state = WorkerState(worker_state, self.name)

def ack(self):
logger.debug(
f"{Colors.BLUE}Process ack: {Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self.pid,
)
self._state._state[self.name] = {
**self._state._state[self.name],
"state": ProcessState.ACKED.name,
}

def restart(
self,
name: str = "",
all_workers: bool = False,
zero_downtime: bool = False,
):
def restart(self, name: str = "", all_workers: bool = False):
if name and all_workers:
raise ValueError(
"Ambiguous restart with both a named process and"
@@ -41,18 +29,10 @@ class WorkerMultiplexer:
)
if not name:
name = "__ALL_PROCESSES__:" if all_workers else self.name
if not name.endswith(":"):
name += ":"
if zero_downtime:
name += ":STARTUP_FIRST"
self._monitor_publisher.send(name)

reload = restart # no cov

def scale(self, num_workers: int):
message = f"__SCALE__:{num_workers}"
self._monitor_publisher.send(message)

def terminate(self, early: bool = False):
message = "__TERMINATE_EARLY__" if early else "__TERMINATE__"
self._monitor_publisher.send(message)

@@ -1,14 +1,14 @@
import os

from datetime import datetime, timezone
from enum import IntEnum, auto
from multiprocessing.context import BaseContext
from signal import SIGINT
from threading import Thread
from time import sleep
from typing import Any, Dict, Set

from sanic.constants import RestartOrder
from sanic.log import Colors, logger
from sanic.worker.constants import ProcessState, RestartOrder


def get_now():
@@ -16,17 +16,35 @@ def get_now():
return now


class ProcessState(IntEnum):
IDLE = auto()
RESTARTING = auto()
STARTING = auto()
STARTED = auto()
ACKED = auto()
JOINED = auto()
TERMINATED = auto()


class WorkerProcess:
THRESHOLD = 300 # == 30 seconds
SERVER_LABEL = "Server"

def __init__(self, factory, name, target, kwargs, worker_state):
def __init__(
self,
factory,
name,
target,
kwargs,
worker_state,
restart_order: RestartOrder,
):
self.state = ProcessState.IDLE
self.factory = factory
self.name = name
self.target = target
self.kwargs = kwargs
self.worker_state = worker_state
self.restart_order = restart_order
if self.name not in self.worker_state:
self.worker_state[self.name] = {
"server": self.SERVER_LABEL in self.name
@@ -81,7 +99,7 @@ class WorkerProcess:
except (KeyError, AttributeError, ProcessLookupError):
...

def restart(self, restart_order=RestartOrder.SHUTDOWN_FIRST, **kwargs):
def restart(self, **kwargs):
logger.debug(
f"{Colors.BLUE}Restarting a process: {Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
@@ -89,7 +107,7 @@ class WorkerProcess:
self.pid,
)
self.set_state(ProcessState.RESTARTING, force=True)
if restart_order is RestartOrder.SHUTDOWN_FIRST:
if self.restart_order is RestartOrder.SHUTDOWN_FIRST:
self._terminate_now()
else:
self._old_process = self._current_process
@@ -102,7 +120,7 @@ class WorkerProcess:
except AttributeError:
raise RuntimeError("Restart failed")

if restart_order is RestartOrder.STARTUP_FIRST:
if self.restart_order is RestartOrder.STARTUP_FIRST:
self._terminate_soon()

self.worker_state[self.name] = {
@@ -112,26 +130,6 @@ class WorkerProcess:
"restart_at": get_now(),
}

def is_alive(self):
try:
return self._current_process.is_alive()
except AssertionError:
return False

def spawn(self):
if self.state not in (ProcessState.IDLE, ProcessState.RESTARTING):
raise Exception("Cannot spawn a worker process until it is idle.")
self._current_process = self.factory(
name=self.name,
target=self.target,
kwargs=self.kwargs,
daemon=True,
)

@property
def pid(self):
return self._current_process.pid

def _terminate_now(self):
logger.debug(
f"{Colors.BLUE}Begin restart termination: "
@@ -161,15 +159,9 @@ class WorkerProcess:
self.name,
self._old_process.pid,
)
misses = 0
# TODO: Add a timeout?
while self.state is not ProcessState.ACKED:
sleep(0.1)
misses += 1
if misses > self.THRESHOLD:
raise TimeoutError(
f"Worker {self.name} failed to come ack within "
f"{self.THRESHOLD / 10} seconds"
)
...
else:
logger.debug(
f"{Colors.BLUE}Process acked. Terminating: "
@@ -181,6 +173,26 @@ class WorkerProcess:
self._old_process.terminate()
delattr(self, "_old_process")

def is_alive(self):
try:
return self._current_process.is_alive()
except AssertionError:
return False

def spawn(self):
if self.state not in (ProcessState.IDLE, ProcessState.RESTARTING):
raise Exception("Cannot spawn a worker process until it is idle.")
self._current_process = self.factory(
name=self.name,
target=self.target,
kwargs=self.kwargs,
daemon=True,
)

@property
def pid(self):
return self._current_process.pid


class Worker:
WORKER_PREFIX = "Sanic-"
@@ -192,26 +204,25 @@ class Worker:
server_settings,
context: BaseContext,
worker_state: Dict[str, Any],
restart_order: RestartOrder,
):
self.ident = ident
self.ident = f"{self.WORKER_PREFIX}{ident}"
self.context = context
self.serve = serve
self.server_settings = server_settings
self.worker_state = worker_state
self.processes: Set[WorkerProcess] = set()
self.restart_order = restart_order
self.create_process()

def create_process(self) -> WorkerProcess:
process = WorkerProcess(
# Need to ignore this typing error - The problem is the
# BaseContext itself has no Process. But, all of its
# implementations do. We can safely ignore as it is a typing
# issue in the standard lib.
factory=self.context.Process, # type: ignore
name=f"{self.WORKER_PREFIX}{self.ident}-{len(self.processes)}",
factory=self.context.Process,
name=f"{self.ident}-{len(self.processes)}",
target=self.serve,
kwargs={**self.server_settings},
worker_state=self.worker_state,
restart_order=self.restart_order,
)
self.processes.add(process)
return process

@@ -17,8 +17,6 @@ from sanic.worker.loader import AppLoader


class Reloader:
INTERVAL = 1.0 # seconds

def __init__(
self,
publisher: Connection,
@@ -27,7 +25,7 @@ class Reloader:
app_loader: AppLoader,
):
self._publisher = publisher
self.interval = interval or self.INTERVAL
self.interval = interval
self.reload_dirs = reload_dirs
self.run = True
self.app_loader = app_loader

@@ -17,7 +17,6 @@ from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.runners import _serve_http_1, _serve_http_3
from sanic.worker.loader import AppLoader, CertLoader
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.process import Worker, WorkerProcess


def worker_serve(
@@ -80,10 +79,7 @@ def worker_serve(
info.settings["ssl"] = ssl

# When in a worker process, do some init
worker_name = os.environ.get("SANIC_WORKER_NAME")
if worker_name and worker_name.startswith(
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL
):
if os.environ.get("SANIC_WORKER_NAME"):
# Hydrate apps with any passed server info

if monitor_publisher is None:
setup.py
@@ -6,6 +6,8 @@ import os
import re
import sys

from distutils.util import strtobool

from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand

@@ -36,27 +38,6 @@ def open_local(paths, mode="r", encoding="utf8"):
return codecs.open(path, mode, encoding)


def str_to_bool(val: str) -> bool:
val = val.lower()
if val in {
"y",
"yes",
"yep",
"yup",
"t",
"true",
"on",
"enable",
"enabled",
"1",
}:
return True
elif val in {"n", "no", "f", "false", "off", "disable", "disabled", "0"}:
return False
else:
raise ValueError(f"Invalid truth value {val}")

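For reference, a brief usage sketch of the str_to_bool helper defined directly above, which one side of this hunk uses in place of the deprecated distutils.util.strtobool (strtobool returns 0/1 ints rather than booleans); it assumes that definition is in scope:

```python
assert str_to_bool("yes") is True
assert str_to_bool("Enabled") is True   # lower-cased before the lookup
assert str_to_bool("off") is False
try:
    str_to_bool("maybe")
except ValueError as exc:
    print(exc)  # Invalid truth value maybe
```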
with open_local(["sanic", "__version__.py"], encoding="latin1") as fp:
try:
version = re.findall(
@@ -81,7 +62,7 @@ setup_kwargs = {
),
"long_description": long_description,
"packages": find_packages(exclude=("tests", "tests.*")),
"package_data": {"sanic": ["py.typed", "pages/styles/*"]},
"package_data": {"sanic": ["py.typed"]},
"platforms": "any",
"python_requires": ">=3.7",
"classifiers": [
@@ -92,7 +73,6 @@ setup_kwargs = {
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
],
"entry_points": {"console_scripts": ["sanic = sanic.__main__:main"]},
}
@@ -111,7 +91,6 @@ requirements = [
"aiofiles>=0.6.0",
"websockets>=10.0",
"multidict>=5.0,<7.0",
"html5tagger>=1.2.1",
]

tests_require = [
@@ -152,13 +131,13 @@ dev_require = tests_require + [

all_require = list(set(dev_require + docs_require))

if str_to_bool(os.environ.get("SANIC_NO_UJSON", "no")):
if strtobool(os.environ.get("SANIC_NO_UJSON", "no")):
print("Installing without uJSON")
requirements.remove(ujson)
tests_require.remove(types_ujson)

# 'nt' means windows OS
if str_to_bool(os.environ.get("SANIC_NO_UVLOOP", "no")):
if strtobool(os.environ.get("SANIC_NO_UVLOOP", "no")):
print("Installing without uvLoop")
requirements.remove(uvloop)


@@ -1,7 +1,5 @@
import asyncio
import inspect
import logging
import os
import random
import re
import string
@@ -10,8 +8,8 @@ import uuid

from contextlib import suppress
from logging import LogRecord
from typing import Any, Dict, List, Tuple
from unittest.mock import MagicMock, Mock, patch
from typing import List, Tuple
from unittest.mock import MagicMock

import pytest

@@ -56,10 +54,11 @@ TYPE_TO_GENERATOR_MAP = {
"uuid": lambda: str(uuid.uuid1()),
}

CACHE: Dict[str, Any] = {}
CACHE = {}


class RouteStringGenerator:

ROUTE_COUNT_PER_DEPTH = 100
HTTP_METHODS = HTTP_METHODS
ROUTE_PARAM_TYPES = ["str", "int", "float", "alpha", "uuid"]
@@ -148,7 +147,6 @@ def app(request):
for target, method_name in TouchUp._registry:
CACHE[method_name] = getattr(target, method_name)
app = Sanic(slugify.sub("-", request.node.name))

yield app
for target, method_name in TouchUp._registry:
setattr(target, method_name, CACHE[method_name])
@@ -222,23 +220,3 @@ def sanic_ext(ext_instance): # noqa
yield sanic_ext
with suppress(KeyError):
del sys.modules["sanic_ext"]


@pytest.fixture
def urlopen():
urlopen = Mock()
urlopen.return_value = urlopen
urlopen.__enter__ = Mock(return_value=urlopen)
urlopen.__exit__ = Mock()
urlopen.read = Mock()
with patch("sanic.cli.inspector_client.urlopen", urlopen):
yield urlopen


@pytest.fixture(scope="module")
def static_file_directory():
"""The static directory to serve"""
current_file = inspect.getfile(inspect.currentframe())
current_directory = os.path.dirname(os.path.abspath(current_file))
static_directory = os.path.join(current_directory, "static")
return static_directory

@@ -36,7 +36,6 @@ def test_app_loop_running(app: Sanic):
assert response.text == "pass"


@pytest.mark.asyncio
def test_create_asyncio_server(app: Sanic):
loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server(return_asyncio_server=True)
@@ -45,7 +44,6 @@ def test_create_asyncio_server(app: Sanic):
assert srv.is_serving() is True


@pytest.mark.asyncio
def test_asyncio_server_no_start_serving(app: Sanic):
loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server(
@@ -57,7 +55,6 @@ def test_asyncio_server_no_start_serving(app: Sanic):
assert srv.is_serving() is False


@pytest.mark.asyncio
def test_asyncio_server_start_serving(app: Sanic):
loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server(
@@ -75,7 +72,6 @@ def test_asyncio_server_start_serving(app: Sanic):
# Looks like we can't easily test `serve_forever()`


@pytest.mark.asyncio
def test_create_server_main(app: Sanic, caplog):
app.listener("main_process_start")(lambda *_: ...)
loop = asyncio.get_event_loop()
@@ -90,7 +86,6 @@ def test_create_server_main(app: Sanic, caplog):
) in caplog.record_tuples


@pytest.mark.asyncio
def test_create_server_no_startup(app: Sanic):
loop = asyncio.get_event_loop()
asyncio_srv_coro = app.create_server(
@@ -106,7 +101,6 @@ def test_create_server_no_startup(app: Sanic):
loop.run_until_complete(srv.start_serving())


@pytest.mark.asyncio
def test_create_server_main_convenience(app: Sanic, caplog):
app.main_process_start(lambda *_: ...)
loop = asyncio.get_event_loop()
@@ -132,6 +126,7 @@ def test_app_loop_not_running(app: Sanic):


def test_app_run_raise_type_error(app: Sanic):

with pytest.raises(TypeError) as excinfo:
app.run(loop="loop")

@@ -144,6 +139,7 @@ def test_app_run_raise_type_error(app: Sanic):


def test_app_route_raise_value_error(app: Sanic):

with pytest.raises(ValueError) as excinfo:

@app.route("/test")
@@ -225,6 +221,7 @@ def test_app_websocket_parameters(websocket_protocol_mock, app: Sanic):


def test_handle_request_with_nested_exception(app: Sanic, monkeypatch):

err_msg = "Mock Exception"

def mock_error_handler_response(*args, **kwargs):
@@ -244,6 +241,7 @@ def test_handle_request_with_nested_exception(app: Sanic, monkeypatch):


def test_handle_request_with_nested_exception_debug(app: Sanic, monkeypatch):

err_msg = "Mock Exception"

def mock_error_handler_response(*args, **kwargs):
@@ -350,14 +348,12 @@ def test_app_registry_retrieval_from_multiple():
def test_get_app_does_not_exist():
with pytest.raises(
SanicException,
match=(
"Sanic app name 'does-not-exist' not found.\n"
"App instantiation must occur outside "
"if __name__ == '__main__' "
"block or by using an AppLoader.\nSee "
"https://sanic.dev/en/guide/deployment/app-loader.html"
" for more details."
),
match="Sanic app name 'does-not-exist' not found.\n"
"App instantiation must occur outside "
"if __name__ == '__main__' "
"block or by using an AppLoader.\nSee "
"https://sanic.dev/en/guide/deployment/app-loader.html"
" for more details.",
):
Sanic.get_app("does-not-exist")

@@ -472,7 +468,6 @@ def test_uvloop_config(app: Sanic, monkeypatch, use):
try_use_uvloop.assert_not_called()


@pytest.mark.asyncio
def test_uvloop_cannot_never_called_with_create_server(caplog, monkeypatch):
apps = (Sanic("default-uvloop"), Sanic("no-uvloop"), Sanic("yes-uvloop"))

@@ -509,7 +504,6 @@ def test_uvloop_cannot_never_called_with_create_server(caplog, monkeypatch):
assert counter[(logging.WARNING, message)] == modified


@pytest.mark.asyncio
def test_multiple_uvloop_configs_display_warning(caplog):
Sanic._uvloop_setting = None # Reset the setting (changed in prev tests)


@@ -8,7 +8,7 @@ import uvicorn

from sanic import Sanic
from sanic.application.state import Mode
from sanic.asgi import ASGIApp, MockTransport
from sanic.asgi import MockTransport
from sanic.exceptions import BadRequest, Forbidden, ServiceUnavailable
from sanic.request import Request
from sanic.response import json, text
@@ -16,12 +16,6 @@ from sanic.server.websockets.connection import WebSocketConnection
from sanic.signals import RESERVED_NAMESPACES


try:
from unittest.mock import AsyncMock
except ImportError:
from tests.asyncmock import AsyncMock # type: ignore


@pytest.fixture
def message_stack():
return deque()
@@ -342,7 +336,7 @@ async def test_websocket_send(send, receive, message_stack):


@pytest.mark.asyncio
async def test_websocket_text_receive(send, receive, message_stack):
async def test_websocket_receive(send, receive, message_stack):
msg = {"text": "hello", "type": "websocket.receive"}
message_stack.append(msg)

@@ -351,15 +345,6 @@ async def test_websocket_text_receive(send, receive, message_stack):

assert text == msg["text"]

@pytest.mark.asyncio
async def test_websocket_bytes_receive(send, receive, message_stack):
msg = {"bytes": b"hello", "type": "websocket.receive"}
message_stack.append(msg)

ws = WebSocketConnection(send, receive)
data = await ws.receive()

assert data == msg["bytes"]

@pytest.mark.asyncio
async def test_websocket_accept_with_no_subprotocols(
@@ -573,39 +558,3 @@ async def test_asgi_serve_location(app):

_, response = await app.asgi_client.get("/")
assert response.text == "http://<ASGI>"


@pytest.mark.asyncio
async def test_error_on_lifespan_exception_start(app, caplog):
@app.before_server_start
async def before_server_start(_):
1 / 0

recv = AsyncMock(return_value={"type": "lifespan.startup"})
send = AsyncMock()
app.asgi = True

with caplog.at_level(logging.ERROR):
await ASGIApp.create(app, {"type": "lifespan"}, recv, send)

send.assert_awaited_once_with(
{"type": "lifespan.startup.failed", "message": "division by zero"}
)


@pytest.mark.asyncio
async def test_error_on_lifespan_exception_stop(app: Sanic):
@app.before_server_stop
async def before_server_stop(_):
1 / 0

recv = AsyncMock(return_value={"type": "lifespan.shutdown"})
send = AsyncMock()
app.asgi = True
await app._startup()

await ASGIApp.create(app, {"type": "lifespan"}, recv, send)

send.assert_awaited_once_with(
{"type": "lifespan.shutdown.failed", "message": "division by zero"}
)
@@ -4,7 +4,6 @@ import sys
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -12,7 +11,6 @@ from sanic_routing import __version__ as __routing_version__
|
||||
|
||||
from sanic import __version__
|
||||
from sanic.__main__ import main
|
||||
from sanic.cli.inspector_client import InspectorClient
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
@@ -119,13 +117,7 @@ def test_error_with_path_as_instance_without_simple_arg(caplog):
|
||||
),
|
||||
)
|
||||
def test_tls_options(cmd: Tuple[str, ...], caplog):
|
||||
command = [
|
||||
"fake.server.app",
|
||||
*cmd,
|
||||
"--port=9999",
|
||||
"--debug",
|
||||
"--single-process",
|
||||
]
|
||||
command = ["fake.server.app", *cmd, "--port=9999", "--debug"]
|
||||
lines = capture(command, caplog)
|
||||
assert "Goin' Fast @ https://127.0.0.1:9999" in lines
|
||||
|
||||
@@ -294,50 +286,3 @@ def test_noisy_exceptions(cmd: str, expected: bool, caplog):
|
||||
info = read_app_info(lines)
|
||||
|
||||
assert info["noisy_exceptions"] is expected
|
||||
|
||||
|
||||
def test_inspector_inspect(urlopen, caplog, capsys):
|
||||
urlopen.read.return_value = json.dumps(
|
||||
{
|
||||
"result": {
|
||||
"info": {
|
||||
"packages": ["foo"],
|
||||
},
|
||||
"extra": {
|
||||
"more": "data",
|
||||
},
|
||||
"workers": {"Worker-Name": {"some": "state"}},
|
||||
}
|
||||
}
|
||||
).encode()
|
||||
with patch("sys.argv", ["sanic", "inspect"]):
|
||||
capture(["inspect"], caplog)
|
||||
captured = capsys.readouterr()
|
||||
assert "Inspecting @ http://localhost:6457" in captured.out
|
||||
assert "Worker-Name" in captured.out
|
||||
assert captured.err == ""
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"command,params",
|
||||
(
|
||||
(["reload"], {"zero_downtime": False}),
|
||||
(["reload", "--zero-downtime"], {"zero_downtime": True}),
|
||||
(["shutdown"], {}),
|
||||
(["scale", "9"], {"replicas": 9}),
|
||||
(["foo", "--bar=something"], {"bar": "something"}),
|
||||
(["foo", "--bar"], {"bar": True}),
|
||||
(["foo", "--no-bar"], {"bar": False}),
|
||||
(["foo", "positional"], {"args": ["positional"]}),
|
||||
(
|
||||
["foo", "positional", "--bar=something"],
|
||||
{"args": ["positional"], "bar": "something"},
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_inspector_command(command, params):
|
||||
with patch.object(InspectorClient, "request") as client:
|
||||
with patch("sys.argv", ["sanic", "inspect", *command]):
|
||||
main()
|
||||
|
||||
client.assert_called_once_with(command[0], **params)
|
||||
|
||||
@@ -39,7 +39,7 @@ def test_logo_true(app, caplog):
|
||||
with patch("sys.stdout.isatty") as isatty:
|
||||
isatty.return_value = True
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
app.make_coffee(single_process=True)
|
||||
app.make_coffee()
|
||||
|
||||
# Only in the regular logo
|
||||
assert " ▄███ █████ ██ " not in caplog.text
|
||||
|
||||
@@ -14,7 +14,7 @@ from pytest import MonkeyPatch
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.config import DEFAULT_CONFIG, Config
|
||||
from sanic.constants import LocalCertCreator
|
||||
from sanic.constants import LocalCertCreator, RestartOrder
|
||||
from sanic.exceptions import PyFileError
|
||||
|
||||
|
||||
@@ -436,3 +436,19 @@ def test_convert_local_cert_creator(passed, expected):
|
||||
app = Sanic("Test")
|
||||
assert app.config.LOCAL_CERT_CREATOR is expected
|
||||
del os.environ["SANIC_LOCAL_CERT_CREATOR"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"passed,expected",
|
||||
(
|
||||
("shutdown_first", RestartOrder.SHUTDOWN_FIRST),
|
||||
("startup_first", RestartOrder.STARTUP_FIRST),
|
||||
("SHUTDOWN_FIRST", RestartOrder.SHUTDOWN_FIRST),
|
||||
("STARTUP_FIRST", RestartOrder.STARTUP_FIRST),
|
||||
),
|
||||
)
|
||||
def test_convert_restart_order(passed, expected):
|
||||
os.environ["SANIC_RESTART_ORDER"] = passed
|
||||
app = Sanic("Test")
|
||||
assert app.config.RESTART_ORDER is expected
|
||||
del os.environ["SANIC_RESTART_ORDER"]
|
||||
|
||||
@@ -148,6 +148,7 @@ def test_cookie_set_unknown_property():
|
||||
|
||||
|
||||
def test_cookie_set_same_key(app):
|
||||
|
||||
cookies = {"test": "wait"}
|
||||
|
||||
@app.get("/")
|
||||
|
||||
@@ -2,6 +2,7 @@ import asyncio
|
||||
import sys
|
||||
|
||||
from threading import Event
|
||||
from unittest.mock import Mock
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -74,7 +75,7 @@ def test_create_named_task(app):
|
||||
|
||||
app.stop()
|
||||
|
||||
app.run(single_process=True)
|
||||
app.run()
|
||||
|
||||
|
||||
def test_named_task_called(app):
|
||||
|
||||
@@ -62,6 +62,7 @@ def exception_handler_app():
|
||||
|
||||
@exception_handler_app.route("/8", error_format="html")
|
||||
def handler_8(request):
|
||||
|
||||
raise ErrorWithRequestCtx("OK")
|
||||
|
||||
@exception_handler_app.exception(ErrorWithRequestCtx, NotFound)
|
||||
@@ -213,7 +214,7 @@ def test_error_handler_noisy_log(
|
||||
exception_handler_app: Sanic, monkeypatch: MonkeyPatch
|
||||
):
|
||||
err_logger = Mock()
|
||||
monkeypatch.setattr(handlers.error, "error_logger", err_logger)
|
||||
monkeypatch.setattr(handlers, "error_logger", err_logger)
|
||||
|
||||
exception_handler_app.config["NOISY_EXCEPTIONS"] = False
|
||||
exception_handler_app.test_client.get("/1")
|
||||
|
||||
@@ -185,22 +185,30 @@ def test_request_line(app):
|
||||
|
||||
assert request.request_line == b"GET / HTTP/1.1"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"raw",
|
||||
(
|
||||
"text/html, application/xhtml+xml, application/xml;q=0.9, */*;q=0.8",
|
||||
"application/xml;q=0.9, */*;q=0.8, text/html, application/xhtml+xml",
|
||||
"foo/bar;q=0.9, */*;q=0.8, text/html=0.8, text/plain, application/xhtml+xml",
|
||||
)
|
||||
"show/first, show/second",
|
||||
"show/*, show/first",
|
||||
"*/*, show/first",
|
||||
"*/*, show/*",
|
||||
"other/*; q=0.1, show/*; q=0.2",
|
||||
"show/first; q=0.5, show/second; q=0.5",
|
||||
"show/first; foo=bar, show/second; foo=bar",
|
||||
"show/second, show/first; foo=bar",
|
||||
"show/second; q=0.5, show/first; foo=bar; q=0.5",
|
||||
"show/second; q=0.5, show/first; q=1.0",
|
||||
"show/first, show/second; q=1.0",
|
||||
),
|
||||
)
|
||||
def test_accept_ordering(raw):
|
||||
"""Should sort by q but also be stable."""
|
||||
accept = headers.parse_accept(raw)
|
||||
assert accept[0].type_ == "text"
|
||||
raw1 = ", ".join(str(a) for a in accept)
|
||||
accept = headers.parse_accept(raw1)
|
||||
raw2 = ", ".join(str(a) for a in accept)
|
||||
assert raw1 == raw2
|
||||
def test_parse_accept_ordered_okay(raw):
|
||||
ordered = headers.parse_accept(raw)
|
||||
expected_subtype = (
|
||||
"*" if all(q.subtype.is_wildcard for q in ordered) else "first"
|
||||
)
|
||||
assert ordered[0].type_ == "show"
|
||||
assert ordered[0].subtype == expected_subtype
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -217,27 +225,40 @@ def test_bad_accept(raw):
|
||||
|
||||
|
||||
def test_empty_accept():
|
||||
a = headers.parse_accept("")
|
||||
assert a == []
|
||||
assert not a.match("*/*")
|
||||
assert headers.parse_accept("") == []
|
||||
|
||||
|
||||
def test_wildcard_accept_set_ok():
|
||||
accept = headers.parse_accept("*/*")[0]
|
||||
assert accept.is_wildcard
|
||||
assert accept.has_wildcard
|
||||
|
||||
accept = headers.parse_accept("foo/*")[0]
|
||||
assert not accept.is_wildcard
|
||||
assert accept.has_wildcard
|
||||
|
||||
accept = headers.parse_accept("*/bar")[0]
|
||||
assert not accept.is_wildcard
|
||||
assert accept.has_wildcard
|
||||
assert accept.type_.is_wildcard
|
||||
assert accept.subtype.is_wildcard
|
||||
|
||||
accept = headers.parse_accept("foo/bar")[0]
|
||||
assert not accept.is_wildcard
|
||||
assert not accept.has_wildcard
|
||||
assert not accept.type_.is_wildcard
|
||||
assert not accept.subtype.is_wildcard
|
||||
|
||||
|
||||
def test_accept_parsed_against_str():
|
||||
accept = headers.Accept.parse("foo/bar")
|
||||
assert accept > "foo/bar; q=0.1"
|
||||
|
||||
|
||||
def test_media_type_equality():
|
||||
assert headers.MediaType("foo") == headers.MediaType("foo") == "foo"
|
||||
assert headers.MediaType("foo") == headers.MediaType("*") == "*"
|
||||
assert headers.MediaType("foo") != headers.MediaType("bar")
|
||||
assert headers.MediaType("foo") != "bar"
|
||||
|
||||
|
||||
def test_media_type_matching():
|
||||
assert headers.MediaType("foo").match(headers.MediaType("foo"))
|
||||
assert headers.MediaType("foo").match("foo")
|
||||
|
||||
assert not headers.MediaType("foo").match(headers.MediaType("*"))
|
||||
assert not headers.MediaType("foo").match("*")
|
||||
|
||||
assert not headers.MediaType("foo").match(headers.MediaType("bar"))
|
||||
assert not headers.MediaType("foo").match("bar")
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -245,52 +266,87 @@ def test_wildcard_accept_set_ok():
|
||||
(
|
||||
# ALLOW BOTH
|
||||
("foo/bar", "foo/bar", True, True, True),
|
||||
("foo/bar", headers.Accept.parse("foo/bar"), True, True, True),
|
||||
("foo/bar", "foo/*", True, True, True),
|
||||
("foo/bar", headers.Accept.parse("foo/*"), True, True, True),
|
||||
("foo/bar", "*/*", True, True, True),
|
||||
("foo/bar", headers.Accept.parse("*/*"), True, True, True),
|
||||
("foo/*", "foo/bar", True, True, True),
|
||||
("foo/*", headers.Accept.parse("foo/bar"), True, True, True),
|
||||
("foo/*", "foo/*", True, True, True),
|
||||
("foo/*", headers.Accept.parse("foo/*"), True, True, True),
|
||||
("foo/*", "*/*", True, True, True),
|
||||
("foo/*", headers.Accept.parse("*/*"), True, True, True),
|
||||
("*/*", "foo/bar", True, True, True),
|
||||
("*/*", headers.Accept.parse("foo/bar"), True, True, True),
|
||||
("*/*", "foo/*", True, True, True),
|
||||
("*/*", headers.Accept.parse("foo/*"), True, True, True),
|
||||
("*/*", "*/*", True, True, True),
|
||||
("*/*", headers.Accept.parse("*/*"), True, True, True),
|
||||
# ALLOW TYPE
|
||||
("foo/bar", "foo/bar", True, True, False),
|
||||
("foo/bar", headers.Accept.parse("foo/bar"), True, True, False),
|
||||
("foo/bar", "foo/*", False, True, False),
|
||||
("foo/bar", headers.Accept.parse("foo/*"), False, True, False),
|
||||
("foo/bar", "*/*", False, True, False),
|
||||
("foo/bar", headers.Accept.parse("*/*"), False, True, False),
|
||||
("foo/*", "foo/bar", False, True, False),
|
||||
("foo/*", headers.Accept.parse("foo/bar"), False, True, False),
|
||||
("foo/*", "foo/*", False, True, False),
|
||||
("foo/*", headers.Accept.parse("foo/*"), False, True, False),
|
||||
("foo/*", "*/*", False, True, False),
|
||||
("foo/*", headers.Accept.parse("*/*"), False, True, False),
|
||||
("*/*", "foo/bar", False, True, False),
|
||||
("*/*", headers.Accept.parse("foo/bar"), False, True, False),
|
||||
("*/*", "foo/*", False, True, False),
|
||||
("*/*", headers.Accept.parse("foo/*"), False, True, False),
|
||||
("*/*", "*/*", False, True, False),
|
||||
("*/*", headers.Accept.parse("*/*"), False, True, False),
|
||||
# ALLOW SUBTYPE
|
||||
("foo/bar", "foo/bar", True, False, True),
|
||||
("foo/bar", headers.Accept.parse("foo/bar"), True, False, True),
|
||||
("foo/bar", "foo/*", True, False, True),
|
||||
("foo/bar", headers.Accept.parse("foo/*"), True, False, True),
|
||||
("foo/bar", "*/*", False, False, True),
|
||||
("foo/bar", headers.Accept.parse("*/*"), False, False, True),
|
||||
("foo/*", "foo/bar", True, False, True),
|
||||
("foo/*", headers.Accept.parse("foo/bar"), True, False, True),
|
||||
("foo/*", "foo/*", True, False, True),
|
||||
("foo/*", headers.Accept.parse("foo/*"), True, False, True),
|
||||
("foo/*", "*/*", False, False, True),
|
||||
("foo/*", headers.Accept.parse("*/*"), False, False, True),
|
||||
("*/*", "foo/bar", False, False, True),
|
||||
("*/*", headers.Accept.parse("foo/bar"), False, False, True),
|
||||
("*/*", "foo/*", False, False, True),
|
||||
("*/*", headers.Accept.parse("foo/*"), False, False, True),
|
||||
("*/*", "*/*", False, False, True),
|
||||
("*/*", headers.Accept.parse("*/*"), False, False, True),
|
||||
),
|
||||
)
|
||||
def test_accept_matching(value, other, outcome, allow_type, allow_subtype):
|
||||
assert (
|
||||
bool(headers.MediaType._parse(value).match(
|
||||
headers.Accept.parse(value).match(
|
||||
other,
|
||||
allow_type_wildcard=allow_type,
|
||||
allow_subtype_wildcard=allow_subtype,
|
||||
))
|
||||
)
|
||||
is outcome
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("value", ("foo/bar", "foo/*", "*/*"))
|
||||
def test_value_in_accept(value):
|
||||
acceptable = headers.parse_accept(value)
|
||||
assert "foo/bar" in acceptable
|
||||
assert "foo/*" in acceptable
|
||||
assert "*/*" in acceptable
|
||||
|
||||
|
||||
@pytest.mark.parametrize("value", ("foo/bar", "foo/*"))
|
||||
def test_value_not_in_accept(value):
|
||||
acceptable = headers.parse_accept(value)
|
||||
assert "*/*" not in acceptable
|
||||
assert "*/bar" not in acceptable
|
||||
assert "no/match" not in acceptable
|
||||
assert "no/*" not in acceptable
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -299,22 +355,16 @@ def test_value_not_in_accept(value):
|
||||
(
|
||||
"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8", # noqa: E501
|
||||
[
|
||||
("text/html", 1.0),
|
||||
("application/xhtml+xml", 1.0),
|
||||
("image/avif", 1.0),
|
||||
("image/webp", 1.0),
|
||||
("application/xml", 0.9),
|
||||
("*/*", 0.8),
|
||||
"text/html",
|
||||
"application/xhtml+xml",
|
||||
"image/avif",
|
||||
"image/webp",
|
||||
"application/xml;q=0.9",
|
||||
"*/*;q=0.8",
|
||||
],
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_browser_headers(header, expected):
|
||||
mimes = [e[0] for e in expected]
|
||||
qs = [e[1] for e in expected]
|
||||
request = Request(b"/", {"accept": header}, "1.1", "GET", None, None)
|
||||
assert request.accept == mimes
|
||||
for a, m, q in zip(request.accept, mimes, qs):
|
||||
assert a == m
|
||||
assert a.str == m
|
||||
assert a.q == q
|
||||
assert request.accept == expected
|
||||
|
||||
@@ -3,7 +3,7 @@ from functools import partial
import pytest

from sanic import Sanic
from sanic.middleware import Middleware, MiddlewareLocation
from sanic.middleware import Middleware
from sanic.response import json


@@ -40,86 +40,6 @@ def reset_middleware():
Middleware.reset_count()


def test_add_register_priority(app: Sanic):
def foo(*_):
...

app.register_middleware(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.register_middleware(foo, attach_to="response", priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore


def test_add_register_named_priority(app: Sanic):
def foo(*_):
...

app.register_named_middleware(foo, route_names=["foo"], priority=999)
assert len(app.named_request_middleware) == 1
assert len(app.named_response_middleware) == 0
assert app.named_request_middleware["foo"][0].priority == 999 # type: ignore
app.register_named_middleware(
foo, attach_to="response", route_names=["foo"], priority=999
)
assert len(app.named_request_middleware) == 1
assert len(app.named_response_middleware) == 1
assert app.named_response_middleware["foo"][0].priority == 999 # type: ignore


def test_add_decorator_priority(app: Sanic):
def foo(*_):
...

app.middleware(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.middleware(foo, attach_to="response", priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore


def test_add_convenience_priority(app: Sanic):
def foo(*_):
...

app.on_request(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.on_response(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore


def test_add_conflicting_priority(app: Sanic):
def foo(*_):
...

middleware = Middleware(foo, MiddlewareLocation.REQUEST, priority=998)
app.register_middleware(middleware=middleware, priority=999)
assert app.request_middleware[0].priority == 999 # type: ignore
middleware.priority == 998


def test_add_conflicting_priority_named(app: Sanic):
def foo(*_):
...

middleware = Middleware(foo, MiddlewareLocation.REQUEST, priority=998)
app.register_named_middleware(
middleware=middleware, route_names=["foo"], priority=999
)
assert app.named_request_middleware["foo"][0].priority == 999 # type: ignore
middleware.priority == 998


@pytest.mark.parametrize(
"expected,priorities",
PRIORITY_TEST_CASES,

@@ -3,7 +3,6 @@ import multiprocessing
import pickle
import random
import signal
import sys

from asyncio import sleep

@@ -12,7 +11,6 @@ import pytest
from sanic_testing.testing import HOST, PORT

from sanic import Blueprint, text
from sanic.compat import use_context
from sanic.log import logger
from sanic.server.socket import configure_socket

@@ -22,10 +20,6 @@ from sanic.server.socket import configure_socket
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_multiprocessing(app):
"""Tests that the number of children we produce is correct"""
# Selects a number at random so we can spot check
@@ -43,8 +37,7 @@ def test_multiprocessing(app):

signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(2)
with use_context("fork"):
app.run(HOST, 4120, workers=num_workers, debug=True)
app.run(HOST, 4120, workers=num_workers, debug=True)

assert len(process_list) == num_workers + 1

@@ -143,10 +136,6 @@ def test_multiprocessing_legacy_unix(app):
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform",
)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_multiprocessing_with_blueprint(app):
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
@@ -166,8 +155,7 @@ def test_multiprocessing_with_blueprint(app):

bp = Blueprint("test_text")
app.blueprint(bp)
with use_context("fork"):
app.run(HOST, 4121, workers=num_workers, debug=True)
app.run(HOST, 4121, workers=num_workers, debug=True)

assert len(process_list) == num_workers + 1

@@ -225,10 +213,6 @@ def test_pickle_app_with_static(app, protocol):
up_p_app.run(single_process=True)


@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_main_process_event(app, caplog):
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
@@ -251,9 +235,8 @@ def test_main_process_event(app, caplog):
def main_process_stop2(app, loop):
logger.info("main_process_stop")

with use_context("fork"):
with caplog.at_level(logging.INFO):
app.run(HOST, PORT, workers=num_workers)
with caplog.at_level(logging.INFO):
app.run(HOST, PORT, workers=num_workers)

assert (
caplog.record_tuples.count(("sanic.root", 20, "main_process_start"))

@@ -1,5 +1,8 @@
import asyncio

from contextlib import closing
from socket import socket

import pytest

from sanic import Sanic
@@ -620,4 +623,6 @@ def test_streaming_echo():
res = await read_chunk()
assert res == None

app.run(access_log=False, single_process=True)
# Use random port for tests
with closing(socket()) as sock:
app.run(access_log=False)

@@ -1293,24 +1293,6 @@ async def test_request_string_representation_asgi(app):
"------sanic--\r\n",
"filename_\u00A0_test",
),
# Umlaut using NFC normalization (Windows, Linux, Android)
(
"------sanic\r\n"
'content-disposition: form-data; filename*="utf-8\'\'filename_%C3%A4_test"; name="test"\r\n'
"\r\n"
"OK\r\n"
"------sanic--\r\n",
"filename_\u00E4_test",
),
# Umlaut using NFD normalization (MacOS client)
(
"------sanic\r\n"
'content-disposition: form-data; filename*="utf-8\'\'filename_a%CC%88_test"; name="test"\r\n'
"\r\n"
"OK\r\n"
"------sanic--\r\n",
"filename_\u00E4_test", # Sanic should normalize to NFC
),
],
)
def test_request_multipart_files(app, payload, filename):

@@ -514,6 +514,7 @@ def test_file_stream_head_response(
def test_file_stream_response_range(
app: Sanic, file_name, static_file_directory, size, start, end
):

Range = namedtuple("Range", ["size", "start", "end", "total"])
total = len(get_file_content(static_file_directory, file_name))
range = Range(size=size, start=start, end=end, total=total)

@@ -722,6 +722,7 @@ def test_add_webscoket_route_with_version(app):


def test_route_duplicate(app):

with pytest.raises(RouteExists):

@app.route("/test")
@@ -802,22 +803,8 @@ def test_static_add_route(app, strict_slashes):
assert response.text == "OK2"


@pytest.mark.parametrize("unquote", [True, False, None])
def test_unquote_add_route(app, unquote):
async def handler1(_, foo):
return text(foo)

app.add_route(handler1, "/<foo>", unquote=unquote)
value = "啊" if unquote else r"%E5%95%8A"

_, response = app.test_client.get("/啊")
assert response.text == value

_, response = app.test_client.get(r"/%E5%95%8A")
assert response.text == value


def test_dynamic_add_route(app):

results = []

async def handler(request, name):
@@ -832,6 +819,7 @@ def test_dynamic_add_route(app):


def test_dynamic_add_route_string(app):

results = []

async def handler(request, name):
@@ -935,6 +923,7 @@ def test_dynamic_add_route_unhashable(app):


def test_add_route_duplicate(app):

with pytest.raises(RouteExists):

async def handler1(request):
@@ -1116,6 +1105,7 @@ def test_route_raise_ParameterNameConflicts(app):


def test_route_invalid_host(app):

host = 321
with pytest.raises(ValueError) as excinfo:


@@ -93,7 +93,6 @@ def test_dont_register_system_signals(app):
@pytest.mark.skipif(os.name == "nt", reason="windows cannot SIGINT processes")
def test_windows_workaround():
"""Test Windows workaround (on any other OS)"""

# At least some code coverage, even though this test doesn't work on
# Windows...
class MockApp:

@@ -7,7 +7,7 @@ import pytest

from sanic_routing.exceptions import NotFound

from sanic import Blueprint, Sanic, empty
from sanic import Blueprint
from sanic.exceptions import InvalidSignal, SanicException


@@ -20,31 +20,6 @@ def test_add_signal(app):
assert len(app.signal_router.routes) == 1


def test_add_signal_method_handler(app):
counter = 0

class TestSanic(Sanic):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_signal(
self.after_routing_signal_handler, "http.routing.after"
)

def after_routing_signal_handler(self, *args, **kwargs):
nonlocal counter
counter += 1

app = TestSanic("Test")
assert len(app.signal_router.routes) == 1

@app.route("/")
async def handler(_):
return empty()

app.test_client.get("/")
assert counter == 1


def test_add_signal_decorator(app):
@app.signal("foo.bar.baz")
def sync_signal(*_):
@@ -314,10 +289,10 @@ async def test_dispatch_signal_triggers_event_on_bp(app):
waiter = bp.event("foo.bar.baz")
assert isawaitable(waiter)

fut = do_wait()
fut = asyncio.ensure_future(do_wait())
for signal in signal_group:
signal.ctx.event.set()
await asyncio.gather(fut)
await fut

assert bp_counter == 1


@@ -1,3 +1,4 @@
import inspect
import logging
import os
import sys
@@ -12,6 +13,15 @@ from sanic import Sanic, text
from sanic.exceptions import FileNotFound


@pytest.fixture(scope="module")
def static_file_directory():
"""The static directory to serve"""
current_file = inspect.getfile(inspect.currentframe())
current_directory = os.path.dirname(os.path.abspath(current_file))
static_directory = os.path.join(current_directory, "static")
return static_directory


@pytest.fixture(scope="module")
def double_dotted_directory_file(static_file_directory: str):
"""Generate double dotted directory and its files"""
@@ -108,12 +118,7 @@ def test_static_file_pathlib(app, static_file_directory, file_name):
def test_static_file_bytes(app, static_file_directory, file_name):
bsep = os.path.sep.encode("utf-8")
file_path = static_file_directory.encode("utf-8") + bsep + file_name
message = (
"Serving a static directory with a bytes "
"string is deprecated and will be removed in v22.9."
)
with pytest.warns(DeprecationWarning, match=message):
app.static("/testing.file", file_path)
app.static("/testing.file", file_path)
request, response = app.test_client.get("/testing.file")
assert response.status == 200

@@ -426,6 +431,7 @@ def test_static_stream_large_file(
"file_name", ["test.file", "decode me.txt", "python.png"]
)
def test_use_modified_since(app, static_file_directory, file_name):

file_stat = os.stat(get_file_path(static_file_directory, file_name))
modified_since = strftime(
"%a, %d %b %Y %H:%M:%S GMT", gmtime(file_stat.st_mtime)

@@ -1,123 +0,0 @@
import os

from pathlib import Path

import pytest

from sanic import Sanic
from sanic.handlers.directory import DirectoryHandler


def get_file_path(static_file_directory, file_name):
return os.path.join(static_file_directory, file_name)


def get_file_content(static_file_directory, file_name):
"""The content of the static file to check"""
with open(get_file_path(static_file_directory, file_name), "rb") as file:
return file.read()


def test_static_directory_view(app: Sanic, static_file_directory: str):
app.static("/static", static_file_directory, directory_view=True)

_, response = app.test_client.get("/static/")
assert response.status == 200
assert response.content_type == "text/html; charset=utf-8"
assert "<title>Directory Viewer</title>" in response.text


def test_static_index_single(app: Sanic, static_file_directory: str):
app.static("/static", static_file_directory, index="test.html")

_, response = app.test_client.get("/static/")
assert response.status == 200
assert response.body == get_file_content(
static_file_directory, "test.html"
)
assert response.headers["Content-Type"] == "text/html"


def test_static_index_single_not_found(app: Sanic, static_file_directory: str):
app.static("/static", static_file_directory, index="index.html")

_, response = app.test_client.get("/static/")
assert response.status == 404


def test_static_index_multiple(app: Sanic, static_file_directory: str):
app.static(
"/static",
static_file_directory,
index=["index.html", "test.html"],
)

_, response = app.test_client.get("/static/")
assert response.status == 200
assert response.body == get_file_content(
static_file_directory, "test.html"
)
assert response.headers["Content-Type"] == "text/html"


def test_static_directory_view_and_index(
app: Sanic, static_file_directory: str
):
app.static(
"/static",
static_file_directory,
directory_view=True,
index="foo.txt",
)

_, response = app.test_client.get("/static/nested/")
assert response.status == 200
assert response.content_type == "text/html; charset=utf-8"
assert "<title>Directory Viewer</title>" in response.text

_, response = app.test_client.get("/static/nested/dir/")
assert response.status == 200
assert response.body == get_file_content(
f"{static_file_directory}/nested/dir", "foo.txt"
)
assert response.content_type == "text/plain"


def test_static_directory_handler(app: Sanic, static_file_directory: str):
dh = DirectoryHandler(
"/static",
Path(static_file_directory),
directory_view=True,
index="foo.txt",
)
app.static("/static", static_file_directory, directory_handler=dh)

_, response = app.test_client.get("/static/nested/")
assert response.status == 200
assert response.content_type == "text/html; charset=utf-8"
assert "<title>Directory Viewer</title>" in response.text

_, response = app.test_client.get("/static/nested/dir/")
assert response.status == 200
assert response.body == get_file_content(
f"{static_file_directory}/nested/dir", "foo.txt"
)
assert response.content_type == "text/plain"


def test_static_directory_handler_fails(app: Sanic):
dh = DirectoryHandler(
"/static",
Path(""),
directory_view=True,
index="foo.txt",
)
message = (
"When explicitly setting directory_handler, you cannot "
"set either directory_view or index. Instead, pass "
"these arguments to your DirectoryHandler instance."
)
with pytest.raises(ValueError, match=message):
app.static("/static", "", directory_handler=dh, directory_view=True)
with pytest.raises(ValueError, match=message):
app.static("/static", "", directory_handler=dh, index="index.html")
@@ -2,7 +2,6 @@ import logging
import os
import ssl
import subprocess
import sys

from contextlib import contextmanager
from multiprocessing import Event
@@ -18,7 +17,6 @@ import sanic.http.tls.creators

from sanic import Sanic
from sanic.application.constants import Mode
from sanic.compat import use_context
from sanic.constants import LocalCertCreator
from sanic.exceptions import SanicException
from sanic.helpers import _default
@@ -428,12 +426,7 @@ def test_logger_vhosts(caplog):
app.stop()

with caplog.at_level(logging.INFO):
app.run(
host="127.0.0.1",
port=42102,
ssl=[localhost_dir, sanic_dir],
single_process=True,
)
app.run(host="127.0.0.1", port=42102, ssl=[localhost_dir, sanic_dir])

logmsg = [
m for s, l, m in caplog.record_tuples if m.startswith("Certificate")
@@ -649,11 +642,8 @@ def test_sanic_ssl_context_create():
assert isinstance(sanic_context, SanicSSLContext)


@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_ssl_in_multiprocess_mode(app: Sanic, caplog):

ssl_dict = {"cert": localhost_cert, "key": localhost_key}
event = Event()

@@ -667,9 +657,8 @@ def test_ssl_in_multiprocess_mode(app: Sanic, caplog):
app.stop()

assert not event.is_set()
with use_context("fork"):
with caplog.at_level(logging.INFO):
app.run(ssl=ssl_dict)
with caplog.at_level(logging.INFO):
app.run(ssl=ssl_dict)
assert event.is_set()

assert (

@@ -1,7 +1,6 @@
# import asyncio
import logging
import os
import sys

from asyncio import AbstractEventLoop, sleep
from string import ascii_lowercase
@@ -13,7 +12,6 @@ import pytest
from pytest import LogCaptureFixture

from sanic import Sanic
from sanic.compat import use_context
from sanic.request import Request
from sanic.response import text

@@ -176,8 +174,7 @@ def handler(request: Request):

async def client(app: Sanic, loop: AbstractEventLoop):
try:
transport = httpx.AsyncHTTPTransport(uds=SOCKPATH)
async with httpx.AsyncClient(transport=transport) as client:
async with httpx.AsyncClient(uds=SOCKPATH) as client:
r = await client.get("http://myhost.invalid/")
assert r.status_code == 200
assert r.text == os.path.abspath(SOCKPATH)
@@ -186,16 +183,11 @@ async def client(app: Sanic, loop: AbstractEventLoop):
app.stop()


@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_unix_connection_multiple_workers():
with use_context("fork"):
app_multi = Sanic(name="test")
app_multi.get("/")(handler)
app_multi.listener("after_server_start")(client)
app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2)
app_multi = Sanic(name="test")
app_multi.get("/")(handler)
app_multi.listener("after_server_start")(client)
app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2)


# @pytest.mark.xfail(

@@ -83,6 +83,7 @@ def test_simple_url_for_getting_with_more_params(app, args, url):


def test_url_for_with_server_name(app):

server_name = f"{test_host}:{test_port}"
app.config.update({"SERVER_NAME": server_name})
path = "/myurl"

@@ -38,6 +38,7 @@ def test_load_module_from_file_location_with_non_existing_env_variable():
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):

load_module_from_file_location("${MuuMilk}")



@@ -1,20 +1,14 @@
try: # no cov
from ujson import dumps
except ModuleNotFoundError: # no cov
from json import dumps # type: ignore
import json

from datetime import datetime
from logging import ERROR, INFO
from socket import AF_INET, SOCK_STREAM, timeout
from unittest.mock import Mock, patch
from urllib.error import URLError

import pytest

from sanic_testing import TestManager

from sanic.cli.inspector_client import InspectorClient
from sanic.helpers import Default
from sanic.log import Colors
from sanic.worker.inspector import Inspector
from sanic.worker.inspector import Inspector, inspect


DATA = {
@@ -26,90 +20,121 @@ DATA = {
},
"workers": {"Worker-Name": {"some": "state"}},
}
FULL_SERIALIZED = dumps({"result": DATA})
OUT_SERIALIZED = dumps(DATA)
SERIALIZED = json.dumps(DATA)


class FooInspector(Inspector):
async def foo(self, bar):
return f"bar is {bar}"
def test_inspector_stop():
inspector = Inspector(Mock(), {}, {}, "", 1)
assert inspector.run is True
inspector.stop()
assert inspector.run is False


@pytest.fixture
def publisher():
publisher = Mock()
return publisher
@patch("sanic.worker.inspector.sys.stdout.write")
@patch("sanic.worker.inspector.socket")
@pytest.mark.parametrize("command", ("foo", "raw", "pretty"))
def test_send_inspect(socket: Mock, write: Mock, command: str):
socket.return_value = socket
socket.__enter__.return_value = socket
socket.recv.return_value = SERIALIZED.encode()
inspect("localhost", 9999, command)

socket.sendall.assert_called_once_with(command.encode())
socket.recv.assert_called_once_with(4096)
socket.connect.assert_called_once_with(("localhost", 9999))
socket.assert_called_once_with(AF_INET, SOCK_STREAM)

if command == "raw":
write.assert_called_once_with(SERIALIZED)
elif command == "pretty":
write.assert_called()
else:
write.assert_not_called()


@pytest.fixture
def inspector(publisher):
inspector = FooInspector(
publisher, {}, {}, "localhost", 9999, "", Default(), Default()
)
inspector(False)
return inspector
@patch("sanic.worker.inspector.sys")
@patch("sanic.worker.inspector.socket")
def test_send_inspect_conn_refused(socket: Mock, sys: Mock, caplog):
with caplog.at_level(INFO):
socket.return_value = socket
socket.__enter__.return_value = socket
socket.connect.side_effect = ConnectionRefusedError()
inspect("localhost", 9999, "foo")


@pytest.fixture
def http_client(inspector):
manager = TestManager(inspector.app)
return manager.test_client


@pytest.mark.parametrize("command", ("info",))
@patch("sanic.cli.inspector_client.sys.stdout.write")
def test_send_inspect(write, urlopen, command: str):
urlopen.read.return_value = FULL_SERIALIZED.encode()
InspectorClient("localhost", 9999, False, False, None).do(command)
write.assert_called()
write.reset_mock()
InspectorClient("localhost", 9999, False, True, None).do(command)
write.assert_called_with(OUT_SERIALIZED + "\n")


@patch("sanic.cli.inspector_client.sys")
def test_send_inspect_conn_refused(sys: Mock, urlopen):
urlopen.side_effect = URLError("")
InspectorClient("localhost", 9999, False, False, None).do("info")
socket.close.assert_called_once()
sys.exit.assert_called_once_with(1)

message = (
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}http://localhost:9999{Colors.END}\n"
f"{Colors.YELLOW}('localhost', 9999){Colors.END}\n"
"Either the application is not running, or it did not start "
"an inspector instance.\n<urlopen error >\n"
"an inspector instance."
)
sys.exit.assert_called_once_with(1)
sys.stderr.write.assert_called_once_with(message)
assert ("sanic.error", ERROR, message) in caplog.record_tuples


def test_run_inspector_reload(publisher, http_client):
_, response = http_client.post("/reload")
assert response.status == 200
publisher.send.assert_called_once_with("__ALL_PROCESSES__:")
@patch("sanic.worker.inspector.configure_socket")
@pytest.mark.parametrize("action", (b"reload", b"shutdown", b"foo"))
def test_run_inspector(configure_socket: Mock, action: bytes):
sock = Mock()
conn = Mock()
conn.recv.return_value = action
configure_socket.return_value = sock
inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
inspector.reload = Mock() # type: ignore
inspector.shutdown = Mock() # type: ignore
inspector.state_to_json = Mock(return_value="foo") # type: ignore

def accept():
inspector.run = False
return conn, ...

sock.accept = accept

inspector()

configure_socket.assert_called_once_with(
{"host": "localhost", "port": 9999, "unix": None, "backlog": 1}
)
conn.recv.assert_called_with(64)

if action == b"reload":
conn.send.assert_called_with(b"\n")
inspector.reload.assert_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_not_called()
elif action == b"shutdown":
conn.send.assert_called_with(b"\n")
inspector.reload.assert_not_called()
inspector.shutdown.assert_called()
inspector.state_to_json.assert_not_called()
else:
conn.send.assert_called_with(b'"foo"')
inspector.reload.assert_not_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_called()


def test_run_inspector_reload_zero_downtime(publisher, http_client):
_, response = http_client.post("/reload", json={"zero_downtime": True})
assert response.status == 200
publisher.send.assert_called_once_with("__ALL_PROCESSES__::STARTUP_FIRST")
@patch("sanic.worker.inspector.configure_socket")
def test_accept_timeout(configure_socket: Mock):
sock = Mock()
configure_socket.return_value = sock
inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
inspector.reload = Mock() # type: ignore
inspector.shutdown = Mock() # type: ignore
inspector.state_to_json = Mock(return_value="foo") # type: ignore

def accept():
inspector.run = False
raise timeout

def test_run_inspector_shutdown(publisher, http_client):
_, response = http_client.post("/shutdown")
assert response.status == 200
publisher.send.assert_called_once_with("__TERMINATE__")
sock.accept = accept

inspector()

def test_run_inspector_scale(publisher, http_client):
_, response = http_client.post("/scale", json={"replicas": 4})
assert response.status == 200
publisher.send.assert_called_once_with("__SCALE__:4")


def test_run_inspector_arbitrary(http_client):
_, response = http_client.post("/foo", json={"bar": 99})
assert response.status == 200
assert response.json == {"meta": {"action": "foo"}, "result": "bar is 99"}
inspector.reload.assert_not_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_not_called()


def test_state_to_json():
@@ -117,10 +142,8 @@ def test_state_to_json():
now_iso = now.isoformat()
app_info = {"app": "hello"}
worker_state = {"Test": {"now": now, "nested": {"foo": now}}}
inspector = Inspector(
Mock(), app_info, worker_state, "", 0, "", Default(), Default()
)
state = inspector._state_to_json()
inspector = Inspector(Mock(), app_info, worker_state, "", 0)
state = inspector.state_to_json()

assert state == {
"info": app_info,
@@ -128,14 +151,17 @@ def test_state_to_json():
}


def test_run_inspector_authentication():
inspector = Inspector(
Mock(), {}, {}, "", 0, "super-secret", Default(), Default()
)(False)
manager = TestManager(inspector.app)
_, response = manager.test_client.get("/")
assert response.status == 401
_, response = manager.test_client.get(
"/", headers={"Authorization": "Bearer super-secret"}
)
assert response.status == 200
def test_reload():
publisher = Mock()
inspector = Inspector(publisher, {}, {}, "", 0)
inspector.reload()

publisher.send.assert_called_once_with("__ALL_PROCESSES__:")


def test_shutdown():
publisher = Mock()
inspector = Inspector(publisher, {}, {}, "", 0)
inspector.shutdown()

publisher.send.assert_called_once_with("__TERMINATE__")

@@ -1,21 +1,12 @@
from logging import ERROR, INFO
from signal import SIGINT
from signal import SIGINT, SIGKILL
from unittest.mock import Mock, call, patch

import pytest

from sanic.compat import OS_IS_WINDOWS
from sanic.exceptions import ServerKilled
from sanic.worker.constants import RestartOrder
from sanic.worker.manager import WorkerManager


if not OS_IS_WINDOWS:
from signal import SIGKILL
else:
SIGKILL = SIGINT


def fake_serve():
...

@@ -23,7 +14,14 @@ def fake_serve():
def test_manager_no_workers():
message = "Cannot serve with no workers"
with pytest.raises(RuntimeError, match=message):
WorkerManager(0, fake_serve, {}, Mock(), (Mock(), Mock()), {})
WorkerManager(
0,
fake_serve,
{},
Mock(),
(Mock(), Mock()),
{},
)


@patch("sanic.worker.process.os")
@@ -32,8 +30,17 @@ def test_terminate(os_mock: Mock):
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
assert manager.terminated is False
manager.terminate()
assert manager.terminated is True
os_mock.kill.assert_called_once_with(1234, SIGINT)


@@ -44,7 +51,14 @@ def test_shutown(os_mock: Mock):
process.is_alive.return_value = True
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
manager.shutdown()
os_mock.kill.assert_called_once_with(1234, SIGINT)

@@ -55,36 +69,32 @@ def test_kill(os_mock: Mock):
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
with pytest.raises(ServerKilled):
manager.kill()
os_mock.kill.assert_called_once_with(1234, SIGKILL)


@patch("sanic.worker.process.os")
@patch("sanic.worker.manager.os")
def test_shutdown_signal_send_kill(
manager_os_mock: Mock, process_os_mock: Mock
):
process = Mock()
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
assert manager._shutting_down is False
manager.shutdown_signal(SIGINT, None)
assert manager._shutting_down is True
process_os_mock.kill.assert_called_once_with(1234, SIGINT)
manager.shutdown_signal(SIGINT, None)
manager_os_mock.kill.assert_called_once_with(1234, SIGKILL)


def test_restart_all():
p1 = Mock()
p2 = Mock()
context = Mock()
context.Process.side_effect = [p1, p2, p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), Mock()), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
assert len(list(manager.transient_processes))
manager.restart()
p1.terminate.assert_called_once()
@@ -119,187 +129,91 @@ def test_restart_all():
)


@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_all(zero_downtime):
def test_monitor_all():
p1 = Mock()
p2 = Mock()
sub = Mock()
incoming = (
"__ALL_PROCESSES__::STARTUP_FIRST"
if zero_downtime
else "__ALL_PROCESSES__:"
)
sub.recv.side_effect = [incoming, ""]
sub.recv.side_effect = ["__ALL_PROCESSES__:", ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()

restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=None,
reloaded_files="",
restart_order=restart_order,
process_names=None, reloaded_files=""
)


@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_all_with_files(zero_downtime):
def test_monitor_all_with_files():
p1 = Mock()
p2 = Mock()
sub = Mock()
incoming = (
"__ALL_PROCESSES__:foo,bar:STARTUP_FIRST"
if zero_downtime
else "__ALL_PROCESSES__:foo,bar"
)
sub.recv.side_effect = [incoming, ""]
sub.recv.side_effect = ["__ALL_PROCESSES__:foo,bar", ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()

restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=None,
reloaded_files="foo,bar",
restart_order=restart_order,
process_names=None, reloaded_files="foo,bar"
)


@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_one_process(zero_downtime):
def test_monitor_one_process():
p1 = Mock()
p1.name = "Testing"
p2 = Mock()
sub = Mock()
incoming = (
f"{p1.name}:foo,bar:STARTUP_FIRST"
if zero_downtime
else f"{p1.name}:foo,bar"
)
sub.recv.side_effect = [incoming, ""]
sub.recv.side_effect = [f"{p1.name}:foo,bar", ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()

restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=[p1.name],
reloaded_files="foo,bar",
restart_order=restart_order,
process_names=[p1.name], reloaded_files="foo,bar"
)


def test_shutdown_signal():
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, Mock(), (pub, Mock()), {})
manager = WorkerManager(
1,
fake_serve,
{},
Mock(),
(pub, Mock()),
{},
)
manager.shutdown = Mock() # type: ignore

manager.shutdown_signal(SIGINT, None)
pub.send.assert_called_with(None)
manager.shutdown.assert_called_once_with()


def test_shutdown_servers(caplog):
p1 = Mock()
p1.pid = 1234
context = Mock()
context.Process.side_effect = [p1]
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, context, (pub, Mock()), {})

with patch("os.kill") as kill:
with caplog.at_level(ERROR):
manager.shutdown_server()

kill.assert_called_once_with(1234, SIGINT)
kill.reset_mock()

assert not caplog.record_tuples

manager.shutdown_server()

kill.assert_not_called()

assert (
"sanic.error",
ERROR,
"Server shutdown failed because a server was not found.",
) in caplog.record_tuples


def test_shutdown_servers_named():
p1 = Mock()
p1.pid = 1234
p2 = Mock()
p2.pid = 6543
context = Mock()
context.Process.side_effect = [p1, p2]
pub = Mock()
manager = WorkerManager(2, fake_serve, {}, context, (pub, Mock()), {})

with patch("os.kill") as kill:
with pytest.raises(KeyError):
manager.shutdown_server("foo")
manager.shutdown_server("Server-1")

kill.assert_called_once_with(6543, SIGINT)


def test_scale(caplog):
p1 = Mock()
p1.pid = 1234
p2 = Mock()
p2.pid = 3456
p3 = Mock()
p3.pid = 5678
context = Mock()
context.Process.side_effect = [p1, p2, p3]
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, context, (pub, Mock()), {})

assert len(manager.transient) == 1

manager.scale(3)
assert len(manager.transient) == 3

with patch("os.kill") as kill:
manager.scale(2)
assert len(manager.transient) == 2

manager.scale(1)
assert len(manager.transient) == 1

kill.call_count == 2

with caplog.at_level(INFO):
manager.scale(1)

assert (
"sanic.root",
INFO,
"No change needed. There are already 1 workers.",
) in caplog.record_tuples

with pytest.raises(ValueError, match=r"Cannot scale to 0 workers\."):
manager.scale(0)

@@ -1,5 +1,3 @@
import sys

from multiprocessing import Event
from os import environ, getpid
from typing import Any, Dict, Type, Union
@@ -8,7 +6,6 @@ from unittest.mock import Mock
import pytest

from sanic import Sanic
from sanic.compat import use_context
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.state import WorkerState

@@ -31,10 +28,6 @@ def m(monitor_publisher, worker_state):
del environ["SANIC_WORKER_NAME"]


@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_has_multiplexer_default(app: Sanic):
event = Event()

@@ -48,8 +41,7 @@ def test_has_multiplexer_default(app: Sanic):
app.shared_ctx.event.set()
app.stop()

with use_context("fork"):
app.run()
app.run()

assert event.is_set()

@@ -98,17 +90,17 @@ def test_ack(worker_state: Dict[str, Any], m: WorkerMultiplexer):

def test_restart_self(monitor_publisher: Mock, m: WorkerMultiplexer):
m.restart()
monitor_publisher.send.assert_called_once_with("Test:")
monitor_publisher.send.assert_called_once_with("Test")


def test_restart_foo(monitor_publisher: Mock, m: WorkerMultiplexer):
m.restart("foo")
monitor_publisher.send.assert_called_once_with("foo:")
monitor_publisher.send.assert_called_once_with("foo")


def test_reload_alias(monitor_publisher: Mock, m: WorkerMultiplexer):
m.reload()
monitor_publisher.send.assert_called_once_with("Test:")
monitor_publisher.send.assert_called_once_with("Test")


def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
@@ -116,11 +108,6 @@ def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
monitor_publisher.send.assert_called_once_with("__TERMINATE__")


def test_scale(monitor_publisher: Mock, m: WorkerMultiplexer):
m.scale(99)
monitor_publisher.send.assert_called_once_with("__SCALE__:99")


def test_properties(
monitor_publisher: Mock, worker_state: Dict[str, Any], m: WorkerMultiplexer
):
@@ -135,20 +122,10 @@ def test_properties(
@pytest.mark.parametrize(
"params,expected",
(
({}, "Test:"),
({"name": "foo"}, "foo:"),
({}, "Test"),
({"name": "foo"}, "foo"),
({"all_workers": True}, "__ALL_PROCESSES__:"),
({"zero_downtime": True}, "Test::STARTUP_FIRST"),
({"name": "foo", "all_workers": True}, ValueError),
({"name": "foo", "zero_downtime": True}, "foo::STARTUP_FIRST"),
(
{"all_workers": True, "zero_downtime": True},
"__ALL_PROCESSES__::STARTUP_FIRST",
),
(
{"name": "foo", "all_workers": True, "zero_downtime": True},
ValueError,
),
),
)
def test_restart_params(

@@ -5,15 +5,14 @@ import threading
from asyncio import Event
from logging import DEBUG
from pathlib import Path
from time import sleep
from unittest.mock import Mock

import pytest

from sanic.app import Sanic
from sanic.worker.constants import ProcessState, RestartOrder
from sanic.constants import RestartOrder
from sanic.worker.loader import AppLoader
from sanic.worker.process import WorkerProcess
from sanic.worker.process import ProcessState, WorkerProcess
from sanic.worker.reloader import Reloader


@@ -77,7 +76,7 @@ def test_iter_files():
"order,expected",
(
(
RestartOrder.SHUTDOWN_FIRST,
"shutdown_first",
[
"Restarting a process",
"Begin restart termination",
@@ -85,7 +84,7 @@ def test_iter_files():
],
),
(
RestartOrder.STARTUP_FIRST,
"startup_first",
[
"Restarting a process",
"Starting a process",
@@ -97,6 +96,7 @@ def test_iter_files():
),
)
def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):

current_process = Mock()
worker_process = WorkerProcess(
lambda **_: current_process,
@@ -104,6 +104,7 @@ def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
lambda **_: ...,
{},
{},
RestartOrder[order.upper()],
)

def start(self):
@@ -114,7 +115,7 @@ def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
monkeypatch.setattr(threading.Thread, "start", start)

with caplog.at_level(DEBUG):
worker_process.restart(restart_order=order)
worker_process.restart()

ansi = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")

@@ -129,32 +130,6 @@ def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
monkeypatch.setattr(threading.Thread, "start", orig)


def test_reload_delayed(monkeypatch):
WorkerProcess.THRESHOLD = 1

current_process = Mock()
worker_process = WorkerProcess(
lambda **_: current_process,
"Test",
lambda **_: ...,
{},
{},
)

def start(self):
sleep(0.2)
self._target()

orig = threading.Thread.start
monkeypatch.setattr(threading.Thread, "start", start)

message = "Worker Test failed to come ack within 0.1 seconds"
with pytest.raises(TimeoutError, match=message):
worker_process.restart(restart_order=RestartOrder.STARTUP_FIRST)

monkeypatch.setattr(threading.Thread, "start", orig)


def test_reloader_triggers_start_stop_listeners(
app: Sanic, app_loader: AppLoader
):
