Compare commits

..

41 Commits

Author SHA1 Message Date
Adam Hopkins
4ad8168bb0 Version 22.12 release notes (#2637) 2022-12-27 16:50:36 +02:00
Adam Hopkins
28f5b3c301 Add better inspector arg parsing (#2642) 2022-12-26 12:27:40 +02:00
Adam Hopkins
c573019e7f ASGI websocket recv text or bytes (#2640) 2022-12-25 13:52:07 +02:00
Adam Hopkins
029f564032 Pass unquote thru add_route (#2639) 2022-12-21 10:45:23 +02:00
Adam Hopkins
2abe66b670 Add priority to register_middleware method (#2636) 2022-12-19 19:14:46 +02:00
Adam Hopkins
911485d52e Fix Windows sock share (#2635) 2022-12-18 15:04:10 +02:00
Adam Hopkins
4744a89c33 Fix double ctrl-c kill (#2634) 2022-12-18 14:40:38 +02:00
Adam Hopkins
f7040ccec8 Implement restart ordering (#2632) 2022-12-18 14:09:17 +02:00
Adam Hopkins
518152d97e Reload interval on class variable (#2633) 2022-12-18 13:36:54 +02:00
Adam Hopkins
0e44e9cacb Move to HTTP Inspector (#2626) 2022-12-18 10:29:58 +02:00
Adam Hopkins
bfb54b0969 Test for 3.11 support (#2612)
Co-authored-by: Zhiwei <zhi.wei.liang@outlook.com>
2022-12-17 23:46:22 +02:00
Zhiwei
154863d6c6 Method Signal Handler Test (#2630) 2022-12-17 20:38:46 +02:00
Adam Hopkins
a3ff0c13b7 ASGI lifespan failure on exception (#2627) 2022-12-16 08:56:07 +02:00
Mary
95ee518aec Replace deprecated distutils.strtobool (#2628) 2022-12-16 07:48:41 +02:00
Zhiwei
71d3d87bcc Deprecate Conditions and Triggers Saved in handler Callable; Save Condition in signal.extra Instead (#2608) 2022-12-15 12:32:07 +02:00
Adam Hopkins
b276b91c21 Allow fork in limited cases (#2624) 2022-12-15 11:49:26 +02:00
Adam Hopkins
064168f3c8 Add a SIGKILL to second ctrl+c (#2621) 2022-12-14 23:51:11 +02:00
Adam Hopkins
db39e127bf Scale workers (#2617) 2022-12-13 09:28:23 +02:00
L. Kärkkäinen
13e9ab7ba9 Filename normalisation of form-data/multipart file uploads (umlauts on Apple clients) (#2625)
Co-authored-by: L. Karkkainen <tronic@users.noreply.github.com>
2022-12-13 08:36:21 +02:00
Adam Hopkins
92e7463721 Add a restart mechanism to all workers in the multiplexer (#2622) 2022-12-11 11:33:42 +02:00
Néstor Pérez
8e720365c2 Add JSONResponse class (#2569)
Co-authored-by: Adam Hopkins <adam@amhopkins.com>
2022-12-11 10:37:45 +02:00
Adam Hopkins
d4041161c7 Ensure middleware executes once per request timeout (#2615) 2022-12-07 23:07:17 +02:00
Adam Hopkins
f32437bf13 Kill server early on worker error (#2610) 2022-12-07 14:42:17 +02:00
LiraNuna
0909e94527 Corrected Colors enum under Python 3.11 (#2590)
Co-authored-by: Adam Hopkins <adam@amhopkins.com>
Fixes https://github.com/sanic-org/sanic/issues/2589
2022-11-29 12:17:48 +02:00
Adam Hopkins
aef2673c38 Force socket shutdown before close (#2607)
Co-authored-by: Zhiwei <zhi.wei.liang@outlook.com>
2022-11-29 12:04:22 +02:00
Aymeric Augustin
4c14910d5b Add compatibility with websockets 11.0. (#2609)
Co-authored-by: Adam Hopkins <adam@amhopkins.com>
2022-11-29 11:45:18 +02:00
Adam Hopkins
beae35f921 Ignore recent failures on bad TLS tests (#2611) 2022-11-29 10:51:51 +02:00
Zhiwei
ad4e526c77 Require uvloop >= 0.15.0 (#2598) 2022-11-13 15:32:04 +02:00
Adam Hopkins
4422d0c34d Mergeback from current-release 2022-10-31 13:24:47 +02:00
Adam Hopkins
ad9183d21d Merge branch 'main' of github.com:sanic-org/sanic into current-release 2022-10-31 13:22:47 +02:00
Adam Hopkins
d70636ba2e Add GenericCreator for loading SSL certs in processes (#2578) 2022-10-31 13:22:30 +02:00
Adam Hopkins
da23f85675 Set version 2022-10-31 13:20:17 +02:00
Adam Hopkins
3f4663b9f8 Resolve edge case in nested BP Groups (#2592) 2022-10-31 12:58:41 +02:00
Adam Hopkins
65d7447cf6 Add interval sleep in reloader (#2595) 2022-10-31 12:34:01 +02:00
Adam Hopkins
5369291c27 22.9 Docs (#2556) 2022-10-31 11:47:23 +02:00
Ryu Juheon
1c4925edf7 fix: sideeffects created by changing fork to spawn (#2591) 2022-10-27 20:39:17 +03:00
Santi Cardozo
6b9edfd05c improve error message if no apps found in registry (#2585) 2022-10-25 16:54:44 +03:00
Adam Hopkins
97f33f42df Update SECURITY.md 2022-10-25 13:05:13 +03:00
Adam Hopkins
15a588a90c Upgrade markdown templates to issue forms (#2588) 2022-10-25 13:04:11 +03:00
Ryu Juheon
82421e7efc docs: sanic now supports windows. (#2582) 2022-10-21 14:31:22 +03:00
Adam Hopkins
f891995b48 Start v22.12 2022-09-29 13:04:46 +03:00
93 changed files with 3105 additions and 1049 deletions

.github/ISSUE_TEMPLATE/bug-report.yml (new file, 66 lines)
View File

@@ -0,0 +1,66 @@
name: 🐞 Bug report
description: Create a report to help us improve
labels: ["bug", "triage"]
body:
- type: checkboxes
id: existing
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue already exists for the bug you encountered.
options:
- label: I have searched the existing issues
required: true
- type: textarea
id: description
attributes:
label: Describe the bug
description: A clear and concise description of what the bug is, make sure to paste any exceptions and tracebacks using markdown code-block syntax to make it easier to read.
validations:
required: true
- type: textarea
id: code
attributes:
label: Code snippet
description: Relevant source code, make sure to remove what is not necessary.
validations:
required: false
- type: textarea
id: expected
attributes:
label: Expected Behavior
description: A concise description of what you expected to happen.
validations:
required: false
- type: dropdown
id: running
attributes:
label: How do you run Sanic?
options:
- Sanic CLI
- As a module
- As a script (`app.run` or `Sanic.serve`)
- ASGI
validations:
required: true
- type: input
id: os
attributes:
label: Operating System
description: What OS?
validations:
required: true
- type: input
id: version
attributes:
label: Sanic Version
description: Check startup logs or try `sanic --version`
validations:
required: true
- type: textarea
id: additional
attributes:
label: Additional context
description: Add any other context about the problem here.
validations:
required: false

View File

@@ -1,27 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
labels: ["bug"]
---
**Describe the bug**
<!-- A clear and concise description of what the bug is, make sure to paste any exceptions and tracebacks. -->
**Code snippet**
<!-- Relevant source code, make sure to remove what is not necessary. -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Environment (please complete the following information):**
<!-- Please provide the information below. Instead, you can copy and paste the message that Sanic shows on startup. If you do, please remember to format it with ``` -->
- OS:
- Sanic Version:
**Additional context**
<!-- Add any other context about the problem here. -->

View File

@@ -1,4 +1,4 @@
blank_issues_enabled: true
blank_issues_enabled: false
contact_links:
- name: Questions and Help
url: https://community.sanicframework.org/c/questions-and-help

View File

@@ -0,0 +1,34 @@
name: 🌟 Feature request
description: Suggest an enhancement for Sanic
labels: ["feature request"]
body:
- type: checkboxes
id: existing
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue already exists for the enhancement you are proposing.
options:
- label: I have searched the existing issues
required: true
- type: textarea
id: description
attributes:
label: Is your feature request related to a problem? Please describe.
description: A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
validations:
required: false
- type: textarea
id: code
attributes:
label: Describe the solution you'd like
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: additional
attributes:
label: Additional context
description: Add any other context about the problem here.
validations:
required: false

View File

@@ -1,17 +0,0 @@
---
name: Feature request
about: Suggest an idea for Sanic
labels: ["feature request"]
---
**Is your feature request related to a problem? Please describe.**
<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->
**Describe the solution you'd like**
<!-- A clear and concise description of what you want to happen. -->
**Additional context**
<!-- Add any other context or sample code about the feature request here. -->

View File

@@ -20,6 +20,7 @@ jobs:
- { python-version: 3.8, tox-env: security}
- { python-version: 3.9, tox-env: security}
- { python-version: "3.10", tox-env: security}
- { python-version: "3.11", tox-env: security}
steps:
- name: Checkout the repository
uses: actions/checkout@v2

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
config:
- {python-version: "3.8", tox-env: "docs"}
- {python-version: "3.10", tox-env: "docs"}
fail-fast: false

View File

@@ -16,7 +16,7 @@ jobs:
matrix:
os: [ubuntu-latest]
config:
- { python-version: 3.8, tox-env: lint}
- { python-version: "3.10", tox-env: lint}
steps:
- name: Checkout the repository
uses: actions/checkout@v2

.github/workflows/pr-python311.yml (new file, 47 lines)
View File

@@ -0,0 +1,47 @@
name: Python 3.11 Tests
on:
pull_request:
branches:
- main
- "*LTS"
types: [opened, synchronize, reopened, ready_for_review]
jobs:
testPy311:
if: github.event.pull_request.draft == false
name: ut-${{ matrix.config.tox-env }}-${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
# os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest]
config:
- {
python-version: "3.11",
tox-env: py311,
ignore-error-flake: "false",
command-timeout: "0",
}
- {
python-version: "3.11",
tox-env: py311-no-ext,
ignore-error-flake: "true",
command-timeout: "600000",
}
steps:
- name: Checkout the Repository
uses: actions/checkout@v2
id: checkout-branch
- name: Run Unit Tests
uses: harshanarayana/custom-actions@main
with:
python-version: ${{ matrix.config.python-version }}
test-infra-tool: tox
test-infra-version: latest
action: tests
test-additional-args: "-e=${{ matrix.config.tox-env }},-vv=''"
experimental-ignore-error: "${{ matrix.config.ignore-error-flake }}"
command-timeout: "${{ matrix.config.command-timeout }}"
test-failure-retry: "3"

View File

@@ -20,6 +20,7 @@ jobs:
- { python-version: 3.8, tox-env: type-checking}
- { python-version: 3.9, tox-env: type-checking}
- { python-version: "3.10", tox-env: type-checking}
- { python-version: "3.11", tox-env: type-checking}
steps:
- name: Checkout the repository
uses: actions/checkout@v2

View File

@@ -19,6 +19,7 @@ jobs:
- { python-version: 3.8, tox-env: py38-no-ext }
- { python-version: 3.9, tox-env: py39-no-ext }
- { python-version: "3.10", tox-env: py310-no-ext }
- { python-version: "3.11", tox-env: py310-no-ext }
- { python-version: pypy-3.7, tox-env: pypy37-no-ext }
steps:

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
fail-fast: true
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10"]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
steps:
- name: Checkout repository

View File

@@ -11,7 +11,7 @@ jobs:
strategy:
fail-fast: true
matrix:
python-version: ["3.8"]
python-version: ["3.10"]
steps:
- name: Checkout Repository

View File

@@ -313,8 +313,8 @@ Version 21.3.0
`#2074 <https://github.com/sanic-org/sanic/pull/2074>`_
Performance adjustments in ``handle_request_``
Version 20.12.3 🔷
------------------
Version 20.12.3
---------------
`Current LTS version`
@@ -350,8 +350,8 @@ Version 19.12.5
`#2027 <https://github.com/sanic-org/sanic/pull/2027>`_
Remove old chardet requirement, add in hard multidict requirement
Version 20.12.0 🔹
-----------------
Version 20.12.0
---------------
**Features**

View File

@@ -102,9 +102,6 @@ Installation
If you are running on a clean install of Fedora 28 or above, please make sure you have the ``redhat-rpm-config`` package installed in case if you want to
use ``sanic`` with ``ujson`` dependency.
.. note::
Windows support is currently "experimental" and on a best-effort basis. Multiple workers are also not currently supported on Windows (see `Issue #1517 <https://github.com/sanic-org/sanic/issues/1517>`_), but setting ``workers=1`` should launch the server successfully.
Hello World Example
-------------------

View File

@@ -7,13 +7,15 @@ Sanic releases long term support release once a year in December. LTS releases r
| Version | LTS | Supported |
| ------- | ------------- | ----------------------- |
| 22.6 | | :white_check_mark: |
| 22.12 | until 2024-12 | :white_check_mark: |
| 22.9 | | :x: |
| 22.6 | | :x: |
| 22.3 | | :x: |
| 21.12 | until 2023-12 | :white_check_mark: |
| 21.12 | until 2023-12 | :ballot_box_with_check: |
| 21.9 | | :x: |
| 21.6 | | :x: |
| 21.3 | | :x: |
| 20.12 | until 2022-12 | :ballot_box_with_check: |
| 20.12 | | :x: |
| 20.9 | | :x: |
| 20.6 | | :x: |
| 20.3 | | :x: |

View File

@@ -2,3 +2,12 @@
.wy-nav-top {
background: #444444;
}
#changelog section {
padding-left: 3rem;
}
#changelog section h2,
#changelog section h3 {
margin-left: -3rem;
}

View File

@@ -1,6 +1,8 @@
📜 Changelog
============
.. mdinclude:: ./releases/22/22.12.md
.. mdinclude:: ./releases/22/22.9.md
.. mdinclude:: ./releases/22/22.6.md
.. mdinclude:: ./releases/22/22.3.md
.. mdinclude:: ./releases/21/21.12.md

View File

@@ -0,0 +1,55 @@
## Version 22.12.0 🔶
_Current version_
### Features
- [#2569](https://github.com/sanic-org/sanic/pull/2569) Add `JSONResponse` class with some convenient methods when updating a response object
- [#2598](https://github.com/sanic-org/sanic/pull/2598) Change `uvloop` requirement to `>=0.15.0`
- [#2609](https://github.com/sanic-org/sanic/pull/2609) Add compatibility with `websockets` v11.0
- [#2610](https://github.com/sanic-org/sanic/pull/2610) Kill server early on worker error
- Raise deadlock timeout to 30s
- [#2617](https://github.com/sanic-org/sanic/pull/2617) Scale number of running server workers
- [#2621](https://github.com/sanic-org/sanic/pull/2621) [#2634](https://github.com/sanic-org/sanic/pull/2634) Send `SIGKILL` on subsequent `ctrl+c` to force worker exit
- [#2622](https://github.com/sanic-org/sanic/pull/2622) Add API to restart all workers from the multiplexer
- [#2624](https://github.com/sanic-org/sanic/pull/2624) Default to `spawn` for all subprocesses unless specifically set:
```python
from sanic import Sanic
Sanic.start_method = "fork"
```
- [#2625](https://github.com/sanic-org/sanic/pull/2625) Filename normalisation of form-data/multipart file uploads
- [#2626](https://github.com/sanic-org/sanic/pull/2626) Move to HTTP Inspector:
- Remote access to inspect running Sanic instances
- TLS support for encrypted calls to Inspector
- Authentication to Inspector with API key
- Ability to extend Inspector with custom commands
- [#2632](https://github.com/sanic-org/sanic/pull/2632) Control order of restart operations
- [#2633](https://github.com/sanic-org/sanic/pull/2633) Move reload interval to class variable
- [#2636](https://github.com/sanic-org/sanic/pull/2636) Add `priority` to `register_middleware` method
- [#2639](https://github.com/sanic-org/sanic/pull/2639) Add `unquote` to `add_route` method
- [#2640](https://github.com/sanic-org/sanic/pull/2640) ASGI websockets to receive `text` or `bytes`
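A minimal sketch of a few of the APIs above (`JSONResponse`, middleware `priority`, `unquote` on `add_route`); the app name, routes, and handlers are illustrative assumptions, not taken from the release:
```python
from sanic import Sanic
from sanic.response import JSONResponse, json

app = Sanic("Example")


@app.get("/item")
async def get_item(request) -> JSONResponse:
    # JSONResponse (#2569) exposes convenience methods for updating the payload
    resp = json({"name": "example"})
    resp.update({"version": "22.12"})
    return resp


async def tag_request(request):
    # plain request middleware, registered below with an explicit priority
    request.ctx.tagged = True


# priority keyword added to register_middleware (#2636)
app.register_middleware(tag_request, "request", priority=99)


async def show(request, name):
    return json({"name": name})


# unquote is now passed through add_route (#2639)
app.add_route(show, "/show/<name>", unquote=True)
```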
### Bugfixes
- [#2607](https://github.com/sanic-org/sanic/pull/2607) Force socket shutdown before close to allow rebinding
- [#2590](https://github.com/sanic-org/sanic/pull/2590) Use actual `StrEnum` in Python 3.11+
- [#2615](https://github.com/sanic-org/sanic/pull/2615) Ensure middleware executes only once per request timeout
- [#2627](https://github.com/sanic-org/sanic/pull/2627) Crash ASGI application on lifespan failure
- [#2635](https://github.com/sanic-org/sanic/pull/2635) Resolve error with low-level server creation on Windows
### Deprecations and Removals
- [#2608](https://github.com/sanic-org/sanic/pull/2608) [#2630](https://github.com/sanic-org/sanic/pull/2630) Signal conditions and triggers saved on `signal.extra`
- [#2626](https://github.com/sanic-org/sanic/pull/2626) Move to HTTP Inspector
- 🚨 *BREAKING CHANGE*: Moves the Inspector to a Sanic app from a simple TCP socket with a custom protocol
- *DEPRECATE*: The `--inspect*` commands have been deprecated in favor of `inspect ...` commands
- [#2628](https://github.com/sanic-org/sanic/pull/2628) Replace deprecated `distutils.strtobool`
### Developer infrastructure
- [#2612](https://github.com/sanic-org/sanic/pull/2612) Add CI testing for Python 3.11
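For context, a minimal sketch of wiring up the new HTTP Inspector (#2626) with the configuration keys this release introduces; the API key and certificate paths below are placeholder assumptions:
```python
from sanic import Sanic

app = Sanic("MyApp", inspector=True)  # run the Inspector alongside the workers

app.config.INSPECTOR_HOST = "localhost"            # default host
app.config.INSPECTOR_PORT = 6457                   # default port
app.config.INSPECTOR_API_KEY = "super-secret-key"  # checked as a Bearer token
# Optional TLS for encrypted calls to the Inspector:
# app.config.INSPECTOR_TLS_CERT = "/path/to/cert.pem"
# app.config.INSPECTOR_TLS_KEY = "/path/to/key.pem"
```
The new `sanic inspect` CLI (with `reload`, `shutdown`, `scale`, and custom subcommands, defined in `sanic/cli/inspector.py` later in this diff) talks to this HTTP endpoint, replacing the deprecated `--inspect*` flags.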

View File

@@ -1,6 +1,17 @@
## Version 22.6.0 🔶
## Version 22.6.2
_Current version_
### Bugfixes
- [#2522](https://github.com/sanic-org/sanic/pull/2522) Always show server location in ASGI
## Version 22.6.1
### Bugfixes
- [#2477](https://github.com/sanic-org/sanic/pull/2477) Sanic static directory fails when folder name ends with ".."
## Version 22.6.0
### Features
- [#2378](https://github.com/sanic-org/sanic/pull/2378) Introduce HTTP/3 and autogeneration of TLS certificates in `DEBUG` mode

View File

@@ -0,0 +1,74 @@
## Version 22.9.1
### Features
- [#2585](https://github.com/sanic-org/sanic/pull/2585) Improved error message when no applications have been registered
### Bugfixes
- [#2578](https://github.com/sanic-org/sanic/pull/2578) Add certificate loader for in process certificate creation
- [#2591](https://github.com/sanic-org/sanic/pull/2591) Do not use sentinel identity for `spawn` compatibility
- [#2592](https://github.com/sanic-org/sanic/pull/2592) Fix properties in nested blueprint groups
- [#2595](https://github.com/sanic-org/sanic/pull/2595) Introduce sleep interval on new worker reloader
### Deprecations and Removals
### Developer infrastructure
- [#2588](https://github.com/sanic-org/sanic/pull/2588) Markdown templates on issue forms
### Improved Documentation
- [#2556](https://github.com/sanic-org/sanic/pull/2556) v22.9 documentation
- [#2582](https://github.com/sanic-org/sanic/pull/2582) Cleanup documentation on Windows support
## Version 22.9.0
### Features
- [#2445](https://github.com/sanic-org/sanic/pull/2445) Add custom loads function
- [#2490](https://github.com/sanic-org/sanic/pull/2490) Make `WebsocketImplProtocol` async iterable
- [#2499](https://github.com/sanic-org/sanic/pull/2499) Sanic Server WorkerManager refactor
- [#2506](https://github.com/sanic-org/sanic/pull/2506) Use `pathlib` for path resolution (for static file serving)
- [#2508](https://github.com/sanic-org/sanic/pull/2508) Use `path.parts` instead of `match` (for static file serving)
- [#2513](https://github.com/sanic-org/sanic/pull/2513) Better request cancel handling
- [#2516](https://github.com/sanic-org/sanic/pull/2516) Add request properties for HTTP method info:
- `request.is_safe`
- `request.is_idempotent`
- `request.is_cacheable`
- *See* [MDN docs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods) *for more information about when these apply*
- [#2522](https://github.com/sanic-org/sanic/pull/2522) Always show server location in ASGI
- [#2526](https://github.com/sanic-org/sanic/pull/2526) Cache control support for static files for returning 304 when appropriate
- [#2533](https://github.com/sanic-org/sanic/pull/2533) Refactor `_static_request_handler`
- [#2540](https://github.com/sanic-org/sanic/pull/2540) Add signals before and after handler execution
- `http.handler.before`
- `http.handler.after`
- [#2542](https://github.com/sanic-org/sanic/pull/2542) Add *[redacted]* to CLI :)
- [#2546](https://github.com/sanic-org/sanic/pull/2546) Add deprecation warning filter
- [#2550](https://github.com/sanic-org/sanic/pull/2550) Middleware priority and performance enhancements
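A brief sketch of the request-method properties and handler signals listed above; the route is illustrative, and the signal handler deliberately accepts generic keyword context rather than assuming a specific signature:
```python
from sanic import Sanic
from sanic.response import text

app = Sanic("Example")


@app.route("/resource", methods=["GET", "POST"])
async def resource(request):
    # Properties added in #2516 reflect the semantics of the HTTP method used
    return text(
        f"safe={request.is_safe} "
        f"idempotent={request.is_idempotent} "
        f"cacheable={request.is_cacheable}"
    )


# Signals added in #2540 fire around handler execution
@app.signal("http.handler.before")
async def before_handler(**context):
    ...
```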
### Bugfixes
- [#2495](https://github.com/sanic-org/sanic/pull/2495) Prevent directory traversion with static files
- [#2515](https://github.com/sanic-org/sanic/pull/2515) Do not apply double slash to paths in certain static dirs in Blueprints
### Deprecations and Removals
- [#2525](https://github.com/sanic-org/sanic/pull/2525) Warn on duplicate route names, will be prevented outright in v23.3
- [#2537](https://github.com/sanic-org/sanic/pull/2537) Raise warning and deprecation notice on duplicate exceptions, will be prevented outright in v23.3
### Developer infrastructure
- [#2504](https://github.com/sanic-org/sanic/pull/2504) Cleanup test suite
- [#2505](https://github.com/sanic-org/sanic/pull/2505) Replace Unsupported Python Version Number from the Contributing Doc
- [#2530](https://github.com/sanic-org/sanic/pull/2530) Do not include tests folder in installed package resolver
### Improved Documentation
- [#2502](https://github.com/sanic-org/sanic/pull/2502) Fix a few typos
- [#2517](https://github.com/sanic-org/sanic/pull/2517) [#2536](https://github.com/sanic-org/sanic/pull/2536) Add some type hints

View File

@@ -22,5 +22,6 @@ module = [
"httptools.*",
"trustme.*",
"sanic_routing.*",
"aioquic.*",
]
ignore_missing_imports = true

View File

@@ -1 +1 @@
__version__ = "22.9.0"
__version__ = "22.12.0"

View File

@@ -61,7 +61,7 @@ from sanic.exceptions import (
URLBuildError,
)
from sanic.handlers import ErrorHandler
from sanic.helpers import _default
from sanic.helpers import Default, _default
from sanic.http import Stage
from sanic.log import (
LOGGING_CONFIG_DEFAULTS,
@@ -69,6 +69,7 @@ from sanic.log import (
error_logger,
logger,
)
from sanic.middleware import Middleware, MiddlewareLocation
from sanic.mixins.listeners import ListenerEvent
from sanic.mixins.startup import StartupMixin
from sanic.models.futures import (
@@ -140,6 +141,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
"configure_logging",
"ctx",
"error_handler",
"inspector_class",
"go_fast",
"listeners",
"multiplexer",
@@ -162,7 +164,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
def __init__(
self,
name: str = None,
name: Optional[str] = None,
config: Optional[Config] = None,
ctx: Optional[Any] = None,
router: Optional[Router] = None,
@@ -176,6 +178,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
dumps: Optional[Callable[..., AnyStr]] = None,
loads: Optional[Callable[..., Any]] = None,
inspector: bool = False,
inspector_class: Optional[Type[Inspector]] = None,
) -> None:
super().__init__(name=name)
# logging
@@ -211,6 +214,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
self.configure_logging: bool = configure_logging
self.ctx: Any = ctx or SimpleNamespace()
self.error_handler: ErrorHandler = error_handler or ErrorHandler()
self.inspector_class: Type[Inspector] = inspector_class or Inspector
self.listeners: Dict[str, List[ListenerType[Any]]] = defaultdict(list)
self.named_request_middleware: Dict[str, Deque[MiddlewareType]] = {}
self.named_response_middleware: Dict[str, Deque[MiddlewareType]] = {}
@@ -291,8 +295,12 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
return listener
def register_middleware(
self, middleware: MiddlewareType, attach_to: str = "request"
) -> MiddlewareType:
self,
middleware: Union[MiddlewareType, Middleware],
attach_to: str = "request",
*,
priority: Union[Default, int] = _default,
) -> Union[MiddlewareType, Middleware]:
"""
Register an application level middleware that will be attached
to all the API URLs registered under this application.
@@ -308,19 +316,37 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
**response** - Invoke before the response is returned back
:return: decorated method
"""
if attach_to == "request":
retval = middleware
location = MiddlewareLocation[attach_to.upper()]
if not isinstance(middleware, Middleware):
middleware = Middleware(
middleware,
location=location,
priority=priority if isinstance(priority, int) else 0,
)
elif middleware.priority != priority and isinstance(priority, int):
middleware = Middleware(
middleware.func,
location=middleware.location,
priority=priority,
)
if location is MiddlewareLocation.REQUEST:
if middleware not in self.request_middleware:
self.request_middleware.append(middleware)
if attach_to == "response":
if location is MiddlewareLocation.RESPONSE:
if middleware not in self.response_middleware:
self.response_middleware.appendleft(middleware)
return middleware
return retval
def register_named_middleware(
self,
middleware: MiddlewareType,
route_names: Iterable[str],
attach_to: str = "request",
*,
priority: Union[Default, int] = _default,
):
"""
Method for attaching middleware to specific routes. This is mainly an
@@ -334,19 +360,35 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
defaults to "request"
:type attach_to: str, optional
"""
if attach_to == "request":
retval = middleware
location = MiddlewareLocation[attach_to.upper()]
if not isinstance(middleware, Middleware):
middleware = Middleware(
middleware,
location=location,
priority=priority if isinstance(priority, int) else 0,
)
elif middleware.priority != priority and isinstance(priority, int):
middleware = Middleware(
middleware.func,
location=middleware.location,
priority=priority,
)
if location is MiddlewareLocation.REQUEST:
for _rn in route_names:
if _rn not in self.named_request_middleware:
self.named_request_middleware[_rn] = deque()
if middleware not in self.named_request_middleware[_rn]:
self.named_request_middleware[_rn].append(middleware)
if attach_to == "response":
if location is MiddlewareLocation.RESPONSE:
for _rn in route_names:
if _rn not in self.named_response_middleware:
self.named_response_middleware[_rn] = deque()
if middleware not in self.named_response_middleware[_rn]:
self.named_response_middleware[_rn].appendleft(middleware)
return middleware
return retval
def _apply_exception_handler(
self,
@@ -480,17 +522,16 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
for item in blueprint:
params = {**options}
if isinstance(blueprint, BlueprintGroup):
if blueprint.url_prefix:
merge_from = [
options.get("url_prefix", ""),
blueprint.url_prefix,
]
if not isinstance(item, BlueprintGroup):
merge_from.append(item.url_prefix or "")
merged_prefix = "/".join(
u.strip("/") for u in merge_from
).rstrip("/")
params["url_prefix"] = f"/{merged_prefix}"
merge_from = [
options.get("url_prefix", ""),
blueprint.url_prefix or "",
]
if not isinstance(item, BlueprintGroup):
merge_from.append(item.url_prefix or "")
merged_prefix = "/".join(
u.strip("/") for u in merge_from if u
).rstrip("/")
params["url_prefix"] = f"/{merged_prefix}"
for _attr in ["version", "strict_slashes"]:
if getattr(item, _attr) is None:
@@ -1453,7 +1494,14 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
return cls.get_app("__mp_main__", force_create=force_create)
if force_create:
return cls(name)
raise SanicException(f'Sanic app name "{name}" not found.')
raise SanicException(
f"Sanic app name '{name}' not found.\n"
"App instantiation must occur outside "
"if __name__ == '__main__' "
"block or by using an AppLoader.\nSee "
"https://sanic.dev/en/guide/deployment/app-loader.html"
" for more details."
)
@classmethod
def _check_uvloop_conflict(cls) -> None:
@@ -1495,7 +1543,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
if self.state.is_debug and self.config.TOUCHUP is not True:
self.config.TOUCHUP = False
elif self.config.TOUCHUP is _default:
elif isinstance(self.config.TOUCHUP, Default):
self.config.TOUCHUP = True
# Setup routers
@@ -1525,6 +1573,7 @@ class Sanic(BaseSanic, StartupMixin, metaclass=TouchUpMeta):
self.state.is_started = True
def ack(self):
if hasattr(self, "multiplexer"):
self.multiplexer.ack()

View File

@@ -8,11 +8,6 @@ from typing import TYPE_CHECKING
if TYPE_CHECKING:
from sanic import Sanic
try:
from sanic_ext import Extend # type: ignore
except ImportError:
...
def setup_ext(app: Sanic, *, fail: bool = False, **kwargs):
if not app.config.AUTO_EXTEND:
@@ -33,7 +28,7 @@ def setup_ext(app: Sanic, *, fail: bool = False, **kwargs):
return
if not getattr(app, "_ext", None):
Ext: Extend = getattr(sanic_ext, "Extend")
Ext = getattr(sanic_ext, "Extend")
app._ext = Ext(app, **kwargs)
return app.ext

View File

@@ -7,9 +7,9 @@ from urllib.parse import quote
from sanic.compat import Header
from sanic.exceptions import ServerError
from sanic.helpers import _default
from sanic.helpers import Default
from sanic.http import Stage
from sanic.log import logger
from sanic.log import error_logger, logger
from sanic.models.asgi import ASGIReceive, ASGIScope, ASGISend, MockTransport
from sanic.request import Request
from sanic.response import BaseHTTPResponse
@@ -61,7 +61,7 @@ class Lifespan:
await self.asgi_app.sanic_app._server_event("init", "before")
await self.asgi_app.sanic_app._server_event("init", "after")
if self.asgi_app.sanic_app.config.USE_UVLOOP is not _default:
if not isinstance(self.asgi_app.sanic_app.config.USE_UVLOOP, Default):
warnings.warn(
"You have set the USE_UVLOOP configuration option, but Sanic "
"cannot control the event loop when running in ASGI mode."
@@ -85,13 +85,27 @@ class Lifespan:
) -> None:
message = await receive()
if message["type"] == "lifespan.startup":
await self.startup()
await send({"type": "lifespan.startup.complete"})
try:
await self.startup()
except Exception as e:
error_logger.exception(e)
await send(
{"type": "lifespan.startup.failed", "message": str(e)}
)
else:
await send({"type": "lifespan.startup.complete"})
message = await receive()
if message["type"] == "lifespan.shutdown":
await self.shutdown()
await send({"type": "lifespan.shutdown.complete"})
try:
await self.shutdown()
except Exception as e:
error_logger.exception(e)
await send(
{"type": "lifespan.shutdown.failed", "message": str(e)}
)
else:
await send({"type": "lifespan.shutdown.complete"})
class ASGIApp:

View File

@@ -1,6 +1,6 @@
import re
from typing import Any
from typing import Any, Optional
from sanic.base.meta import SanicMeta
from sanic.exceptions import SanicException
@@ -24,7 +24,9 @@ class BaseSanic(
):
__slots__ = ("name",)
def __init__(self, name: str = None, *args: Any, **kwargs: Any) -> None:
def __init__(
self, name: Optional[str] = None, *args: Any, **kwargs: Any
) -> None:
class_name = self.__class__.__name__
if name is None:

View File

@@ -442,7 +442,7 @@ class Blueprint(BaseSanic):
events.add(signal.ctx.event)
return asyncio.wait(
[event.wait() for event in events],
[asyncio.create_task(event.wait()) for event in events],
return_when=asyncio.FIRST_COMPLETED,
timeout=timeout,
)

View File

@@ -3,23 +3,21 @@ import os
import shutil
import sys
from argparse import ArgumentParser, RawTextHelpFormatter
from argparse import Namespace
from functools import partial
from textwrap import indent
from typing import Any, List, Union
from typing import List, Union, cast
from sanic.app import Sanic
from sanic.application.logo import get_logo
from sanic.cli.arguments import Group
from sanic.log import error_logger
from sanic.worker.inspector import inspect
from sanic.cli.base import SanicArgumentParser, SanicHelpFormatter
from sanic.cli.inspector import make_inspector_parser
from sanic.cli.inspector_client import InspectorClient
from sanic.log import Colors, error_logger
from sanic.worker.loader import AppLoader
class SanicArgumentParser(ArgumentParser):
...
class SanicCLI:
DESCRIPTION = indent(
f"""
@@ -46,7 +44,7 @@ Or, a path to a directory to run as a simple HTTP server:
self.parser = SanicArgumentParser(
prog="sanic",
description=self.DESCRIPTION,
formatter_class=lambda prog: RawTextHelpFormatter(
formatter_class=lambda prog: SanicHelpFormatter(
prog,
max_help_position=36 if width > 96 else 24,
indent_increment=4,
@@ -58,16 +56,27 @@ Or, a path to a directory to run as a simple HTTP server:
self.main_process = (
os.environ.get("SANIC_RELOADER_PROCESS", "") != "true"
)
self.args: List[Any] = []
self.args: Namespace = Namespace()
self.groups: List[Group] = []
self.inspecting = False
def attach(self):
if len(sys.argv) > 1 and sys.argv[1] == "inspect":
self.inspecting = True
self.parser.description = get_logo(True)
make_inspector_parser(self.parser)
return
for group in Group._registry:
instance = group.create(self.parser)
instance.attach()
self.groups.append(instance)
def run(self, parse_args=None):
if self.inspecting:
self._inspector()
return
legacy_version = False
if not parse_args:
# This is to provide backwards compat -v to display version
@@ -86,36 +95,21 @@ Or, a path to a directory to run as a simple HTTP server:
self.args = self.parser.parse_args(args=parse_args)
self._precheck()
app_loader = AppLoader(
self.args.module,
self.args.factory,
self.args.simple,
self.args,
self.args.module, self.args.factory, self.args.simple, self.args
)
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
self._inspector_legacy(app_loader)
return
try:
app = self._get_app(app_loader)
kwargs = self._build_run_kwargs()
except ValueError as e:
error_logger.exception(f"Failed to run app: {e}")
else:
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
os.environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"
else:
for http_version in self.args.http:
app.prepare(**kwargs, version=http_version)
if self.args.inspect or self.args.inspect_raw or self.args.trigger:
action = self.args.trigger or (
"raw" if self.args.inspect_raw else "pretty"
)
inspect(
app.config.INSPECTOR_HOST,
app.config.INSPECTOR_PORT,
action,
)
del os.environ["SANIC_IGNORE_PRODUCTION_WARNING"]
return
for http_version in self.args.http:
app.prepare(**kwargs, version=http_version)
if self.args.single:
serve = Sanic.serve_single
elif self.args.legacy:
@@ -124,6 +118,64 @@ Or, a path to a directory to run as a simple HTTP server:
serve = partial(Sanic.serve, app_loader=app_loader)
serve(app)
def _inspector_legacy(self, app_loader: AppLoader):
host = port = None
module = cast(str, self.args.module)
if ":" in module:
maybe_host, maybe_port = module.rsplit(":", 1)
if maybe_port.isnumeric():
host, port = maybe_host, int(maybe_port)
if not host:
app = self._get_app(app_loader)
host, port = app.config.INSPECTOR_HOST, app.config.INSPECTOR_PORT
action = self.args.trigger or "info"
InspectorClient(
str(host), int(port or 6457), False, self.args.inspect_raw, ""
).do(action)
sys.stdout.write(
f"\n{Colors.BOLD}{Colors.YELLOW}WARNING:{Colors.END} "
"You are using the legacy CLI command that will be removed in "
f"{Colors.RED}v23.3{Colors.END}. See "
"https://sanic.dev/en/guide/release-notes/v22.12.html"
"#deprecations-and-removals or checkout the new "
"style commands:\n\n\t"
f"{Colors.YELLOW}sanic inspect --help{Colors.END}\n"
)
def _inspector(self):
args = sys.argv[2:]
self.args, unknown = self.parser.parse_known_args(args=args)
if unknown:
for arg in unknown:
if arg.startswith("--"):
try:
key, value = arg.split("=")
key = key.lstrip("-")
except ValueError:
value = False if arg.startswith("--no-") else True
key = (
arg.replace("--no-", "")
.lstrip("-")
.replace("-", "_")
)
setattr(self.args, key, value)
kwargs = {**self.args.__dict__}
host = kwargs.pop("host")
port = kwargs.pop("port")
secure = kwargs.pop("secure")
raw = kwargs.pop("raw")
action = kwargs.pop("action") or "info"
api_key = kwargs.pop("api_key")
positional = kwargs.pop("positional", None)
if action == "<custom>" and positional:
action = positional[0]
if len(positional) > 1:
kwargs["args"] = positional[1:]
InspectorClient(host, port, secure, raw, api_key).do(action, **kwargs)
def _precheck(self):
# Custom TLS mismatch handling for better diagnostics
if self.main_process and (

sanic/cli/base.py (new file, 35 lines)
View File

@@ -0,0 +1,35 @@
from argparse import (
SUPPRESS,
Action,
ArgumentParser,
RawTextHelpFormatter,
_SubParsersAction,
)
from typing import Any
class SanicArgumentParser(ArgumentParser):
def _check_value(self, action: Action, value: Any) -> None:
if isinstance(action, SanicSubParsersAction):
return
super()._check_value(action, value)
class SanicHelpFormatter(RawTextHelpFormatter):
def add_usage(self, usage, actions, groups, prefix=None):
if not usage:
usage = SUPPRESS
# Add one linebreak, but not two
self.add_text("\x1b[1A")
super().add_usage(usage, actions, groups, prefix)
class SanicSubParsersAction(_SubParsersAction):
def __call__(self, parser, namespace, values, option_string=None):
self._name_parser_map
parser_name = values[0]
if parser_name not in self._name_parser_map:
self._name_parser_map[parser_name] = parser
values = ["<custom>", *values]
super().__call__(parser, namespace, values, option_string)

sanic/cli/inspector.py (new file, 105 lines)
View File

@@ -0,0 +1,105 @@
from argparse import ArgumentParser
from sanic.application.logo import get_logo
from sanic.cli.base import SanicHelpFormatter, SanicSubParsersAction
def _add_shared(parser: ArgumentParser) -> None:
parser.add_argument(
"--host",
"-H",
default="localhost",
help="Inspector host address [default 127.0.0.1]",
)
parser.add_argument(
"--port",
"-p",
default=6457,
type=int,
help="Inspector port [default 6457]",
)
parser.add_argument(
"--secure",
"-s",
action="store_true",
help="Whether to access the Inspector via TLS encryption",
)
parser.add_argument("--api-key", "-k", help="Inspector authentication key")
parser.add_argument(
"--raw",
action="store_true",
help="Whether to output the raw response information",
)
class InspectorSubParser(ArgumentParser):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
_add_shared(self)
if not self.description:
self.description = ""
self.description = get_logo(True) + self.description
def make_inspector_parser(parser: ArgumentParser) -> None:
_add_shared(parser)
subparsers = parser.add_subparsers(
action=SanicSubParsersAction,
dest="action",
description=(
"Run one or none of the below subcommands. Using inspect without "
"a subcommand will fetch general information about the state "
"of the application instance.\n\n"
"Or, you can optionally follow inspect with a subcommand. "
"If you have created a custom "
"Inspector instance, then you can run custom commands. See "
"https://sanic.dev/en/guide/deployment/inspector.html "
"for more details."
),
title=" Subcommands",
parser_class=InspectorSubParser,
)
reloader = subparsers.add_parser(
"reload",
help="Trigger a reload of the server workers",
formatter_class=SanicHelpFormatter,
)
reloader.add_argument(
"--zero-downtime",
action="store_true",
help=(
"Whether to wait for the new process to be online before "
"terminating the old"
),
)
subparsers.add_parser(
"shutdown",
help="Shutdown the application and all processes",
formatter_class=SanicHelpFormatter,
)
scale = subparsers.add_parser(
"scale",
help="Scale the number of workers",
formatter_class=SanicHelpFormatter,
)
scale.add_argument(
"replicas",
type=int,
help="Number of workers requested",
)
custom = subparsers.add_parser(
"<custom>",
help="Run a custom command",
description=(
"keyword arguments:\n When running a custom command, you can "
"add keyword arguments by appending them to your command\n\n"
"\tsanic inspect foo --one=1 --two=2"
),
formatter_class=SanicHelpFormatter,
)
custom.add_argument(
"positional",
nargs="*",
help="Add one or more non-keyword args to your custom command",
)

View File

@@ -0,0 +1,119 @@
from __future__ import annotations
import sys
from http.client import RemoteDisconnected
from textwrap import indent
from typing import Any, Dict, Optional
from urllib.error import URLError
from urllib.request import Request as URequest
from urllib.request import urlopen
from sanic.application.logo import get_logo
from sanic.application.motd import MOTDTTY
from sanic.log import Colors
try: # no cov
from ujson import dumps, loads
except ModuleNotFoundError: # no cov
from json import dumps, loads # type: ignore
class InspectorClient:
def __init__(
self,
host: str,
port: int,
secure: bool,
raw: bool,
api_key: Optional[str],
) -> None:
self.scheme = "https" if secure else "http"
self.host = host
self.port = port
self.raw = raw
self.api_key = api_key
for scheme in ("http", "https"):
full = f"{scheme}://"
if self.host.startswith(full):
self.scheme = scheme
self.host = self.host[len(full) :] # noqa E203
def do(self, action: str, **kwargs: Any) -> None:
if action == "info":
self.info()
return
result = self.request(action, **kwargs).get("result")
if result:
out = (
dumps(result)
if isinstance(result, (list, dict))
else str(result)
)
sys.stdout.write(out + "\n")
def info(self) -> None:
out = sys.stdout.write
response = self.request("", "GET")
if self.raw or not response:
return
data = response["result"]
display = data.pop("info")
extra = display.pop("extra", {})
display["packages"] = ", ".join(display["packages"])
MOTDTTY(get_logo(), self.base_url, display, extra).display(
version=False,
action="Inspecting",
out=out,
)
for name, info in data["workers"].items():
info = "\n".join(
f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
for key, value in info.items()
)
out(
"\n"
+ indent(
"\n".join(
[
f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
info,
]
),
" ",
)
+ "\n"
)
def request(self, action: str, method: str = "POST", **kwargs: Any) -> Any:
url = f"{self.base_url}/{action}"
params: Dict[str, Any] = {"method": method, "headers": {}}
if kwargs:
params["data"] = dumps(kwargs).encode()
params["headers"]["content-type"] = "application/json"
if self.api_key:
params["headers"]["authorization"] = f"Bearer {self.api_key}"
request = URequest(url, **params)
try:
with urlopen(request) as response: # nosec B310
raw = response.read()
loaded = loads(raw)
if self.raw:
sys.stdout.write(dumps(loaded.get("result")) + "\n")
return {}
return loaded
except (URLError, RemoteDisconnected) as e:
sys.stderr.write(
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}{self.base_url}{Colors.END}\n"
"Either the application is not running, or it did not start "
f"an inspector instance.\n{e}\n"
)
sys.exit(1)
@property
def base_url(self):
return f"{self.scheme}://{self.host}:{self.port}"

View File

@@ -3,10 +3,23 @@ import os
import signal
import sys
from typing import Awaitable
from contextlib import contextmanager
from enum import Enum
from typing import Awaitable, Union
from multidict import CIMultiDict # type: ignore
from sanic.helpers import Default
if sys.version_info < (3, 8): # no cov
StartMethod = Union[Default, str]
else: # no cov
from typing import Literal
StartMethod = Union[
Default, Literal["fork"], Literal["forkserver"], Literal["spawn"]
]
OS_IS_WINDOWS = os.name == "nt"
UVLOOP_INSTALLED = False
@@ -18,6 +31,40 @@ try:
except ImportError:
pass
# Python 3.11 changed the way Enum formatting works for mixed-in types.
if sys.version_info < (3, 11, 0):
class StrEnum(str, Enum):
pass
else:
from enum import StrEnum # type: ignore # noqa
class UpperStrEnum(StrEnum):
def _generate_next_value_(name, start, count, last_values):
return name.upper()
def __eq__(self, value: object) -> bool:
value = str(value).upper()
return super().__eq__(value)
def __hash__(self) -> int:
return hash(self.value)
def __str__(self) -> str:
return self.value
@contextmanager
def use_context(method: StartMethod):
from sanic import Sanic
orig = Sanic.start_method
Sanic.start_method = method
yield
Sanic.start_method = orig
def enable_windows_color_support():
import ctypes

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
import sys
from abc import ABCMeta
from inspect import getmembers, isclass, isdatadescriptor
from os import environ
from pathlib import Path
@@ -46,6 +47,9 @@ DEFAULT_CONFIG = {
"INSPECTOR": False,
"INSPECTOR_HOST": "localhost",
"INSPECTOR_PORT": 6457,
"INSPECTOR_TLS_KEY": _default,
"INSPECTOR_TLS_CERT": _default,
"INSPECTOR_API_KEY": "",
"KEEP_ALIVE_TIMEOUT": 5, # 5 seconds
"KEEP_ALIVE": True,
"LOCAL_CERT_CREATOR": LocalCertCreator.AUTO,
@@ -72,7 +76,7 @@ DEFAULT_CONFIG = {
}
class DescriptorMeta(type):
class DescriptorMeta(ABCMeta):
def __init__(cls, *_):
cls.__setters__ = {name for name, _ in getmembers(cls, cls._is_setter)}
@@ -93,6 +97,9 @@ class Config(dict, metaclass=DescriptorMeta):
INSPECTOR: bool
INSPECTOR_HOST: str
INSPECTOR_PORT: int
INSPECTOR_TLS_KEY: Union[Path, str, Default]
INSPECTOR_TLS_CERT: Union[Path, str, Default]
INSPECTOR_API_KEY: str
KEEP_ALIVE_TIMEOUT: int
KEEP_ALIVE: bool
LOCAL_CERT_CREATOR: Union[str, LocalCertCreator]
@@ -120,7 +127,9 @@ class Config(dict, metaclass=DescriptorMeta):
def __init__(
self,
defaults: Dict[str, Union[str, bool, int, float, None]] = None,
defaults: Optional[
Dict[str, Union[str, bool, int, float, None]]
] = None,
env_prefix: Optional[str] = SANIC_PREFIX,
keep_alive: Optional[bool] = None,
*,
@@ -199,7 +208,7 @@ class Config(dict, metaclass=DescriptorMeta):
@property
def FALLBACK_ERROR_FORMAT(self) -> str:
if self._FALLBACK_ERROR_FORMAT is _default:
if isinstance(self._FALLBACK_ERROR_FORMAT, Default):
return DEFAULT_FORMAT
return self._FALLBACK_ERROR_FORMAT
@@ -207,7 +216,7 @@ class Config(dict, metaclass=DescriptorMeta):
def FALLBACK_ERROR_FORMAT(self, value):
self._check_error_format(value)
if (
self._FALLBACK_ERROR_FORMAT is not _default
not isinstance(self._FALLBACK_ERROR_FORMAT, Default)
and value != self._FALLBACK_ERROR_FORMAT
):
error_logger.warning(

View File

@@ -1,19 +1,9 @@
from enum import Enum, auto
from enum import auto
from sanic.compat import UpperStrEnum
class HTTPMethod(str, Enum):
def _generate_next_value_(name, start, count, last_values):
return name.upper()
def __eq__(self, value: object) -> bool:
value = str(value).upper()
return super().__eq__(value)
def __hash__(self) -> int:
return hash(self.value)
def __str__(self) -> str:
return self.value
class HTTPMethod(UpperStrEnum):
GET = auto()
POST = auto()
@@ -24,9 +14,7 @@ class HTTPMethod(str, Enum):
DELETE = auto()
class LocalCertCreator(str, Enum):
def _generate_next_value_(name, start, count, last_values):
return name.upper()
class LocalCertCreator(UpperStrEnum):
AUTO = auto()
TRUSTME = auto()

View File

@@ -8,6 +8,10 @@ class RequestCancelled(CancelledError):
quiet = True
class ServerKilled(Exception):
...
class SanicException(Exception):
message: str = ""

View File

@@ -36,14 +36,6 @@ class ErrorHandler:
self.debug = False
self.base = base
@classmethod
def finalize(cls, *args, **kwargs):
deprecation(
"ErrorHandler.finalize is deprecated and no longer needed. "
"Please remove update your code to remove it. ",
22.12,
)
def _full_lookup(self, exception, route_name: Optional[str] = None):
return self.lookup(exception, route_name)

View File

@@ -16,6 +16,7 @@ from sanic.exceptions import (
PayloadTooLarge,
RequestCancelled,
ServerError,
ServiceUnavailable,
)
from sanic.headers import format_http1_response
from sanic.helpers import has_message_body
@@ -70,7 +71,6 @@ class Http(Stream, metaclass=TouchUpMeta):
"request_body",
"request_bytes",
"request_bytes_left",
"request_max_size",
"response",
"response_func",
"response_size",
@@ -428,8 +428,11 @@ class Http(Stream, metaclass=TouchUpMeta):
if self.request is None:
self.create_empty_request()
request_middleware = not isinstance(exception, ServiceUnavailable)
try:
await app.handle_exception(self.request, exception)
await app.handle_exception(
self.request, exception, request_middleware
)
except Exception as e:
await app.handle_exception(self.request, e, False)

View File

@@ -19,7 +19,7 @@ class Stream:
request_max_size: Union[int, float]
__touchup__: Tuple[str, ...] = tuple()
__slots__ = ()
__slots__ = ("request_max_size",)
def respond(
self, response: BaseHTTPResponse

View File

@@ -24,13 +24,15 @@ def create_context(
certfile: Optional[str] = None,
keyfile: Optional[str] = None,
password: Optional[str] = None,
purpose: ssl.Purpose = ssl.Purpose.CLIENT_AUTH,
) -> ssl.SSLContext:
"""Create a context with secure crypto and HTTP/1.1 in protocols."""
context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
context = ssl.create_default_context(purpose=purpose)
context.minimum_version = ssl.TLSVersion.TLSv1_2
context.set_ciphers(":".join(CIPHERS_TLS12))
context.set_alpn_protocols(["http/1.1"])
context.sni_callback = server_name_callback
if purpose is ssl.Purpose.CLIENT_AUTH:
context.sni_callback = server_name_callback
if certfile and keyfile:
context.load_cert_chain(certfile, keyfile, password)
return context

View File

@@ -72,7 +72,8 @@ def get_ssl_context(
"without passing a TLS certificate. If you are developing "
"locally, please enable DEVELOPMENT mode and Sanic will "
"generate a localhost TLS certificate. For more information "
"please see: ___."
"please see: https://sanic.dev/en/guide/deployment/development."
"html#automatic-tls-certificate."
)
creator = CertCreator.select(
@@ -151,7 +152,8 @@ class CertCreator(ABC):
raise SanicException(
"Sanic could not find package to create a TLS certificate. "
"You must have either mkcert or trustme installed. See "
"_____ for more details."
"https://sanic.dev/en/guide/deployment/development.html"
"#automatic-tls-certificate for more details."
)
return creator
@@ -203,7 +205,8 @@ class MkcertCreator(CertCreator):
"to proceed. Installation instructions can be found here: "
"https://github.com/FiloSottile/mkcert.\n"
"Find out more information about your options here: "
"_____"
"https://sanic.dev/en/guide/deployment/development.html#"
"automatic-tls-certificate"
) from e
def generate_cert(self, localhost: str) -> ssl.SSLContext:
@@ -260,7 +263,8 @@ class TrustmeCreator(CertCreator):
"to proceed. Installation instructions can be found here: "
"https://github.com/python-trio/trustme.\n"
"Find out more information about your options here: "
"_____"
"https://sanic.dev/en/guide/deployment/development.html#"
"automatic-tls-certificate"
)
def generate_cert(self, localhost: str) -> ssl.SSLContext:

View File

@@ -2,12 +2,23 @@ import logging
import sys
from enum import Enum
from typing import Any, Dict
from typing import TYPE_CHECKING, Any, Dict
from warnings import warn
from sanic.compat import is_atty
# Python 3.11 changed the way Enum formatting works for mixed-in types.
if sys.version_info < (3, 11, 0):
class StrEnum(str, Enum):
pass
else:
if not TYPE_CHECKING:
from enum import StrEnum
LOGGING_CONFIG_DEFAULTS: Dict[str, Any] = dict( # no cov
version=1,
disable_existing_loggers=False,
@@ -68,7 +79,7 @@ Defult logging configuration
"""
class Colors(str, Enum): # no cov
class Colors(StrEnum): # no cov
END = "\033[0m"
BOLD = "\033[1m"
BLUE = "\033[34m"

View File

@@ -32,6 +32,9 @@ class Middleware:
def __call__(self, *args, **kwargs):
return self.func(*args, **kwargs)
def __hash__(self) -> int:
return hash(self.func)
def __repr__(self) -> str:
return (
f"{self.__class__.__name__}("

View File

@@ -218,6 +218,7 @@ class RouteMixin(metaclass=SanicMeta):
stream: bool = False,
version_prefix: str = "/v",
error_format: Optional[str] = None,
unquote: bool = False,
**ctx_kwargs: Any,
) -> RouteHandler:
"""A helper method to register class instance or
@@ -264,6 +265,7 @@ class RouteMixin(metaclass=SanicMeta):
name=name,
version_prefix=version_prefix,
error_format=error_format,
unquote=unquote,
**ctx_kwargs,
)(handler)
return handler

View File

@@ -20,7 +20,7 @@ class SignalMixin(metaclass=SanicMeta):
event: Union[str, Enum],
*,
apply: bool = True,
condition: Dict[str, Any] = None,
condition: Optional[Dict[str, Any]] = None,
exclusive: bool = True,
) -> Callable[[SignalHandler], SignalHandler]:
"""
@@ -64,7 +64,7 @@ class SignalMixin(metaclass=SanicMeta):
self,
handler: Optional[Callable[..., Any]],
event: str,
condition: Dict[str, Any] = None,
condition: Optional[Dict[str, Any]] = None,
exclusive: bool = True,
):
if not handler:

View File

@@ -19,7 +19,7 @@ from importlib import import_module
from multiprocessing import Manager, Pipe, get_context
from multiprocessing.context import BaseContext
from pathlib import Path
from socket import socket
from socket import SHUT_RDWR, socket
from ssl import SSLContext
from typing import (
TYPE_CHECKING,
@@ -27,6 +27,7 @@ from typing import (
Callable,
Dict,
List,
Mapping,
Optional,
Set,
Tuple,
@@ -40,8 +41,9 @@ from sanic.application.logo import get_logo
from sanic.application.motd import MOTD
from sanic.application.state import ApplicationServerInfo, Mode, ServerStage
from sanic.base.meta import SanicMeta
from sanic.compat import OS_IS_WINDOWS, is_atty
from sanic.helpers import _default
from sanic.compat import OS_IS_WINDOWS, StartMethod, is_atty
from sanic.exceptions import ServerKilled
from sanic.helpers import Default, _default
from sanic.http.constants import HTTP
from sanic.http.tls import get_ssl_context, process_to_context
from sanic.http.tls.context import SanicSSLContext
@@ -57,7 +59,6 @@ from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.protocols.websocket_protocol import WebSocketProtocol
from sanic.server.runners import serve, serve_multiple, serve_single
from sanic.server.socket import configure_socket, remove_unix_socket
from sanic.worker.inspector import Inspector
from sanic.worker.loader import AppLoader
from sanic.worker.manager import WorkerManager
from sanic.worker.multiplexer import WorkerMultiplexer
@@ -87,11 +88,13 @@ class StartupMixin(metaclass=SanicMeta):
state: ApplicationState
websocket_enabled: bool
multiplexer: WorkerMultiplexer
start_method: StartMethod = _default
def setup_loop(self):
if not self.asgi:
if self.config.USE_UVLOOP is True or (
self.config.USE_UVLOOP is _default and not OS_IS_WINDOWS
isinstance(self.config.USE_UVLOOP, Default)
and not OS_IS_WINDOWS
):
try_use_uvloop()
elif OS_IS_WINDOWS:
@@ -123,7 +126,7 @@ class StartupMixin(metaclass=SanicMeta):
register_sys_signals: bool = True,
access_log: Optional[bool] = None,
unix: Optional[str] = None,
loop: AbstractEventLoop = None,
loop: Optional[AbstractEventLoop] = None,
reload_dir: Optional[Union[List[str], str]] = None,
noisy_exceptions: Optional[bool] = None,
motd: bool = True,
@@ -222,7 +225,7 @@ class StartupMixin(metaclass=SanicMeta):
register_sys_signals: bool = True,
access_log: Optional[bool] = None,
unix: Optional[str] = None,
loop: AbstractEventLoop = None,
loop: Optional[AbstractEventLoop] = None,
reload_dir: Optional[Union[List[str], str]] = None,
noisy_exceptions: Optional[bool] = None,
motd: bool = True,
@@ -352,12 +355,12 @@ class StartupMixin(metaclass=SanicMeta):
debug: bool = False,
ssl: Union[None, SSLContext, dict, str, list, tuple] = None,
sock: Optional[socket] = None,
protocol: Type[Protocol] = None,
protocol: Optional[Type[Protocol]] = None,
backlog: int = 100,
access_log: Optional[bool] = None,
unix: Optional[str] = None,
return_asyncio_server: bool = False,
asyncio_server_kwargs: Dict[str, Any] = None,
asyncio_server_kwargs: Optional[Dict[str, Any]] = None,
noisy_exceptions: Optional[bool] = None,
) -> Optional[AsyncioServer]:
"""
@@ -431,7 +434,7 @@ class StartupMixin(metaclass=SanicMeta):
run_async=return_asyncio_server,
)
if self.config.USE_UVLOOP is not _default:
if not isinstance(self.config.USE_UVLOOP, Default):
error_logger.warning(
"You are trying to change the uvloop configuration, but "
"this is only effective when using the run(...) method. "
@@ -478,7 +481,7 @@ class StartupMixin(metaclass=SanicMeta):
sock: Optional[socket] = None,
unix: Optional[str] = None,
workers: int = 1,
loop: AbstractEventLoop = None,
loop: Optional[AbstractEventLoop] = None,
protocol: Type[Protocol] = HttpProtocol,
backlog: int = 100,
register_sys_signals: bool = True,
@@ -690,12 +693,17 @@ class StartupMixin(metaclass=SanicMeta):
return any(app.state.auto_reload for app in cls._app_registry.values())
@classmethod
def _get_context(cls) -> BaseContext:
method = (
"spawn"
if "linux" not in sys.platform or cls.should_auto_reload()
else "fork"
def _get_startup_method(cls) -> str:
return (
cls.start_method
if not isinstance(cls.start_method, Default)
else "spawn"
)
@classmethod
def _get_context(cls) -> BaseContext:
method = cls._get_startup_method()
logger.debug("Creating multiprocessing context using '%s'", method)
return get_context(method)
@classmethod
@@ -733,13 +741,13 @@ class StartupMixin(metaclass=SanicMeta):
except IndexError:
raise RuntimeError(
f"No server information found for {primary.name}. Perhaps you "
"need to run app.prepare(...)?\n"
"See ____ for more information."
"need to run app.prepare(...)?"
) from None
socks = []
sync_manager = Manager()
setup_ext(primary)
exit_code = 0
try:
primary_server_info.settings.pop("main_start", None)
primary_server_info.settings.pop("main_stop", None)
@@ -761,7 +769,7 @@ class StartupMixin(metaclass=SanicMeta):
]
primary_server_info.settings["run_multiple"] = True
monitor_sub, monitor_pub = Pipe(True)
worker_state: Dict[str, Any] = sync_manager.dict()
worker_state: Mapping[str, Any] = sync_manager.dict()
kwargs: Dict[str, Any] = {
**primary_server_info.settings,
"monitor_publisher": monitor_pub,
@@ -817,7 +825,7 @@ class StartupMixin(metaclass=SanicMeta):
reload_dirs: Set[Path] = primary.state.reload_dirs.union(
*(app.state.reload_dirs for app in apps)
)
reloader = Reloader(monitor_pub, 1.0, reload_dirs, app_loader)
reloader = Reloader(monitor_pub, 0, reload_dirs, app_loader)
manager.manage("Reloader", reloader, {}, transient=False)
inspector = None
@@ -833,12 +841,15 @@ class StartupMixin(metaclass=SanicMeta):
"packages": [sanic_version, *packages],
"extra": extra,
}
inspector = Inspector(
inspector = primary.inspector_class(
monitor_pub,
app_info,
worker_state,
primary.config.INSPECTOR_HOST,
primary.config.INSPECTOR_PORT,
primary.config.INSPECTOR_API_KEY,
primary.config.INSPECTOR_TLS_KEY,
primary.config.INSPECTOR_TLS_CERT,
)
manager.manage("Inspector", inspector, {}, transient=False)
@@ -849,6 +860,8 @@ class StartupMixin(metaclass=SanicMeta):
trigger_events(ready, loop, primary)
manager.run()
except ServerKilled:
exit_code = 1
except BaseException:
kwargs = primary_server_info.settings
error_logger.exception(
@@ -864,6 +877,7 @@ class StartupMixin(metaclass=SanicMeta):
sync_manager.shutdown()
for sock in socks:
sock.shutdown(SHUT_RDWR)
sock.close()
socks = []
trigger_events(main_stop, loop, primary)
@@ -873,6 +887,8 @@ class StartupMixin(metaclass=SanicMeta):
unix = kwargs.get("unix")
if unix:
remove_unix_socket(unix)
if exit_code:
os._exit(exit_code)
@classmethod
def serve_single(cls, primary: Optional[Sanic] = None) -> None:
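The hunk above replaces the old platform check ("fork" on Linux, "spawn" elsewhere) with _get_startup_method, which honors the new start_method class attribute and otherwise defaults to "spawn". A minimal sketch of overriding it (assuming the attribute is set before the app runs; "fork" is only available on platforms that support it):

    from sanic import Sanic

    # Force the old fork behaviour instead of the new "spawn" default
    Sanic.start_method = "fork"

    app = Sanic("ForkingApp")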


@@ -27,6 +27,7 @@ if TYPE_CHECKING:
from sanic.app import Sanic
import email.utils
import unicodedata
import uuid
from collections import defaultdict
@@ -104,6 +105,7 @@ class Request:
"_protocol",
"_remote_addr",
"_request_middleware_started",
"_response_middleware_started",
"_scheme",
"_socket",
"_stream_id",
@@ -179,6 +181,7 @@ class Request:
Tuple[bool, bool, str, str], List[Tuple[str, str]]
] = defaultdict(list)
self._request_middleware_started = False
self._response_middleware_started = False
self.responded: bool = False
self.route: Optional[Route] = None
self.stream: Optional[Stream] = None
@@ -337,7 +340,8 @@ class Request:
middleware = (
self.route and self.route.extra.response_middleware
) or self.app.response_middleware
if middleware:
if middleware and not self._response_middleware_started:
self._response_middleware_started = True
response = await self.app._run_response_middleware(
self, response, middleware
)
@@ -1081,6 +1085,16 @@ def parse_multipart_form(body, boundary):
form_parameters["filename*"]
)
file_name = unquote(value, encoding=encoding)
# Normalize to NFC (Apple MacOS/iOS send NFD)
# Notes:
# - No effect for Windows, Linux or Android clients which
# already send NFC
# - Python open() is tricky (creates files in NFC no matter
# which form you use)
if file_name is not None:
file_name = unicodedata.normalize("NFC", file_name)
elif form_header_field == "content-type":
content_type = form_header_value
content_charset = form_parameters.get("charset", "utf-8")
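The multipart change above normalizes uploaded filenames to NFC because Apple clients send NFD while other platforms already send NFC. A quick illustration of the difference (the filename is made up):

    import unicodedata

    nfd_name = "u\u0308bersicht.txt"                   # "übersicht.txt" in NFD, as sent by macOS/iOS
    nfc_name = unicodedata.normalize("NFC", nfd_name)  # combining mark folded into a single "ü"
    assert nfc_name == "\u00fcbersicht.txt"
    assert len(nfd_name) == len(nfc_name) + 1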


@@ -0,0 +1,36 @@
from .convenience import (
empty,
file,
file_stream,
html,
json,
raw,
redirect,
text,
validate_file,
)
from .types import (
BaseHTTPResponse,
HTTPResponse,
JSONResponse,
ResponseStream,
json_dumps,
)
__all__ = (
"BaseHTTPResponse",
"HTTPResponse",
"JSONResponse",
"ResponseStream",
"empty",
"json",
"text",
"raw",
"html",
"validate_file",
"file",
"redirect",
"file_stream",
"json_dumps",
)
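The new sanic/response package splits the old module into convenience.py and types.py while re-exporting everything listed above, so existing imports keep working; for example:

    from sanic.response import JSONResponse, json, json_dumps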


@@ -2,212 +2,20 @@ from __future__ import annotations
from datetime import datetime, timezone
from email.utils import formatdate, parsedate_to_datetime
from functools import partial
from mimetypes import guess_type
from os import path
from pathlib import PurePath
from time import time
from typing import (
TYPE_CHECKING,
Any,
AnyStr,
Callable,
Coroutine,
Dict,
Iterator,
Optional,
Tuple,
TypeVar,
Union,
)
from typing import Any, AnyStr, Callable, Dict, Optional, Union
from urllib.parse import quote_plus
from sanic.compat import Header, open_async, stat_async
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE
from sanic.cookies import CookieJar
from sanic.exceptions import SanicException, ServerError
from sanic.helpers import (
Default,
_default,
has_message_body,
remove_entity_headers,
)
from sanic.http import Http
from sanic.helpers import Default, _default
from sanic.log import logger
from sanic.models.protocol_types import HTMLProtocol, Range
if TYPE_CHECKING:
from sanic.asgi import ASGIApp
from sanic.http.http3 import HTTPReceiver
from sanic.request import Request
else:
Request = TypeVar("Request")
try:
from ujson import dumps as json_dumps
except ImportError:
# This is done in order to ensure that the JSON response is
# kept consistent across both ujson and inbuilt json usage.
from json import dumps
json_dumps = partial(dumps, separators=(",", ":"))
class BaseHTTPResponse:
"""
The base class for all HTTP Responses
"""
__slots__ = (
"asgi",
"body",
"content_type",
"stream",
"status",
"headers",
"_cookies",
)
_dumps = json_dumps
def __init__(self):
self.asgi: bool = False
self.body: Optional[bytes] = None
self.content_type: Optional[str] = None
self.stream: Optional[Union[Http, ASGIApp, HTTPReceiver]] = None
self.status: int = None
self.headers = Header({})
self._cookies: Optional[CookieJar] = None
def __repr__(self):
class_name = self.__class__.__name__
return f"<{class_name}: {self.status} {self.content_type}>"
def _encode_body(self, data: Optional[AnyStr]):
if data is None:
return b""
return (
data.encode() if hasattr(data, "encode") else data # type: ignore
)
@property
def cookies(self) -> CookieJar:
"""
The response cookies. Cookies should be set and written as follows:
.. code-block:: python
response.cookies["test"] = "It worked!"
response.cookies["test"]["domain"] = ".yummy-yummy-cookie.com"
response.cookies["test"]["httponly"] = True
`See user guide re: cookies
<https://sanicframework.org/guide/basics/cookies.html>`__
:return: the cookie jar
:rtype: CookieJar
"""
if self._cookies is None:
self._cookies = CookieJar(self.headers)
return self._cookies
@property
def processed_headers(self) -> Iterator[Tuple[bytes, bytes]]:
"""
Obtain a list of header tuples encoded in bytes for sending.
Add and remove headers based on status and content_type.
:return: response headers
:rtype: Tuple[Tuple[bytes, bytes], ...]
"""
# TODO: Make a blacklist set of header names and then filter with that
if self.status in (304, 412): # Not Modified, Precondition Failed
self.headers = remove_entity_headers(self.headers)
if has_message_body(self.status):
self.headers.setdefault("content-type", self.content_type)
# Encode headers into bytes
return (
(name.encode("ascii"), f"{value}".encode(errors="surrogateescape"))
for name, value in self.headers.items()
)
async def send(
self,
data: Optional[AnyStr] = None,
end_stream: Optional[bool] = None,
) -> None:
"""
Send any pending response headers and the given data as body.
:param data: str or bytes to be written
:param end_stream: whether to close the stream after this block
"""
if data is None and end_stream is None:
end_stream = True
if self.stream is None:
raise SanicException(
"No stream is connected to the response object instance."
)
if self.stream.send is None:
if end_stream and not data:
return
raise ServerError(
"Response stream was ended, no more response data is "
"allowed to be sent."
)
data = (
data.encode() # type: ignore
if hasattr(data, "encode")
else data or b""
)
await self.stream.send(
data, # type: ignore
end_stream=end_stream or False,
)
class HTTPResponse(BaseHTTPResponse):
"""
HTTP response to be sent back to the client.
:param body: the body content to be returned
:type body: Optional[bytes]
:param status: HTTP response number. **Default=200**
:type status: int
:param headers: headers to be returned
:type headers: Optional;
:param content_type: content type to be returned (as a header)
:type content_type: Optional[str]
"""
__slots__ = ()
def __init__(
self,
body: Optional[AnyStr] = None,
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
):
super().__init__()
self.content_type: Optional[str] = content_type
self.body = self._encode_body(body)
self.status = status
self.headers = Header(headers or {})
self._cookies = None
async def eof(self):
await self.send("", True)
async def __aenter__(self):
return self.send
async def __aexit__(self, *_):
await self.eof()
from .types import HTTPResponse, JSONResponse, ResponseStream
def empty(
@@ -229,7 +37,7 @@ def json(
content_type: str = "application/json",
dumps: Optional[Callable[..., str]] = None,
**kwargs: Any,
) -> HTTPResponse:
) -> JSONResponse:
"""
Returns response object with body in json format.
@@ -238,13 +46,14 @@ def json(
:param headers: Custom Headers.
:param kwargs: Remaining arguments that are passed to the json encoder.
"""
if not dumps:
dumps = BaseHTTPResponse._dumps
return HTTPResponse(
dumps(body, **kwargs),
headers=headers,
return JSONResponse(
body,
status=status,
headers=headers,
content_type=content_type,
dumps=dumps,
**kwargs,
)
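With the change above, json() returns a JSONResponse instead of a plain HTTPResponse, so the payload can still be adjusted after the response is created. A small sketch (route and field names are made up):

    from sanic import Sanic
    from sanic.response import json

    app = Sanic("JsonDemo")

    @app.get("/items")
    async def items(request):
        resp = json({"items": [], "count": 0})  # JSONResponse
        resp.update(count=1)                    # re-serializes the body automatically
        return resp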
@@ -465,80 +274,6 @@ def redirect(
)
class ResponseStream:
"""
ResponseStream is a compat layer to bridge the gap after the deprecation
of StreamingHTTPResponse. It will be removed when:
- file_stream is moved to new style streaming
- file and file_stream are combined into a single API
"""
__slots__ = (
"_cookies",
"content_type",
"headers",
"request",
"response",
"status",
"streaming_fn",
)
def __init__(
self,
streaming_fn: Callable[
[Union[BaseHTTPResponse, ResponseStream]],
Coroutine[Any, Any, None],
],
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
):
self.streaming_fn = streaming_fn
self.status = status
self.headers = headers or Header()
self.content_type = content_type
self.request: Optional[Request] = None
self._cookies: Optional[CookieJar] = None
async def write(self, message: str):
await self.response.send(message)
async def stream(self) -> HTTPResponse:
if not self.request:
raise ServerError("Attempted response to unknown request")
self.response = await self.request.respond(
headers=self.headers,
status=self.status,
content_type=self.content_type,
)
await self.streaming_fn(self)
return self.response
async def eof(self) -> None:
await self.response.eof()
@property
def cookies(self) -> CookieJar:
if self._cookies is None:
self._cookies = CookieJar(self.headers)
return self._cookies
@property
def processed_headers(self):
return self.response.processed_headers
@property
def body(self):
return self.response.body
def __call__(self, request: Request) -> ResponseStream:
self.request = request
return self
def __await__(self):
return self.stream().__await__()
async def file_stream(
location: Union[str, PurePath],
status: int = 200,

sanic/response/types.py (new file, 453 lines)

@@ -0,0 +1,453 @@
from __future__ import annotations
from functools import partial
from typing import (
TYPE_CHECKING,
Any,
AnyStr,
Callable,
Coroutine,
Dict,
Iterator,
Optional,
Tuple,
TypeVar,
Union,
)
from sanic.compat import Header
from sanic.cookies import CookieJar
from sanic.exceptions import SanicException, ServerError
from sanic.helpers import (
Default,
_default,
has_message_body,
remove_entity_headers,
)
from sanic.http import Http
if TYPE_CHECKING:
from sanic.asgi import ASGIApp
from sanic.http.http3 import HTTPReceiver
from sanic.request import Request
else:
Request = TypeVar("Request")
try:
from ujson import dumps as json_dumps
except ImportError:
# This is done in order to ensure that the JSON response is
# kept consistent across both ujson and inbuilt json usage.
from json import dumps
json_dumps = partial(dumps, separators=(",", ":"))
class BaseHTTPResponse:
"""
The base class for all HTTP Responses
"""
__slots__ = (
"asgi",
"body",
"content_type",
"stream",
"status",
"headers",
"_cookies",
)
_dumps = json_dumps
def __init__(self):
self.asgi: bool = False
self.body: Optional[bytes] = None
self.content_type: Optional[str] = None
self.stream: Optional[Union[Http, ASGIApp, HTTPReceiver]] = None
self.status: int = None
self.headers = Header({})
self._cookies: Optional[CookieJar] = None
def __repr__(self):
class_name = self.__class__.__name__
return f"<{class_name}: {self.status} {self.content_type}>"
def _encode_body(self, data: Optional[AnyStr]):
if data is None:
return b""
return (
data.encode() if hasattr(data, "encode") else data # type: ignore
)
@property
def cookies(self) -> CookieJar:
"""
The response cookies. Cookies should be set and written as follows:
.. code-block:: python
response.cookies["test"] = "It worked!"
response.cookies["test"]["domain"] = ".yummy-yummy-cookie.com"
response.cookies["test"]["httponly"] = True
`See user guide re: cookies
<https://sanic.dev/en/guide/basics/cookies.html>`
:return: the cookie jar
:rtype: CookieJar
"""
if self._cookies is None:
self._cookies = CookieJar(self.headers)
return self._cookies
@property
def processed_headers(self) -> Iterator[Tuple[bytes, bytes]]:
"""
Obtain a list of header tuples encoded in bytes for sending.
Add and remove headers based on status and content_type.
:return: response headers
:rtype: Tuple[Tuple[bytes, bytes], ...]
"""
# TODO: Make a blacklist set of header names and then filter with that
if self.status in (304, 412): # Not Modified, Precondition Failed
self.headers = remove_entity_headers(self.headers)
if has_message_body(self.status):
self.headers.setdefault("content-type", self.content_type)
# Encode headers into bytes
return (
(name.encode("ascii"), f"{value}".encode(errors="surrogateescape"))
for name, value in self.headers.items()
)
async def send(
self,
data: Optional[AnyStr] = None,
end_stream: Optional[bool] = None,
) -> None:
"""
Send any pending response headers and the given data as body.
:param data: str or bytes to be written
:param end_stream: whether to close the stream after this block
"""
if data is None and end_stream is None:
end_stream = True
if self.stream is None:
raise SanicException(
"No stream is connected to the response object instance."
)
if self.stream.send is None:
if end_stream and not data:
return
raise ServerError(
"Response stream was ended, no more response data is "
"allowed to be sent."
)
data = (
data.encode() # type: ignore
if hasattr(data, "encode")
else data or b""
)
await self.stream.send(
data, # type: ignore
end_stream=end_stream or False,
)
class HTTPResponse(BaseHTTPResponse):
"""
HTTP response to be sent back to the client.
:param body: the body content to be returned
:type body: Optional[bytes]
:param status: HTTP response number. **Default=200**
:type status: int
:param headers: headers to be returned
:type headers: Optional;
:param content_type: content type to be returned (as a header)
:type content_type: Optional[str]
"""
__slots__ = ()
def __init__(
self,
body: Optional[Any] = None,
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
):
super().__init__()
self.content_type: Optional[str] = content_type
self.body = self._encode_body(body)
self.status = status
self.headers = Header(headers or {})
self._cookies = None
async def eof(self):
await self.send("", True)
async def __aenter__(self):
return self.send
async def __aexit__(self, *_):
await self.eof()
class JSONResponse(HTTPResponse):
"""
HTTP response to be sent back to the client, when the response
is of json type. Offers several utilities to manipulate common
json data types.
:param body: the body content to be returned
:type body: Optional[Any]
:param status: HTTP response number. **Default=200**
:type status: int
:param headers: headers to be returned
:type headers: Optional
:param content_type: content type to be returned (as a header)
:type content_type: Optional[str]
:param dumps: json.dumps function to use
:type dumps: Optional[Callable]
"""
__slots__ = (
"_body",
"_body_manually_set",
"_initialized",
"_raw_body",
"_use_dumps",
"_use_dumps_kwargs",
)
def __init__(
self,
body: Optional[Any] = None,
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
dumps: Optional[Callable[..., str]] = None,
**kwargs: Any,
):
self._initialized = False
self._body_manually_set = False
self._use_dumps = dumps or BaseHTTPResponse._dumps
self._use_dumps_kwargs = kwargs
self._raw_body = body
super().__init__(
self._encode_body(self._use_dumps(body, **self._use_dumps_kwargs)),
headers=headers,
status=status,
content_type=content_type,
)
self._initialized = True
def _check_body_not_manually_set(self):
if self._body_manually_set:
raise SanicException(
"Cannot use raw_body after body has been manually set."
)
@property
def raw_body(self) -> Optional[Any]:
"""Returns the raw body, as long as body has not been manually
set previously.
NOTE: This object should not be mutated, as it will not be
reflected in the response body. If you need to mutate the
response body, consider using one of the provided methods in
this class or alternatively call set_body() with the mutated
object afterwards or set the raw_body property to it.
"""
self._check_body_not_manually_set()
return self._raw_body
@raw_body.setter
def raw_body(self, value: Any):
self._body_manually_set = False
self._body = self._encode_body(
self._use_dumps(value, **self._use_dumps_kwargs)
)
self._raw_body = value
@property # type: ignore
def body(self) -> Optional[bytes]: # type: ignore
return self._body
@body.setter
def body(self, value: Optional[bytes]):
self._body = value
if not self._initialized:
return
self._body_manually_set = True
def set_body(
self,
body: Any,
dumps: Optional[Callable[..., str]] = None,
**dumps_kwargs: Any,
) -> None:
"""Sets a new response body using the given dumps function
and kwargs, or falling back to the defaults given when
creating the object if none are specified.
"""
self._body_manually_set = False
self._raw_body = body
use_dumps = dumps or self._use_dumps
use_dumps_kwargs = dumps_kwargs if dumps else self._use_dumps_kwargs
self._body = self._encode_body(use_dumps(body, **use_dumps_kwargs))
def append(self, value: Any) -> None:
"""Appends a value to the response raw_body, ensuring that
body is kept up to date. This can only be used if raw_body
is a list.
"""
self._check_body_not_manually_set()
if not isinstance(self._raw_body, list):
raise SanicException("Cannot append to a non-list object.")
self._raw_body.append(value)
self.raw_body = self._raw_body
def extend(self, value: Any) -> None:
"""Extends the response's raw_body with the given values, ensuring
that body is kept up to date. This can only be used if raw_body is
a list.
"""
self._check_body_not_manually_set()
if not isinstance(self._raw_body, list):
raise SanicException("Cannot extend a non-list object.")
self._raw_body.extend(value)
self.raw_body = self._raw_body
def update(self, *args, **kwargs) -> None:
"""Updates the response's raw_body with the given values, ensuring
that body is kept up to date. This can only be used if raw_body is
a dict.
"""
self._check_body_not_manually_set()
if not isinstance(self._raw_body, dict):
raise SanicException("Cannot update a non-dict object.")
self._raw_body.update(*args, **kwargs)
self.raw_body = self._raw_body
def pop(self, key: Any, default: Any = _default) -> Any:
"""Pops a key from the response's raw_body, ensuring that body is
kept up to date. This can only be used if raw_body is a dict or a
list.
"""
self._check_body_not_manually_set()
if not isinstance(self._raw_body, (list, dict)):
raise SanicException(
"Cannot pop from a non-list and non-dict object."
)
if isinstance(default, Default):
value = self._raw_body.pop(key)
elif isinstance(self._raw_body, list):
raise TypeError("pop doesn't accept a default argument for lists")
else:
value = self._raw_body.pop(key, default)
self.raw_body = self._raw_body
return value
class ResponseStream:
"""
ResponseStream is a compat layer to bridge the gap after the deprecation
of StreamingHTTPResponse. It will be removed when:
- file_stream is moved to new style streaming
- file and file_stream are combined into a single API
"""
__slots__ = (
"_cookies",
"content_type",
"headers",
"request",
"response",
"status",
"streaming_fn",
)
def __init__(
self,
streaming_fn: Callable[
[Union[BaseHTTPResponse, ResponseStream]],
Coroutine[Any, Any, None],
],
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
):
self.streaming_fn = streaming_fn
self.status = status
self.headers = headers or Header()
self.content_type = content_type
self.request: Optional[Request] = None
self._cookies: Optional[CookieJar] = None
async def write(self, message: str):
await self.response.send(message)
async def stream(self) -> HTTPResponse:
if not self.request:
raise ServerError("Attempted response to unknown request")
self.response = await self.request.respond(
headers=self.headers,
status=self.status,
content_type=self.content_type,
)
await self.streaming_fn(self)
return self.response
async def eof(self) -> None:
await self.response.eof()
@property
def cookies(self) -> CookieJar:
if self._cookies is None:
self._cookies = CookieJar(self.headers)
return self._cookies
@property
def processed_headers(self):
return self.response.processed_headers
@property
def body(self):
return self.response.body
def __call__(self, request: Request) -> ResponseStream:
self.request = request
return self
def __await__(self):
return self.stream().__await__()
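The helpers above (append, extend, update, pop, set_body, raw_body) keep body and raw_body in sync until body is set manually. A quick sketch assuming the default compact dumps:

    from sanic.response import JSONResponse

    resp = JSONResponse([])
    resp.append(1)
    resp.extend([2, 3])
    assert resp.raw_body == [1, 2, 3]
    assert resp.body == b"[1,2,3]"

    resp.body = b"overridden"  # manual override...
    # resp.append(4)           # ...would now raise SanicException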


@@ -1,11 +1,11 @@
import asyncio
import sys
from distutils.util import strtobool
from os import getenv
from sanic.compat import OS_IS_WINDOWS
from sanic.log import error_logger
from sanic.utils import str_to_bool
def try_use_uvloop() -> None:
@@ -35,7 +35,7 @@ def try_use_uvloop() -> None:
)
return
uvloop_install_removed = strtobool(getenv("SANIC_NO_UVLOOP", "no"))
uvloop_install_removed = str_to_bool(getenv("SANIC_NO_UVLOOP", "no"))
if uvloop_install_removed:
error_logger.info(
"You are requesting to run Sanic using uvloop, but the "


@@ -1,7 +1,13 @@
from typing import TYPE_CHECKING, Optional, Sequence, cast
from websockets.connection import CLOSED, CLOSING, OPEN
from websockets.server import ServerConnection
try: # websockets < 11.0
from websockets.connection import State
from websockets.server import ServerConnection as ServerProtocol
except ImportError: # websockets >= 11.0
from websockets.protocol import State # type: ignore
from websockets.server import ServerProtocol # type: ignore
from websockets.typing import Subprotocol
from sanic.exceptions import ServerError
@@ -15,6 +21,11 @@ if TYPE_CHECKING:
from websockets import http11
OPEN = State.OPEN
CLOSING = State.CLOSING
CLOSED = State.CLOSED
class WebSocketProtocol(HttpProtocol):
__slots__ = (
"websocket",
@@ -74,7 +85,7 @@ class WebSocketProtocol(HttpProtocol):
# Called by Sanic Server when shutting down
# If we've upgraded to websocket, shut it down
if self.websocket is not None:
if self.websocket.connection.state in (CLOSING, CLOSED):
if self.websocket.ws_proto.state in (CLOSING, CLOSED):
return True
elif self.websocket.loop is not None:
self.websocket.loop.create_task(self.websocket.close(1001))
@@ -90,7 +101,7 @@ class WebSocketProtocol(HttpProtocol):
try:
if subprotocols is not None:
# subprotocols can be a set or frozenset,
# but ServerConnection needs a list
# but ServerProtocol needs a list
subprotocols = cast(
Optional[Sequence[Subprotocol]],
list(
@@ -100,13 +111,13 @@ class WebSocketProtocol(HttpProtocol):
]
),
)
ws_conn = ServerConnection(
ws_proto = ServerProtocol(
max_size=self.websocket_max_size,
subprotocols=subprotocols,
state=OPEN,
logger=logger,
)
resp: "http11.Response" = ws_conn.accept(request)
resp: "http11.Response" = ws_proto.accept(request)
except Exception:
msg = (
"Failed to open a WebSocket connection.\n"
@@ -129,7 +140,7 @@ class WebSocketProtocol(HttpProtocol):
else:
raise ServerError(resp.body, resp.status_code)
self.websocket = WebsocketImplProtocol(
ws_conn,
ws_proto,
ping_interval=self.websocket_ping_interval,
ping_timeout=self.websocket_ping_timeout,
close_timeout=self.websocket_timeout,


@@ -200,7 +200,7 @@ def _serve_http_1(
asyncio_server_kwargs = (
asyncio_server_kwargs if asyncio_server_kwargs else {}
)
if OS_IS_WINDOWS:
if OS_IS_WINDOWS and sock:
pid = os.getpid()
sock = sock.share(pid)
sock = socket.fromshare(sock)
@@ -229,6 +229,7 @@ def _serve_http_1(
loop.run_until_complete(app._startup())
loop.run_until_complete(app._server_event("init", "before"))
app.ack()
try:
http_server = loop.run_until_complete(server_coroutine)
@@ -306,6 +307,7 @@ def _serve_http_3(
server = AsyncioServer(app, loop, coro, [])
loop.run_until_complete(server.startup())
loop.run_until_complete(server.before_start())
app.ack()
loop.run_until_complete(server)
_setup_system_signals(app, run_multiple, register_sys_signals, loop)
loop.run_until_complete(server.after_start())


@@ -113,13 +113,16 @@ def configure_socket(
backlog=backlog,
)
except OSError as e: # no cov
raise ServerError(
f"Sanic server could not start: {e}.\n"
error = ServerError(
f"Sanic server could not start: {e}.\n\n"
"This may have happened if you are running Sanic in the "
"global scope and not inside of a "
'`if __name__ == "__main__"` block. See more information: '
"____."
) from e
'`if __name__ == "__main__"` block.\n\nSee more information: '
"https://sanic.dev/en/guide/deployment/manager.html#"
"how-sanic-server-starts-processes\n"
)
error.quiet = True
raise error
sock.set_inheritable(True)
server_settings["sock"] = sock
server_settings["host"] = None


@@ -9,8 +9,10 @@ from typing import (
Union,
)
from sanic.exceptions import InvalidUsage
ASIMessage = MutableMapping[str, Any]
ASGIMessage = MutableMapping[str, Any]
class WebSocketConnection:
@@ -25,8 +27,8 @@ class WebSocketConnection:
def __init__(
self,
send: Callable[[ASIMessage], Awaitable[None]],
receive: Callable[[], Awaitable[ASIMessage]],
send: Callable[[ASGIMessage], Awaitable[None]],
receive: Callable[[], Awaitable[ASGIMessage]],
subprotocols: Optional[List[str]] = None,
) -> None:
self._send = send
@@ -47,7 +49,13 @@ class WebSocketConnection:
message = await self._receive()
if message["type"] == "websocket.receive":
return message["text"]
try:
return message["text"]
except KeyError:
try:
return message["bytes"].decode()
except KeyError:
raise InvalidUsage("Bad ASGI message received")
elif message["type"] == "websocket.disconnect":
pass
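With the ASGI change above, recv() now returns text for both text and bytes frames (bytes are decoded) instead of failing on a missing "text" key. A hedged handler sketch:

    from sanic import Sanic

    app = Sanic("WsDemo")

    @app.websocket("/feed")
    async def feed(request, ws):
        message = await ws.recv()   # str, even if the client sent a bytes frame under ASGI
        await ws.send(f"echo: {message}")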


@@ -12,25 +12,37 @@ from typing import (
Union,
)
from websockets.connection import CLOSED, CLOSING, OPEN, Event
from websockets.exceptions import (
ConnectionClosed,
ConnectionClosedError,
ConnectionClosedOK,
)
from websockets.frames import Frame, Opcode
from websockets.server import ServerConnection
try: # websockets < 11.0
from websockets.connection import Event, State
from websockets.server import ServerConnection as ServerProtocol
except ImportError: # websockets >= 11.0
from websockets.protocol import Event, State # type: ignore
from websockets.server import ServerProtocol # type: ignore
from websockets.typing import Data
from sanic.log import error_logger, logger
from sanic.log import deprecation, error_logger, logger
from sanic.server.protocols.base_protocol import SanicProtocol
from ...exceptions import ServerError, WebsocketClosed
from .frame import WebsocketFrameAssembler
OPEN = State.OPEN
CLOSING = State.CLOSING
CLOSED = State.CLOSED
class WebsocketImplProtocol:
connection: ServerConnection
ws_proto: ServerProtocol
io_proto: Optional[SanicProtocol]
loop: Optional[asyncio.AbstractEventLoop]
max_queue: int
@@ -56,14 +68,14 @@ class WebsocketImplProtocol:
def __init__(
self,
connection,
ws_proto,
max_queue=None,
ping_interval: Optional[float] = 20,
ping_timeout: Optional[float] = 20,
close_timeout: float = 10,
loop=None,
):
self.connection = connection
self.ws_proto = ws_proto
self.io_proto = None
self.loop = None
self.max_queue = max_queue
@@ -85,7 +97,16 @@ class WebsocketImplProtocol:
@property
def subprotocol(self):
return self.connection.subprotocol
return self.ws_proto.subprotocol
@property
def connection(self):
deprecation(
"The connection property has been deprecated and will be removed. "
"Please use the ws_proto property instead going forward.",
22.6,
)
return self.ws_proto
def pause_frames(self):
if not self.can_pause:
@@ -299,15 +320,15 @@ class WebsocketImplProtocol:
# Not draining the write buffer is acceptable in this context.
# clear the send buffer
_ = self.connection.data_to_send()
_ = self.ws_proto.data_to_send()
# If we're not already CLOSED or CLOSING, then send the close.
if self.connection.state is OPEN:
if self.ws_proto.state is OPEN:
if code in (1000, 1001):
self.connection.send_close(code, reason)
self.ws_proto.send_close(code, reason)
else:
self.connection.fail(code, reason)
self.ws_proto.fail(code, reason)
try:
data_to_send = self.connection.data_to_send()
data_to_send = self.ws_proto.data_to_send()
while (
len(data_to_send)
and self.io_proto
@@ -321,7 +342,7 @@ class WebsocketImplProtocol:
...
if code == 1006:
# Special case: 1006 consider the transport already closed
self.connection.state = CLOSED
self.ws_proto.state = CLOSED
if self.data_finished_fut and not self.data_finished_fut.done():
# We have a graceful auto-closer. Use it to close the connection.
self.data_finished_fut.cancel()
@@ -342,10 +363,10 @@ class WebsocketImplProtocol:
# In Python Version 3.7: pause_reading is idempotent
# i.e. it can be called when the transport is already paused or closed.
self.io_proto.transport.pause_reading()
if self.connection.state == OPEN:
data_to_send = self.connection.data_to_send()
self.connection.send_close(code, reason)
data_to_send.extend(self.connection.data_to_send())
if self.ws_proto.state == OPEN:
data_to_send = self.ws_proto.data_to_send()
self.ws_proto.send_close(code, reason)
data_to_send.extend(self.ws_proto.data_to_send())
try:
while (
len(data_to_send)
@@ -454,7 +475,7 @@ class WebsocketImplProtocol:
Raise ConnectionClosed in pending keepalive pings.
They'll never receive a pong once the connection is closed.
"""
if self.connection.state is not CLOSED:
if self.ws_proto.state is not CLOSED:
raise ServerError(
"Webscoket about_pings should only be called "
"after connection state is changed to CLOSED"
@@ -483,9 +504,9 @@ class WebsocketImplProtocol:
self.fail_connection(code, reason)
return
async with self.conn_mutex:
if self.connection.state is OPEN:
self.connection.send_close(code, reason)
data_to_send = self.connection.data_to_send()
if self.ws_proto.state is OPEN:
self.ws_proto.send_close(code, reason)
data_to_send = self.ws_proto.data_to_send()
await self.send_data(data_to_send)
async def recv(self, timeout: Optional[float] = None) -> Optional[Data]:
@@ -515,7 +536,7 @@ class WebsocketImplProtocol:
"already waiting for the next message"
)
await self.recv_lock.acquire()
if self.connection.state is CLOSED:
if self.ws_proto.state is CLOSED:
self.recv_lock.release()
raise WebsocketClosed(
"Cannot receive from websocket interface after it is closed."
@@ -566,7 +587,7 @@ class WebsocketImplProtocol:
"for the next message"
)
await self.recv_lock.acquire()
if self.connection.state is CLOSED:
if self.ws_proto.state is CLOSED:
self.recv_lock.release()
raise WebsocketClosed(
"Cannot receive from websocket interface after it is closed."
@@ -625,7 +646,7 @@ class WebsocketImplProtocol:
"is already waiting for the next message"
)
await self.recv_lock.acquire()
if self.connection.state is CLOSED:
if self.ws_proto.state is CLOSED:
self.recv_lock.release()
raise WebsocketClosed(
"Cannot receive from websocket interface after it is closed."
@@ -666,7 +687,7 @@ class WebsocketImplProtocol:
"""
async with self.conn_mutex:
if self.connection.state in (CLOSED, CLOSING):
if self.ws_proto.state in (CLOSED, CLOSING):
raise WebsocketClosed(
"Cannot write to websocket interface after it is closed."
)
@@ -679,12 +700,12 @@ class WebsocketImplProtocol:
# strings and bytes-like objects are iterable.
if isinstance(message, str):
self.connection.send_text(message.encode("utf-8"))
await self.send_data(self.connection.data_to_send())
self.ws_proto.send_text(message.encode("utf-8"))
await self.send_data(self.ws_proto.data_to_send())
elif isinstance(message, (bytes, bytearray, memoryview)):
self.connection.send_binary(message)
await self.send_data(self.connection.data_to_send())
self.ws_proto.send_binary(message)
await self.send_data(self.ws_proto.data_to_send())
elif isinstance(message, Mapping):
# Catch a common mistake -- passing a dict to send().
@@ -713,7 +734,7 @@ class WebsocketImplProtocol:
(which will be encoded to UTF-8) or a bytes-like object.
"""
async with self.conn_mutex:
if self.connection.state in (CLOSED, CLOSING):
if self.ws_proto.state in (CLOSED, CLOSING):
raise WebsocketClosed(
"Cannot send a ping when the websocket interface "
"is closed."
@@ -741,8 +762,8 @@ class WebsocketImplProtocol:
self.pings[data] = self.io_proto.loop.create_future()
self.connection.send_ping(data)
await self.send_data(self.connection.data_to_send())
self.ws_proto.send_ping(data)
await self.send_data(self.ws_proto.data_to_send())
return asyncio.shield(self.pings[data])
@@ -754,15 +775,15 @@ class WebsocketImplProtocol:
be a string (which will be encoded to UTF-8) or a bytes-like object.
"""
async with self.conn_mutex:
if self.connection.state in (CLOSED, CLOSING):
if self.ws_proto.state in (CLOSED, CLOSING):
# Cannot send pong after transport is shutting down
return
if isinstance(data, str):
data = data.encode("utf-8")
elif isinstance(data, (bytearray, memoryview)):
data = bytes(data)
self.connection.send_pong(data)
await self.send_data(self.connection.data_to_send())
self.ws_proto.send_pong(data)
await self.send_data(self.ws_proto.data_to_send())
async def send_data(self, data_to_send):
for data in data_to_send:
@@ -784,7 +805,7 @@ class WebsocketImplProtocol:
SanicProtocol.close(self.io_proto, timeout=1.0)
async def async_data_received(self, data_to_send, events_to_process):
if self.connection.state in (OPEN, CLOSING) and len(data_to_send) > 0:
if self.ws_proto.state in (OPEN, CLOSING) and len(data_to_send) > 0:
# receiving data can generate data to send (eg, pong for a ping)
# send connection.data_to_send()
await self.send_data(data_to_send)
@@ -792,9 +813,9 @@ class WebsocketImplProtocol:
await self.process_events(events_to_process)
def data_received(self, data):
self.connection.receive_data(data)
data_to_send = self.connection.data_to_send()
events_to_process = self.connection.events_received()
self.ws_proto.receive_data(data)
data_to_send = self.ws_proto.data_to_send()
events_to_process = self.ws_proto.events_received()
if len(data_to_send) > 0 or len(events_to_process) > 0:
asyncio.create_task(
self.async_data_received(data_to_send, events_to_process)
@@ -803,7 +824,7 @@ class WebsocketImplProtocol:
async def async_eof_received(self, data_to_send, events_to_process):
# receiving EOF can generate data to send
# send connection.data_to_send()
if self.connection.state in (OPEN, CLOSING) and len(data_to_send) > 0:
if self.ws_proto.state in (OPEN, CLOSING) and len(data_to_send) > 0:
await self.send_data(data_to_send)
if len(events_to_process) > 0:
await self.process_events(events_to_process)
@@ -823,9 +844,9 @@ class WebsocketImplProtocol:
SanicProtocol.close(self.io_proto, timeout=1.0)
def eof_received(self) -> Optional[bool]:
self.connection.receive_eof()
data_to_send = self.connection.data_to_send()
events_to_process = self.connection.events_received()
self.ws_proto.receive_eof()
data_to_send = self.ws_proto.data_to_send()
events_to_process = self.ws_proto.events_received()
asyncio.create_task(
self.async_eof_received(data_to_send, events_to_process)
)
@@ -835,11 +856,11 @@ class WebsocketImplProtocol:
"""
The WebSocket Connection is Closed.
"""
if not self.connection.state == CLOSED:
if not self.ws_proto.state == CLOSED:
# signal to the websocket connection handler
# we've lost the connection
self.connection.fail(code=1006)
self.connection.state = CLOSED
self.ws_proto.fail(code=1006)
self.ws_proto.state = CLOSED
self.abort_pings()
if self.connection_lost_waiter:


@@ -154,9 +154,7 @@ class SignalRouter(BaseRouter):
try:
for signal in signals:
params.pop("__trigger__", None)
requirements = getattr(
signal.handler, "__requirements__", None
)
requirements = signal.extra.requirements
if (
(condition is None and signal.ctx.exclusive is False)
or (condition is None and not requirements)
@@ -219,8 +217,13 @@ class SignalRouter(BaseRouter):
if not trigger:
event = ".".join([*parts[:2], "<__trigger__>"])
handler.__requirements__ = condition # type: ignore
handler.__trigger__ = trigger # type: ignore
try:
# Attaching __requirements__ and __trigger__ to the handler
# is deprecated and will be removed in v23.6.
handler.__requirements__ = condition # type: ignore
handler.__trigger__ = trigger # type: ignore
except AttributeError:
pass
signal = super().add(
event,
@@ -232,6 +235,7 @@ class SignalRouter(BaseRouter):
signal.ctx.exclusive = exclusive
signal.ctx.trigger = trigger
signal.ctx.definition = event_definition
signal.extra.requirements = condition
return cast(Signal, signal)
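Conditions are now stored on signal.extra.requirements; attaching __requirements__ to the handler remains only as a deprecated fallback. The public API is unchanged; a sketch with a made-up event name:

    from sanic import Sanic
    from sanic.response import empty

    app = Sanic("SignalDemo")

    @app.signal("foo.bar.baz", condition={"mode": "debug"})
    async def handle_baz(**context):
        ...

    @app.get("/")
    async def trigger(request):
        # Reaches handle_baz only when the condition matches its stored requirements
        await app.dispatch("foo.bar.baz", condition={"mode": "debug"})
        return empty()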


@@ -44,7 +44,9 @@ class SharedContext(SimpleNamespace):
f"{Colors.YELLOW}with type {Colors.PURPLE}{type(value)} "
f"{Colors.YELLOW}was added to shared_ctx. It may not "
"not function as intended. Consider using the regular "
f"ctx. For more information, please see ____.{Colors.END}"
f"ctx.\nFor more information, please see https://sanic.dev/en"
"/guide/deployment/manager.html#using-shared-context-between-"
f"worker-processes.{Colors.END}"
)
@property
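The warning above fires when a non-multiprocessing-safe object is placed on shared_ctx. A minimal sketch of the intended pattern (the attribute name is illustrative):

    from multiprocessing import Queue

    from sanic import Sanic

    app = Sanic("SharedDemo")

    @app.main_process_start
    async def setup_shared(app, _):
        app.shared_ctx.jobs = Queue()   # multiprocessing-safe: no warning
        # app.shared_ctx.cache = {}     # would trigger the warning above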

sanic/worker/constants.py (new file, 18 lines)

@@ -0,0 +1,18 @@
from enum import IntEnum, auto
from sanic.compat import UpperStrEnum
class RestartOrder(UpperStrEnum):
SHUTDOWN_FIRST = auto()
STARTUP_FIRST = auto()
class ProcessState(IntEnum):
IDLE = auto()
RESTARTING = auto()
STARTING = auto()
STARTED = auto()
ACKED = auto()
JOINED = auto()
TERMINATED = auto()
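RestartOrder backs the new restart behaviour: SHUTDOWN_FIRST is the old terminate-then-spawn flow, while STARTUP_FIRST starts and acks the replacement before terminating the old process. From application code it is reached through the multiplexer shown further down; a hedged sketch:

    from sanic import Request, Sanic
    from sanic.response import json

    app = Sanic("RestartDemo")

    @app.post("/restart")
    async def restart(request: Request):
        # zero_downtime=True maps to RestartOrder.STARTUP_FIRST
        request.app.m.restart(all_workers=True, zero_downtime=True)
        return json({"restarting": True})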


@@ -1,23 +1,17 @@
import sys
from __future__ import annotations
from datetime import datetime
from inspect import isawaitable
from multiprocessing.connection import Connection
from signal import SIGINT, SIGTERM
from signal import signal as signal_func
from socket import AF_INET, SOCK_STREAM, socket, timeout
from textwrap import indent
from typing import Any, Dict
from os import environ
from pathlib import Path
from typing import Any, Dict, Mapping, Union
from sanic.application.logo import get_logo
from sanic.application.motd import MOTDTTY
from sanic.log import Colors, error_logger, logger
from sanic.server.socket import configure_socket
try: # no cov
from ujson import dumps, loads
except ModuleNotFoundError: # no cov
from json import dumps, loads # type: ignore
from sanic.exceptions import Unauthorized
from sanic.helpers import Default
from sanic.log import logger
from sanic.request import Request
from sanic.response import json
class Inspector:
@@ -25,118 +19,105 @@ class Inspector:
self,
publisher: Connection,
app_info: Dict[str, Any],
worker_state: Dict[str, Any],
worker_state: Mapping[str, Any],
host: str,
port: int,
api_key: str,
tls_key: Union[Path, str, Default],
tls_cert: Union[Path, str, Default],
):
self._publisher = publisher
self.run = True
self.app_info = app_info
self.worker_state = worker_state
self.host = host
self.port = port
self.api_key = api_key
self.tls_key = tls_key
self.tls_cert = tls_cert
def __call__(self) -> None:
sock = configure_socket(
{"host": self.host, "port": self.port, "unix": None, "backlog": 1}
def __call__(self, run=True, **_) -> Inspector:
from sanic import Sanic
self.app = Sanic("Inspector")
self._setup()
if run:
self.app.run(
host=self.host,
port=self.port,
single_process=True,
ssl={"key": self.tls_key, "cert": self.tls_cert}
if not isinstance(self.tls_key, Default)
and not isinstance(self.tls_cert, Default)
else None,
)
return self
def _setup(self):
self.app.get("/")(self._info)
self.app.post("/<action:str>")(self._action)
if self.api_key:
self.app.on_request(self._authentication)
environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "true"
def _authentication(self, request: Request) -> None:
if request.token != self.api_key:
raise Unauthorized("Bad API key")
async def _action(self, request: Request, action: str):
logger.info("Incoming inspector action: %s", action)
output: Any = None
method = getattr(self, action, None)
if method:
kwargs = {}
if request.body:
kwargs = request.json
args = kwargs.pop("args", ())
output = method(*args, **kwargs)
if isawaitable(output):
output = await output
return await self._respond(request, output)
async def _info(self, request: Request):
return await self._respond(request, self._state_to_json())
async def _respond(self, request: Request, output: Any):
name = request.match_info.get("action", "info")
return json(
{"meta": {"action": name}, "result": output},
escape_forward_slashes=False,
)
assert sock
signal_func(SIGINT, self.stop)
signal_func(SIGTERM, self.stop)
logger.info(f"Inspector started on: {sock.getsockname()}")
sock.settimeout(0.5)
try:
while self.run:
try:
conn, _ = sock.accept()
except timeout:
continue
else:
action = conn.recv(64)
if action == b"reload":
conn.send(b"\n")
self.reload()
elif action == b"shutdown":
conn.send(b"\n")
self.shutdown()
else:
data = dumps(self.state_to_json())
conn.send(data.encode())
conn.close()
finally:
logger.debug("Inspector closing")
sock.close()
def stop(self, *_):
self.run = False
def state_to_json(self):
def _state_to_json(self) -> Dict[str, Any]:
output = {"info": self.app_info}
output["workers"] = self.make_safe(dict(self.worker_state))
output["workers"] = self._make_safe(dict(self.worker_state))
return output
def reload(self):
message = "__ALL_PROCESSES__:"
self._publisher.send(message)
def shutdown(self):
message = "__TERMINATE__"
self._publisher.send(message)
@staticmethod
def make_safe(obj: Dict[str, Any]) -> Dict[str, Any]:
def _make_safe(obj: Dict[str, Any]) -> Dict[str, Any]:
for key, value in obj.items():
if isinstance(value, dict):
obj[key] = Inspector.make_safe(value)
obj[key] = Inspector._make_safe(value)
elif isinstance(value, datetime):
obj[key] = value.isoformat()
return obj
def reload(self, zero_downtime: bool = False) -> None:
message = "__ALL_PROCESSES__:"
if zero_downtime:
message += ":STARTUP_FIRST"
self._publisher.send(message)
def inspect(host: str, port: int, action: str):
out = sys.stdout.write
with socket(AF_INET, SOCK_STREAM) as sock:
try:
sock.connect((host, port))
except ConnectionRefusedError:
error_logger.error(
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}{(host, port)}{Colors.END}\n"
"Either the application is not running, or it did not start "
"an inspector instance."
)
sock.close()
sys.exit(1)
sock.sendall(action.encode())
data = sock.recv(4096)
if action == "raw":
out(data.decode())
elif action == "pretty":
loaded = loads(data)
display = loaded.pop("info")
extra = display.pop("extra", {})
display["packages"] = ", ".join(display["packages"])
MOTDTTY(get_logo(), f"{host}:{port}", display, extra).display(
version=False,
action="Inspecting",
out=out,
)
for name, info in loaded["workers"].items():
info = "\n".join(
f"\t{key}: {Colors.BLUE}{value}{Colors.END}"
for key, value in info.items()
)
out(
"\n"
+ indent(
"\n".join(
[
f"{Colors.BOLD}{Colors.SANIC}{name}{Colors.END}",
info,
]
),
" ",
)
+ "\n"
)
def scale(self, replicas) -> str:
num_workers = 1
if replicas:
num_workers = int(replicas)
log_msg = f"Scaling to {num_workers}"
logger.info(log_msg)
message = f"__SCALE__:{num_workers}"
self._publisher.send(message)
return log_msg
def shutdown(self) -> None:
message = "__TERMINATE__"
self._publisher.send(message)
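The inspector is now a small Sanic HTTP app rather than a raw socket protocol. Assuming the default INSPECTOR_HOST/INSPECTOR_PORT (localhost:6457) and no API key or TLS, it can be queried with nothing more than the standard library (a sketch, not an official client):

    import json
    from urllib.request import Request, urlopen

    BASE = "http://localhost:6457"

    # GET / -> {"meta": {"action": "info"}, "result": {"info": ..., "workers": ...}}
    state = json.loads(urlopen(f"{BASE}/").read())

    # POST /<action> calls the matching Inspector method (reload, shutdown, scale);
    # the JSON body is passed through as keyword arguments.
    req = Request(
        f"{BASE}/scale",
        data=json.dumps({"replicas": 4}).encode(),
        headers={"content-type": "application/json"},
        method="POST",
    )
    urlopen(req)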


@@ -5,18 +5,10 @@ import sys
from importlib import import_module
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Optional,
Type,
Union,
cast,
)
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union, cast
from sanic.http.tls.creators import CertCreator, MkcertCreator, TrustmeCreator
from sanic.http.tls.context import process_to_context
from sanic.http.tls.creators import MkcertCreator, TrustmeCreator
if TYPE_CHECKING:
@@ -106,21 +98,30 @@ class AppLoader:
class CertLoader:
_creator_class: Type[CertCreator]
_creators = {
"mkcert": MkcertCreator,
"trustme": TrustmeCreator,
}
def __init__(self, ssl_data: Dict[str, Union[str, os.PathLike]]):
creator_name = ssl_data.get("creator")
if creator_name not in ("mkcert", "trustme"):
self._ssl_data = ssl_data
creator_name = cast(str, ssl_data.get("creator"))
self._creator_class = self._creators.get(creator_name)
if not creator_name:
return
if not self._creator_class:
raise RuntimeError(f"Unknown certificate creator: {creator_name}")
elif creator_name == "mkcert":
self._creator_class = MkcertCreator
elif creator_name == "trustme":
self._creator_class = TrustmeCreator
self._key = ssl_data["key"]
self._cert = ssl_data["cert"]
self._localhost = cast(str, ssl_data["localhost"])
def load(self, app: SanicApp):
if not self._creator_class:
return process_to_context(self._ssl_data)
creator = self._creator_class(app, self._key, self._cert)
return creator.generate_cert(self._localhost)


@@ -1,13 +1,16 @@
import os
import sys
from contextlib import suppress
from itertools import count
from random import choice
from signal import SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from time import sleep
from typing import List, Optional
from typing import Dict, List, Optional
from sanic.compat import OS_IS_WINDOWS
from sanic.exceptions import ServerKilled
from sanic.log import error_logger, logger
from sanic.worker.constants import RestartOrder
from sanic.worker.process import ProcessState, Worker, WorkerProcess
@@ -18,7 +21,8 @@ else:
class WorkerManager:
THRESHOLD = 50
THRESHOLD = WorkerProcess.THRESHOLD
MAIN_IDENT = "Sanic-Main"
def __init__(
self,
@@ -31,39 +35,66 @@ class WorkerManager:
):
self.num_server = number
self.context = context
self.transient: List[Worker] = []
self.durable: List[Worker] = []
self.transient: Dict[str, Worker] = {}
self.durable: Dict[str, Worker] = {}
self.monitor_publisher, self.monitor_subscriber = monitor_pubsub
self.worker_state = worker_state
self.worker_state["Sanic-Main"] = {"pid": self.pid}
self.terminated = False
self.worker_state[self.MAIN_IDENT] = {"pid": self.pid}
self._shutting_down = False
self._serve = serve
self._server_settings = server_settings
self._server_count = count()
if number == 0:
raise RuntimeError("Cannot serve with no workers")
for i in range(number):
self.manage(
f"{WorkerProcess.SERVER_LABEL}-{i}",
serve,
server_settings,
transient=True,
)
for _ in range(number):
self.create_server()
signal_func(SIGINT, self.shutdown_signal)
signal_func(SIGTERM, self.shutdown_signal)
def manage(self, ident, func, kwargs, transient=False):
def manage(self, ident, func, kwargs, transient=False) -> Worker:
container = self.transient if transient else self.durable
container.append(
Worker(ident, func, kwargs, self.context, self.worker_state)
worker = Worker(ident, func, kwargs, self.context, self.worker_state)
container[worker.ident] = worker
return worker
def create_server(self) -> Worker:
server_number = next(self._server_count)
return self.manage(
f"{WorkerProcess.SERVER_LABEL}-{server_number}",
self._serve,
self._server_settings,
transient=True,
)
def shutdown_server(self, ident: Optional[str] = None) -> None:
if not ident:
servers = [
worker
for worker in self.transient.values()
if worker.ident.startswith(WorkerProcess.SERVER_LABEL)
]
if not servers:
error_logger.error(
"Server shutdown failed because a server was not found."
)
return
worker = choice(servers) # nosec B311
else:
worker = self.transient[ident]
for process in worker.processes:
process.terminate()
del self.transient[worker.ident]
def run(self):
self.start()
self.monitor()
self.join()
self.terminate()
# self.kill()
def start(self):
for process in self.processes:
@@ -85,15 +116,41 @@ class WorkerManager:
self.join()
def terminate(self):
if not self.terminated:
if not self._shutting_down:
for process in self.processes:
process.terminate()
self.terminated = True
def restart(self, process_names: Optional[List[str]] = None, **kwargs):
def restart(
self,
process_names: Optional[List[str]] = None,
restart_order=RestartOrder.SHUTDOWN_FIRST,
**kwargs,
):
for process in self.transient_processes:
if not process_names or process.name in process_names:
process.restart(**kwargs)
process.restart(restart_order=restart_order, **kwargs)
def scale(self, num_worker: int):
if num_worker <= 0:
raise ValueError("Cannot scale to 0 workers.")
change = num_worker - self.num_server
if change == 0:
logger.info(
f"No change needed. There are already {num_worker} workers."
)
return
logger.info(f"Scaling from {self.num_server} to {num_worker} workers")
if change > 0:
for _ in range(change):
worker = self.create_server()
for process in worker.processes:
process.start()
else:
for _ in range(abs(change)):
self.shutdown_server()
self.num_server = num_worker
def monitor(self):
self.wait_for_ack()
@@ -109,7 +166,15 @@ class WorkerManager:
elif message == "__TERMINATE__":
self.shutdown()
break
split_message = message.split(":", 1)
logger.debug(
"Incoming monitor message: %s",
message,
extra={"verbosity": 1},
)
split_message = message.split(":", 2)
if message.startswith("__SCALE__"):
self.scale(int(split_message[-1]))
continue
processes = split_message[0]
reloaded_files = (
split_message[1] if len(split_message) > 1 else None
@@ -119,10 +184,17 @@ class WorkerManager:
]
if "__ALL_PROCESSES__" in process_names:
process_names = None
order = (
RestartOrder.STARTUP_FIRST
if "STARTUP_FIRST" in split_message
else RestartOrder.SHUTDOWN_FIRST
)
self.restart(
process_names=process_names,
reloaded_files=reloaded_files,
restart_order=order,
)
self._sync_states()
except InterruptedError:
if not OS_IS_WINDOWS:
raise
@@ -130,17 +202,40 @@ class WorkerManager:
def wait_for_ack(self): # no cov
misses = 0
message = (
"It seems that one or more of your workers failed to come "
"online in the allowed time. Sanic is shutting down to avoid a "
f"deadlock. The current threshold is {self.THRESHOLD / 10}s. "
"If this problem persists, please check out the documentation "
"https://sanic.dev/en/guide/deployment/manager.html#worker-ack."
)
while not self._all_workers_ack():
sleep(0.1)
if self.monitor_subscriber.poll(0.1):
monitor_msg = self.monitor_subscriber.recv()
if monitor_msg != "__TERMINATE_EARLY__":
self.monitor_publisher.send(monitor_msg)
continue
misses = self.THRESHOLD
message = (
"One of your worker processes terminated before startup "
"was completed. Please solve any errors experienced "
"during startup. If you do not see an exception traceback "
"in your error logs, try running Sanic in in a single "
"process using --single-process or single_process=True. "
"Once you are confident that the server is able to start "
"without errors you can switch back to multiprocess mode."
)
misses += 1
if misses > self.THRESHOLD:
error_logger.error("Not all workers are ack. Shutting down.")
error_logger.error(
"Not all workers acknowledged a successful startup. "
"Shutting down.\n\n" + message
)
self.kill()
sys.exit(1)
@property
def workers(self):
return self.transient + self.durable
def workers(self) -> List[Worker]:
return list(self.transient.values()) + list(self.durable.values())
@property
def processes(self):
@@ -150,15 +245,22 @@ class WorkerManager:
@property
def transient_processes(self):
for worker in self.transient:
for worker in self.transient.values():
for process in worker.processes:
yield process
def kill(self):
for process in self.processes:
logger.info("Killing %s [%s]", process.name, process.pid)
os.kill(process.pid, SIGKILL)
raise ServerKilled
def shutdown_signal(self, signal, frame):
if self._shutting_down:
logger.info("Shutdown interrupted. Killing.")
with suppress(ServerKilled):
self.kill()
logger.info("Received signal %s. Shutting down.", Signals(signal).name)
self.monitor_publisher.send(None)
self.shutdown()
@@ -167,6 +269,7 @@ class WorkerManager:
for process in self.processes:
if process.is_alive():
process.terminate()
self._shutting_down = True
@property
def pid(self):
@@ -179,3 +282,9 @@ class WorkerManager:
if worker_state.get("server")
]
return all(acked) and len(acked) == self.num_server
def _sync_states(self):
for process in self.processes:
state = self.worker_state[process.name].get("state")
if state and process.state.name != state:
process.set_state(ProcessState[state], True)


@@ -2,6 +2,7 @@ from multiprocessing.connection import Connection
from os import environ, getpid
from typing import Any, Dict
from sanic.log import Colors, logger
from sanic.worker.process import ProcessState
from sanic.worker.state import WorkerState
@@ -16,20 +17,45 @@ class WorkerMultiplexer:
self._state = WorkerState(worker_state, self.name)
def ack(self):
logger.debug(
f"{Colors.BLUE}Process ack: {Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self.pid,
)
self._state._state[self.name] = {
**self._state._state[self.name],
"state": ProcessState.ACKED.name,
}
def restart(self, name: str = ""):
def restart(
self,
name: str = "",
all_workers: bool = False,
zero_downtime: bool = False,
):
if name and all_workers:
raise ValueError(
"Ambiguous restart with both a named process and"
" all_workers=True"
)
if not name:
name = self.name
name = "__ALL_PROCESSES__:" if all_workers else self.name
if not name.endswith(":"):
name += ":"
if zero_downtime:
name += ":STARTUP_FIRST"
self._monitor_publisher.send(name)
reload = restart # no cov
def terminate(self):
self._monitor_publisher.send("__TERMINATE__")
def scale(self, num_workers: int):
message = f"__SCALE__:{num_workers}"
self._monitor_publisher.send(message)
def terminate(self, early: bool = False):
message = "__TERMINATE_EARLY__" if early else "__TERMINATE__"
self._monitor_publisher.send(message)
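Together with the manager's __SCALE__ handling, the new multiplexer methods let a worker change the number of server processes at runtime; a sketch with a made-up route:

    from sanic import Request, Sanic
    from sanic.response import json

    app = Sanic("ScaleDemo")

    @app.post("/scale/<replicas:int>")
    async def scale(request: Request, replicas: int):
        request.app.m.scale(replicas)   # sends "__SCALE__:<replicas>" to the manager
        return json({"requested": replicas})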
@property
def pid(self) -> int:


@@ -1,12 +1,14 @@
import os
from datetime import datetime, timezone
from enum import IntEnum, auto
from multiprocessing.context import BaseContext
from signal import SIGINT
from threading import Thread
from time import sleep
from typing import Any, Dict, Set
from sanic.log import Colors, logger
from sanic.worker.constants import ProcessState, RestartOrder
def get_now():
@@ -14,15 +16,8 @@ def get_now():
return now
class ProcessState(IntEnum):
IDLE = auto()
STARTED = auto()
ACKED = auto()
JOINED = auto()
TERMINATED = auto()
class WorkerProcess:
THRESHOLD = 300 # == 30 seconds
SERVER_LABEL = "Server"
def __init__(self, factory, name, target, kwargs, worker_state):
@@ -54,8 +49,9 @@ class WorkerProcess:
f"{Colors.SANIC}%s{Colors.END}",
self.name,
)
self.set_state(ProcessState.STARTING)
self._current_process.start()
self.set_state(ProcessState.STARTED)
self._process.start()
if not self.worker_state[self.name].get("starts"):
self.worker_state[self.name] = {
**self.worker_state[self.name],
@@ -67,7 +63,7 @@ class WorkerProcess:
def join(self):
self.set_state(ProcessState.JOINED)
self._process.join()
self._current_process.join()
def terminate(self):
if self.state is not ProcessState.TERMINATED:
@@ -80,21 +76,23 @@ class WorkerProcess:
)
self.set_state(ProcessState.TERMINATED, force=True)
try:
# self._process.terminate()
os.kill(self.pid, SIGINT)
del self.worker_state[self.name]
except (KeyError, AttributeError, ProcessLookupError):
...
def restart(self, **kwargs):
def restart(self, restart_order=RestartOrder.SHUTDOWN_FIRST, **kwargs):
logger.debug(
f"{Colors.BLUE}Restarting a process: {Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self.pid,
)
self._process.terminate()
self.set_state(ProcessState.IDLE, force=True)
self.set_state(ProcessState.RESTARTING, force=True)
if restart_order is RestartOrder.SHUTDOWN_FIRST:
self._terminate_now()
else:
self._old_process = self._current_process
self.kwargs.update(
{"config": {k.upper(): v for k, v in kwargs.items()}}
)
@@ -104,6 +102,9 @@ class WorkerProcess:
except AttributeError:
raise RuntimeError("Restart failed")
if restart_order is RestartOrder.STARTUP_FIRST:
self._terminate_soon()
self.worker_state[self.name] = {
**self.worker_state[self.name],
"pid": self.pid,
@@ -113,14 +114,14 @@ class WorkerProcess:
def is_alive(self):
try:
return self._process.is_alive()
return self._current_process.is_alive()
except AssertionError:
return False
def spawn(self):
if self.state is not ProcessState.IDLE:
if self.state not in (ProcessState.IDLE, ProcessState.RESTARTING):
raise Exception("Cannot spawn a worker process until it is idle.")
self._process = self.factory(
self._current_process = self.factory(
name=self.name,
target=self.target,
kwargs=self.kwargs,
@@ -129,10 +130,61 @@ class WorkerProcess:
@property
def pid(self):
return self._process.pid
return self._current_process.pid
def _terminate_now(self):
logger.debug(
f"{Colors.BLUE}Begin restart termination: "
f"{Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self._current_process.pid,
)
self._current_process.terminate()
def _terminate_soon(self):
logger.debug(
f"{Colors.BLUE}Begin restart termination: "
f"{Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self._current_process.pid,
)
termination_thread = Thread(target=self._wait_to_terminate)
termination_thread.start()
def _wait_to_terminate(self):
logger.debug(
f"{Colors.BLUE}Waiting for process to be acked: "
f"{Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self._old_process.pid,
)
misses = 0
while self.state is not ProcessState.ACKED:
sleep(0.1)
misses += 1
if misses > self.THRESHOLD:
raise TimeoutError(
f"Worker {self.name} failed to come ack within "
f"{self.THRESHOLD / 10} seconds"
)
else:
logger.debug(
f"{Colors.BLUE}Process acked. Terminating: "
f"{Colors.BOLD}{Colors.SANIC}"
f"%s {Colors.BLUE}[%s]{Colors.END}",
self.name,
self._old_process.pid,
)
self._old_process.terminate()
delattr(self, "_old_process")
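The choice between the two termination paths above is made by whoever publishes the restart. A minimal sketch, assuming the multiplexer attribute wired up in worker_serve further down and the keyword names asserted in the multiplexer tests later in this diff, of requesting the new zero-downtime (STARTUP_FIRST) order from inside a worker:

    from sanic import Sanic, empty

    app = Sanic("Demo")

    @app.post("/reload")
    async def reload(request):
        # STARTUP_FIRST: the replacement worker is spawned and acked before the
        # old one is terminated (see _terminate_soon above). The manager receives
        # "__ALL_PROCESSES__::STARTUP_FIRST" on its monitor pipe.
        request.app.multiplexer.restart(all_workers=True, zero_downtime=True)
        return empty()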
class Worker:
WORKER_PREFIX = "Sanic-"
def __init__(
self,
ident: str,
@@ -151,8 +203,12 @@ class Worker:
def create_process(self) -> WorkerProcess:
process = WorkerProcess(
factory=self.context.Process,
name=f"Sanic-{self.ident}-{len(self.processes)}",
# Need to ignore this typing error - The problem is the
# BaseContext itself has no Process. But, all of its
# implementations do. We can safely ignore as it is a typing
# issue in the standard lib.
factory=self.context.Process, # type: ignore
name=f"{self.WORKER_PREFIX}{self.ident}-{len(self.processes)}",
target=self.serve,
kwargs={**self.server_settings},
worker_state=self.worker_state,

View File

@@ -9,6 +9,7 @@ from multiprocessing.connection import Connection
from pathlib import Path
from signal import SIGINT, SIGTERM
from signal import signal as signal_func
from time import sleep
from typing import Dict, Set
from sanic.server.events import trigger_events
@@ -16,6 +17,8 @@ from sanic.worker.loader import AppLoader
class Reloader:
INTERVAL = 1.0 # seconds
def __init__(
self,
publisher: Connection,
@@ -24,7 +27,7 @@ class Reloader:
app_loader: AppLoader,
):
self._publisher = publisher
self.interval = interval
self.interval = interval or self.INTERVAL
self.reload_dirs = reload_dirs
self.run = True
self.app_loader = app_loader
@@ -62,6 +65,7 @@ class Reloader:
self.reload(",".join(changed) if changed else "unknown")
if after_trigger:
trigger_events(after_trigger, loop, app)
sleep(self.interval)
else:
if reloader_stop:
trigger_events(reloader_stop, loop, app)

View File

@@ -1,6 +1,7 @@
import asyncio
import os
import socket
import warnings
from functools import partial
from multiprocessing.connection import Connection
@@ -10,11 +11,13 @@ from typing import Any, Dict, List, Optional, Type, Union
from sanic.application.constants import ServerStage
from sanic.application.state import ApplicationServerInfo
from sanic.http.constants import HTTP
from sanic.log import error_logger
from sanic.models.server_types import Signal
from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.runners import _serve_http_1, _serve_http_3
from sanic.worker.loader import AppLoader, CertLoader
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.process import Worker, WorkerProcess
def worker_serve(
@@ -45,80 +48,96 @@ def worker_serve(
config=None,
passthru: Optional[Dict[str, Any]] = None,
):
from sanic import Sanic
try:
from sanic import Sanic
if app_loader:
app = app_loader.load()
else:
app = Sanic.get_app(app_name)
if app_loader:
app = app_loader.load()
else:
app = Sanic.get_app(app_name)
app.refresh(passthru)
app.setup_loop()
app.refresh(passthru)
app.setup_loop()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# Hydrate server info if needed
if server_info:
for app_name, server_info_objects in server_info.items():
a = Sanic.get_app(app_name)
if not a.state.server_info:
a.state.server_info = []
for info in server_info_objects:
if not info.settings.get("app"):
info.settings["app"] = a
a.state.server_info.append(info)
# Hydrate server info if needed
if server_info:
for app_name, server_info_objects in server_info.items():
a = Sanic.get_app(app_name)
if not a.state.server_info:
a.state.server_info = []
for info in server_info_objects:
if not info.settings.get("app"):
info.settings["app"] = a
a.state.server_info.append(info)
if isinstance(ssl, dict):
cert_loader = CertLoader(ssl)
ssl = cert_loader.load(app)
for info in app.state.server_info:
info.settings["ssl"] = ssl
if isinstance(ssl, dict):
cert_loader = CertLoader(ssl)
ssl = cert_loader.load(app)
for info in app.state.server_info:
info.settings["ssl"] = ssl
# When in a worker process, do some init
if os.environ.get("SANIC_WORKER_NAME"):
# Hydrate apps with any passed server info
# When in a worker process, do some init
worker_name = os.environ.get("SANIC_WORKER_NAME")
if worker_name and worker_name.startswith(
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL
):
# Hydrate apps with any passed server info
if monitor_publisher is None:
raise RuntimeError("No restart publisher found in worker process")
if worker_state is None:
raise RuntimeError("No worker state found in worker process")
if monitor_publisher is None:
raise RuntimeError(
"No restart publisher found in worker process"
)
if worker_state is None:
raise RuntimeError("No worker state found in worker process")
# Run secondary servers
apps = list(Sanic._app_registry.values())
app.before_server_start(partial(app._start_servers, apps=apps))
for a in apps:
a.multiplexer = WorkerMultiplexer(monitor_publisher, worker_state)
# Run secondary servers
apps = list(Sanic._app_registry.values())
app.before_server_start(partial(app._start_servers, apps=apps))
for a in apps:
a.multiplexer = WorkerMultiplexer(
monitor_publisher, worker_state
)
if app.debug:
loop.set_debug(app.debug)
if app.debug:
loop.set_debug(app.debug)
app.asgi = False
app.asgi = False
if app.state.server_info:
primary_server_info = app.state.server_info[0]
primary_server_info.stage = ServerStage.SERVING
if config:
app.update_config(config)
if app.state.server_info:
primary_server_info = app.state.server_info[0]
primary_server_info.stage = ServerStage.SERVING
if config:
app.update_config(config)
if version is HTTP.VERSION_3:
return _serve_http_3(host, port, app, loop, ssl)
return _serve_http_1(
host,
port,
app,
ssl,
sock,
unix,
reuse_port,
loop,
protocol,
backlog,
register_sys_signals,
run_multiple,
run_async,
connections,
signal,
state,
asyncio_server_kwargs,
)
if version is HTTP.VERSION_3:
return _serve_http_3(host, port, app, loop, ssl)
return _serve_http_1(
host,
port,
app,
ssl,
sock,
unix,
reuse_port,
loop,
protocol,
backlog,
register_sys_signals,
run_multiple,
run_async,
connections,
signal,
state,
asyncio_server_kwargs,
)
except Exception as e:
warnings.simplefilter("ignore", category=RuntimeWarning)
if monitor_publisher:
error_logger.exception(e)
multiplexer = WorkerMultiplexer(monitor_publisher, {})
multiplexer.terminate(True)
else:
raise e

View File

@@ -6,8 +6,6 @@ import os
import re
import sys
from distutils.util import strtobool
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
@@ -37,6 +35,25 @@ def open_local(paths, mode="r", encoding="utf8"):
return codecs.open(path, mode, encoding)
def str_to_bool(val: str) -> bool:
val = val.lower()
if val in {
"y",
"yes",
"yep",
"yup",
"t",
"true",
"on",
"enable",
"enabled",
"1",
}:
return True
elif val in {"n", "no", "f", "false", "off", "disable", "disabled", "0"}:
return False
else:
raise ValueError(f"Invalid truth value {val}")
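For clarity, the helper's accepted values at a glance; anything outside the two sets raises, which keeps the SANIC_NO_UJSON / SANIC_NO_UVLOOP flags below strict:

    str_to_bool("yes")     # True
    str_to_bool("off")     # False
    str_to_bool("maybe")   # ValueError: Invalid truth value maybe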
with open_local(["sanic", "__version__.py"], encoding="latin1") as fp:
try:
@@ -73,6 +90,7 @@ setup_kwargs = {
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
],
"entry_points": {"console_scripts": ["sanic = sanic.__main__:main"]},
}
@@ -81,7 +99,7 @@ env_dependency = (
'; sys_platform != "win32" ' 'and implementation_name == "cpython"'
)
ujson = "ujson>=1.35" + env_dependency
uvloop = "uvloop>=0.5.3" + env_dependency
uvloop = "uvloop>=0.15.0" + env_dependency
types_ujson = "types-ujson" + env_dependency
requirements = [
"sanic-routing>=22.8.0",
@@ -94,8 +112,8 @@ requirements = [
]
tests_require = [
"sanic-testing>=22.9.0b2",
"pytest",
"sanic-testing>=22.9.0",
"pytest==7.1.*",
"coverage",
"beautifulsoup4",
"pytest-sanic",
@@ -131,13 +149,13 @@ dev_require = tests_require + [
all_require = list(set(dev_require + docs_require))
if strtobool(os.environ.get("SANIC_NO_UJSON", "no")):
if str_to_bool(os.environ.get("SANIC_NO_UJSON", "no")):
print("Installing without uJSON")
requirements.remove(ujson)
tests_require.remove(types_ujson)
# 'nt' means windows OS
if strtobool(os.environ.get("SANIC_NO_UVLOOP", "no")):
if str_to_bool(os.environ.get("SANIC_NO_UVLOOP", "no")):
print("Installing without uvLoop")
requirements.remove(uvloop)

View File

@@ -8,8 +8,8 @@ import uuid
from contextlib import suppress
from logging import LogRecord
from typing import List, Tuple
from unittest.mock import MagicMock
from typing import Any, Dict, List, Tuple
from unittest.mock import MagicMock, Mock, patch
import pytest
@@ -54,7 +54,7 @@ TYPE_TO_GENERATOR_MAP = {
"uuid": lambda: str(uuid.uuid1()),
}
CACHE = {}
CACHE: Dict[str, Any] = {}
class RouteStringGenerator:
@@ -147,6 +147,7 @@ def app(request):
for target, method_name in TouchUp._registry:
CACHE[method_name] = getattr(target, method_name)
app = Sanic(slugify.sub("-", request.node.name))
yield app
for target, method_name in TouchUp._registry:
setattr(target, method_name, CACHE[method_name])
@@ -220,3 +221,14 @@ def sanic_ext(ext_instance): # noqa
yield sanic_ext
with suppress(KeyError):
del sys.modules["sanic_ext"]
@pytest.fixture
def urlopen():
urlopen = Mock()
urlopen.return_value = urlopen
urlopen.__enter__ = Mock(return_value=urlopen)
urlopen.__exit__ = Mock()
urlopen.read = Mock()
with patch("sanic.cli.inspector_client.urlopen", urlopen):
yield urlopen

View File

@@ -15,7 +15,7 @@ from sanic import Sanic
from sanic.compat import OS_IS_WINDOWS
from sanic.config import Config
from sanic.exceptions import SanicException
from sanic.helpers import _default
from sanic.helpers import Default
from sanic.log import LOGGING_CONFIG_DEFAULTS
from sanic.response import text
from sanic.router import Route
@@ -347,7 +347,15 @@ def test_app_registry_retrieval_from_multiple():
def test_get_app_does_not_exist():
with pytest.raises(
SanicException, match='Sanic app name "does-not-exist" not found.'
SanicException,
match=(
"Sanic app name 'does-not-exist' not found.\n"
"App instantiation must occur outside "
"if __name__ == '__main__' "
"block or by using an AppLoader.\nSee "
"https://sanic.dev/en/guide/deployment/app-loader.html"
" for more details."
),
):
Sanic.get_app("does-not-exist")
@@ -491,7 +499,9 @@ def test_uvloop_cannot_never_called_with_create_server(caplog, monkeypatch):
)
counter = Counter([(r[1], r[2]) for r in caplog.record_tuples])
modified = sum(1 for app in apps if app.config.USE_UVLOOP is not _default)
modified = sum(
1 for app in apps if not isinstance(app.config.USE_UVLOOP, Default)
)
assert counter[(logging.WARNING, message)] == modified

View File

@@ -8,7 +8,7 @@ import uvicorn
from sanic import Sanic
from sanic.application.state import Mode
from sanic.asgi import MockTransport
from sanic.asgi import ASGIApp, MockTransport
from sanic.exceptions import BadRequest, Forbidden, ServiceUnavailable
from sanic.request import Request
from sanic.response import json, text
@@ -16,6 +16,12 @@ from sanic.server.websockets.connection import WebSocketConnection
from sanic.signals import RESERVED_NAMESPACES
try:
from unittest.mock import AsyncMock
except ImportError:
from tests.asyncmock import AsyncMock # type: ignore
@pytest.fixture
def message_stack():
return deque()
@@ -558,3 +564,39 @@ async def test_asgi_serve_location(app):
_, response = await app.asgi_client.get("/")
assert response.text == "http://<ASGI>"
@pytest.mark.asyncio
async def test_error_on_lifespan_exception_start(app, caplog):
@app.before_server_start
async def before_server_start(_):
1 / 0
recv = AsyncMock(return_value={"type": "lifespan.startup"})
send = AsyncMock()
app.asgi = True
with caplog.at_level(logging.ERROR):
await ASGIApp.create(app, {"type": "lifespan"}, recv, send)
send.assert_awaited_once_with(
{"type": "lifespan.startup.failed", "message": "division by zero"}
)
@pytest.mark.asyncio
async def test_error_on_lifespan_exception_stop(app: Sanic):
@app.before_server_stop
async def before_server_stop(_):
1 / 0
recv = AsyncMock(return_value={"type": "lifespan.shutdown"})
send = AsyncMock()
app.asgi = True
await app._startup()
await ASGIApp.create(app, {"type": "lifespan"}, recv, send)
send.assert_awaited_once_with(
{"type": "lifespan.shutdown.failed", "message": "division by zero"}
)

View File

@@ -323,3 +323,20 @@ def test_bp_group_properties():
assert "api/v1/grouped/bp2/" in routes
assert "api/v1/primary/grouped/bp1" in routes
assert "api/v1/primary/grouped/bp2" in routes
def test_nested_bp_group_properties():
one = Blueprint("one", url_prefix="/one")
two = Blueprint.group(one)
three = Blueprint.group(two, url_prefix="/three")
@one.route("/four")
def handler(request):
return text("pi")
app = Sanic("PropTest")
app.blueprint(three)
app.router.finalize()
routes = [route.path for route in app.router.routes]
assert routes == ["three/one/four"]

View File

@@ -4,6 +4,7 @@ import sys
from pathlib import Path
from typing import List, Optional, Tuple
from unittest.mock import patch
import pytest
@@ -11,6 +12,7 @@ from sanic_routing import __version__ as __routing_version__
from sanic import __version__
from sanic.__main__ import main
from sanic.cli.inspector_client import InspectorClient
@pytest.fixture(scope="module", autouse=True)
@@ -117,7 +119,13 @@ def test_error_with_path_as_instance_without_simple_arg(caplog):
),
)
def test_tls_options(cmd: Tuple[str, ...], caplog):
command = ["fake.server.app", *cmd, "--port=9999", "--debug"]
command = [
"fake.server.app",
*cmd,
"--port=9999",
"--debug",
"--single-process",
]
lines = capture(command, caplog)
assert "Goin' Fast @ https://127.0.0.1:9999" in lines
@@ -286,3 +294,50 @@ def test_noisy_exceptions(cmd: str, expected: bool, caplog):
info = read_app_info(lines)
assert info["noisy_exceptions"] is expected
def test_inspector_inspect(urlopen, caplog, capsys):
urlopen.read.return_value = json.dumps(
{
"result": {
"info": {
"packages": ["foo"],
},
"extra": {
"more": "data",
},
"workers": {"Worker-Name": {"some": "state"}},
}
}
).encode()
with patch("sys.argv", ["sanic", "inspect"]):
capture(["inspect"], caplog)
captured = capsys.readouterr()
assert "Inspecting @ http://localhost:6457" in captured.out
assert "Worker-Name" in captured.out
assert captured.err == ""
@pytest.mark.parametrize(
"command,params",
(
(["reload"], {"zero_downtime": False}),
(["reload", "--zero-downtime"], {"zero_downtime": True}),
(["shutdown"], {}),
(["scale", "9"], {"replicas": 9}),
(["foo", "--bar=something"], {"bar": "something"}),
(["foo", "--bar"], {"bar": True}),
(["foo", "--no-bar"], {"bar": False}),
(["foo", "positional"], {"args": ["positional"]}),
(
["foo", "positional", "--bar=something"],
{"args": ["positional"], "bar": "something"},
),
),
)
def test_inspector_command(command, params):
with patch.object(InspectorClient, "request") as client:
with patch("sys.argv", ["sanic", "inspect", *command]):
main()
client.assert_called_once_with(command[0], **params)

View File

@@ -39,7 +39,7 @@ def test_logo_true(app, caplog):
with patch("sys.stdout.isatty") as isatty:
isatty.return_value = True
with caplog.at_level(logging.DEBUG):
app.make_coffee()
app.make_coffee(single_process=True)
# Only in the regular logo
assert " ▄███ █████ ██ " not in caplog.text

View File

@@ -2,7 +2,6 @@ import asyncio
import sys
from threading import Event
from unittest.mock import Mock
import pytest
@@ -75,7 +74,7 @@ def test_create_named_task(app):
app.stop()
app.run()
app.run(single_process=True)
def test_named_task_called(app):

View File

@@ -10,7 +10,7 @@ import pytest
import sanic
from sanic import Sanic
from sanic.log import LOGGING_CONFIG_DEFAULTS, logger
from sanic.log import LOGGING_CONFIG_DEFAULTS, Colors, logger
from sanic.response import text
@@ -250,3 +250,14 @@ def test_verbosity(app, caplog, app_verbosity, log_verbosity, exists):
if app_verbosity == 0:
assert ("sanic.root", logging.INFO, "DEFAULT") in caplog.record_tuples
def test_colors_enum_format():
assert f"{Colors.END}" == Colors.END.value
assert f"{Colors.BOLD}" == Colors.BOLD.value
assert f"{Colors.BLUE}" == Colors.BLUE.value
assert f"{Colors.GREEN}" == Colors.GREEN.value
assert f"{Colors.PURPLE}" == Colors.PURPLE.value
assert f"{Colors.RED}" == Colors.RED.value
assert f"{Colors.SANIC}" == Colors.SANIC.value
assert f"{Colors.YELLOW}" == Colors.YELLOW.value

View File

@@ -1,6 +1,6 @@
import logging
from asyncio import CancelledError
from asyncio import CancelledError, sleep
from itertools import count
from sanic.exceptions import NotFound
@@ -318,6 +318,32 @@ def test_middleware_return_response(app):
resp1 = await request.respond()
return resp1
_, response = app.test_client.get("/")
app.test_client.get("/")
assert response_middleware_run_count == 1
assert request_middleware_run_count == 1
def test_middleware_run_on_timeout(app):
app.config.RESPONSE_TIMEOUT = 0.1
response_middleware_run_count = 0
request_middleware_run_count = 0
@app.on_response
def response(_, response):
nonlocal response_middleware_run_count
response_middleware_run_count += 1
@app.on_request
def request(_):
nonlocal request_middleware_run_count
request_middleware_run_count += 1
@app.get("/")
async def handler(request):
resp1 = await request.respond()
await sleep(1)
return resp1
app.test_client.get("/")
assert request_middleware_run_count == 1
assert response_middleware_run_count == 1

View File

@@ -3,7 +3,7 @@ from functools import partial
import pytest
from sanic import Sanic
from sanic.middleware import Middleware
from sanic.middleware import Middleware, MiddlewareLocation
from sanic.response import json
@@ -40,6 +40,86 @@ def reset_middleware():
Middleware.reset_count()
def test_add_register_priority(app: Sanic):
def foo(*_):
...
app.register_middleware(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.register_middleware(foo, attach_to="response", priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore
def test_add_register_named_priority(app: Sanic):
def foo(*_):
...
app.register_named_middleware(foo, route_names=["foo"], priority=999)
assert len(app.named_request_middleware) == 1
assert len(app.named_response_middleware) == 0
assert app.named_request_middleware["foo"][0].priority == 999 # type: ignore
app.register_named_middleware(
foo, attach_to="response", route_names=["foo"], priority=999
)
assert len(app.named_request_middleware) == 1
assert len(app.named_response_middleware) == 1
assert app.named_response_middleware["foo"][0].priority == 999 # type: ignore
def test_add_decorator_priority(app: Sanic):
def foo(*_):
...
app.middleware(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.middleware(foo, attach_to="response", priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore
def test_add_convenience_priority(app: Sanic):
def foo(*_):
...
app.on_request(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 0
assert app.request_middleware[0].priority == 999 # type: ignore
app.on_response(foo, priority=999)
assert len(app.request_middleware) == 1
assert len(app.response_middleware) == 1
assert app.response_middleware[0].priority == 999 # type: ignore
def test_add_conflicting_priority(app: Sanic):
def foo(*_):
...
middleware = Middleware(foo, MiddlewareLocation.REQUEST, priority=998)
app.register_middleware(middleware=middleware, priority=999)
assert app.request_middleware[0].priority == 999 # type: ignore
assert middleware.priority == 998
def test_add_conflicting_priority_named(app: Sanic):
def foo(*_):
...
middleware = Middleware(foo, MiddlewareLocation.REQUEST, priority=998)
app.register_named_middleware(
middleware=middleware, route_names=["foo"], priority=999
)
assert app.named_request_middleware["foo"][0].priority == 999 # type: ignore
assert middleware.priority == 998
@pytest.mark.parametrize(
"expected,priorities",
PRIORITY_TEST_CASES,

View File

@@ -3,6 +3,7 @@ import multiprocessing
import pickle
import random
import signal
import sys
from asyncio import sleep
@@ -11,6 +12,7 @@ import pytest
from sanic_testing.testing import HOST, PORT
from sanic import Blueprint, text
from sanic.compat import use_context
from sanic.log import logger
from sanic.server.socket import configure_socket
@@ -20,6 +22,10 @@ from sanic.server.socket import configure_socket
reason="SIGALRM is not implemented for this platform, we have to come "
"up with another timeout strategy to test these",
)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_multiprocessing(app):
"""Tests that the number of children we produce is correct"""
# Selects a number at random so we can spot check
@@ -37,7 +43,8 @@ def test_multiprocessing(app):
signal.signal(signal.SIGALRM, stop_on_alarm)
signal.alarm(2)
app.run(HOST, 4120, workers=num_workers, debug=True)
with use_context("fork"):
app.run(HOST, 4120, workers=num_workers, debug=True)
assert len(process_list) == num_workers + 1
@@ -136,6 +143,10 @@ def test_multiprocessing_legacy_unix(app):
not hasattr(signal, "SIGALRM"),
reason="SIGALRM is not implemented for this platform",
)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_multiprocessing_with_blueprint(app):
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
@@ -155,7 +166,8 @@ def test_multiprocessing_with_blueprint(app):
bp = Blueprint("test_text")
app.blueprint(bp)
app.run(HOST, 4121, workers=num_workers, debug=True)
with use_context("fork"):
app.run(HOST, 4121, workers=num_workers, debug=True)
assert len(process_list) == num_workers + 1
@@ -213,6 +225,10 @@ def test_pickle_app_with_static(app, protocol):
up_p_app.run(single_process=True)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_main_process_event(app, caplog):
# Selects a number at random so we can spot check
num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
@@ -235,8 +251,9 @@ def test_main_process_event(app, caplog):
def main_process_stop2(app, loop):
logger.info("main_process_stop")
with caplog.at_level(logging.INFO):
app.run(HOST, PORT, workers=num_workers)
with use_context("fork"):
with caplog.at_level(logging.INFO):
app.run(HOST, PORT, workers=num_workers)
assert (
caplog.record_tuples.count(("sanic.root", 20, "main_process_start"))

View File

@@ -1,8 +1,5 @@
import asyncio
from contextlib import closing
from socket import socket
import pytest
from sanic import Sanic
@@ -623,6 +620,4 @@ def test_streaming_echo():
res = await read_chunk()
assert res == None
# Use random port for tests
with closing(socket()) as sock:
app.run(access_log=False)
app.run(access_log=False, single_process=True)

View File

@@ -1293,6 +1293,24 @@ async def test_request_string_representation_asgi(app):
"------sanic--\r\n",
"filename_\u00A0_test",
),
# Umlaut using NFC normalization (Windows, Linux, Android)
(
"------sanic\r\n"
'content-disposition: form-data; filename*="utf-8\'\'filename_%C3%A4_test"; name="test"\r\n'
"\r\n"
"OK\r\n"
"------sanic--\r\n",
"filename_\u00E4_test",
),
# Umlaut using NFD normalization (MacOS client)
(
"------sanic\r\n"
'content-disposition: form-data; filename*="utf-8\'\'filename_a%CC%88_test"; name="test"\r\n'
"\r\n"
"OK\r\n"
"------sanic--\r\n",
"filename_\u00E4_test", # Sanic should normalize to NFC
),
],
)
def test_request_multipart_files(app, payload, filename):
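The two new cases carry the same umlaut in different Unicode normalization forms, which is why the expected filename is identical; a one-line illustration of the equivalence Sanic now enforces:

    from unicodedata import normalize

    nfd = "filename_a\u0308_test"  # 'a' + combining diaeresis (NFD, macOS clients)
    assert normalize("NFC", nfd) == "filename_\u00e4_test"  # precomposed form expected above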

tests/test_response_json.py (new file, 215 lines)
View File

@@ -0,0 +1,215 @@
import json
from functools import partial
from unittest.mock import Mock
import pytest
from sanic import Request, Sanic
from sanic.exceptions import SanicException
from sanic.response import json as json_response
from sanic.response.types import JSONResponse
JSON_BODY = {"ok": True}
json_dumps = partial(json.dumps, separators=(",", ":"))
@pytest.fixture
def json_app(app: Sanic):
@app.get("/json")
async def handle(request: Request):
return json_response(JSON_BODY)
return app
def test_body_can_be_retrieved(json_app: Sanic):
_, resp = json_app.test_client.get("/json")
assert resp.body == json_dumps(JSON_BODY).encode()
def test_body_can_be_set(json_app: Sanic):
new_body = b'{"hello":"world"}'
@json_app.on_response
def set_body(request: Request, response: JSONResponse):
response.body = new_body
_, resp = json_app.test_client.get("/json")
assert resp.body == new_body
def test_raw_body_can_be_retrieved(json_app: Sanic):
@json_app.on_response
def check_body(request: Request, response: JSONResponse):
assert response.raw_body == JSON_BODY
json_app.test_client.get("/json")
def test_raw_body_can_be_set(json_app: Sanic):
new_body = {"hello": "world"}
@json_app.on_response
def set_body(request: Request, response: JSONResponse):
response.raw_body = new_body
assert response.raw_body == new_body
assert response.body == json_dumps(new_body).encode()
json_app.test_client.get("/json")
def test_raw_body_cant_be_retrieved_after_body_set(json_app: Sanic):
new_body = b'{"hello":"world"}'
@json_app.on_response
def check_raw_body(request: Request, response: JSONResponse):
response.body = new_body
with pytest.raises(SanicException):
response.raw_body
json_app.test_client.get("/json")
def test_raw_body_can_be_reset_after_body_set(json_app: Sanic):
new_body = b'{"hello":"world"}'
new_new_body = {"lorem": "ipsum"}
@json_app.on_response
def set_bodies(request: Request, response: JSONResponse):
response.body = new_body
response.raw_body = new_new_body
_, resp = json_app.test_client.get("/json")
assert resp.body == json_dumps(new_new_body).encode()
def test_set_body_method(json_app: Sanic):
new_body = {"lorem": "ipsum"}
@json_app.on_response
def set_body(request: Request, response: JSONResponse):
response.set_body(new_body)
_, resp = json_app.test_client.get("/json")
assert resp.body == json_dumps(new_body).encode()
def test_set_body_method_after_body_set(json_app: Sanic):
new_body = b'{"hello":"world"}'
new_new_body = {"lorem": "ipsum"}
@json_app.on_response
def set_body(request: Request, response: JSONResponse):
response.body = new_body
response.set_body(new_new_body)
_, resp = json_app.test_client.get("/json")
assert resp.body == json_dumps(new_new_body).encode()
def test_custom_dumps_and_kwargs(json_app: Sanic):
custom_dumps = Mock(return_value="custom")
@json_app.get("/json-custom")
async def handle_custom(request: Request):
return json_response(JSON_BODY, dumps=custom_dumps, prry="platypus")
_, resp = json_app.test_client.get("/json-custom")
assert resp.body == "custom".encode()
custom_dumps.assert_called_once_with(JSON_BODY, prry="platypus")
def test_override_dumps_and_kwargs(json_app: Sanic):
custom_dumps_1 = Mock(return_value="custom1")
custom_dumps_2 = Mock(return_value="custom2")
@json_app.get("/json-custom")
async def handle_custom(request: Request):
return json_response(JSON_BODY, dumps=custom_dumps_1, prry="platypus")
@json_app.on_response
def set_body(request: Request, response: JSONResponse):
response.set_body(JSON_BODY, dumps=custom_dumps_2, platypus="prry")
_, resp = json_app.test_client.get("/json-custom")
assert resp.body == "custom2".encode()
custom_dumps_1.assert_called_once_with(JSON_BODY, prry="platypus")
custom_dumps_2.assert_called_once_with(JSON_BODY, platypus="prry")
def test_append(json_app: Sanic):
@json_app.get("/json-append")
async def handler_append(request: Request):
return json_response(["a", "b"], status=200)
@json_app.on_response
def do_append(request: Request, response: JSONResponse):
response.append("c")
_, resp = json_app.test_client.get("/json-append")
assert resp.body == json_dumps(["a", "b", "c"]).encode()
def test_extend(json_app: Sanic):
@json_app.get("/json-extend")
async def handler_extend(request: Request):
return json_response(["a", "b"], status=200)
@json_app.on_response
def do_extend(request: Request, response: JSONResponse):
response.extend(["c", "d"])
_, resp = json_app.test_client.get("/json-extend")
assert resp.body == json_dumps(["a", "b", "c", "d"]).encode()
def test_update(json_app: Sanic):
@json_app.get("/json-update")
async def handler_update(request: Request):
return json_response({"a": "b"}, status=200)
@json_app.on_response
def do_update(request: Request, response: JSONResponse):
response.update({"c": "d"}, e="f")
_, resp = json_app.test_client.get("/json-update")
assert resp.body == json_dumps({"a": "b", "c": "d", "e": "f"}).encode()
def test_pop_dict(json_app: Sanic):
@json_app.get("/json-pop")
async def handler_pop(request: Request):
return json_response({"a": "b", "c": "d"}, status=200)
@json_app.on_response
def do_pop(request: Request, response: JSONResponse):
val = response.pop("c")
assert val == "d"
val_default = response.pop("e", "f")
assert val_default == "f"
_, resp = json_app.test_client.get("/json-pop")
assert resp.body == json_dumps({"a": "b"}).encode()
def test_pop_list(json_app: Sanic):
@json_app.get("/json-pop")
async def handler_pop(request: Request):
return json_response(["a", "b"], status=200)
@json_app.on_response
def do_pop(request: Request, response: JSONResponse):
val = response.pop(0)
assert val == "a"
with pytest.raises(
TypeError, match="pop doesn't accept a default argument for lists"
):
response.pop(21, "nah nah")
_, resp = json_app.test_client.get("/json-pop")
assert resp.body == json_dumps(["b"]).encode()

View File

@@ -803,6 +803,21 @@ def test_static_add_route(app, strict_slashes):
assert response.text == "OK2"
@pytest.mark.parametrize("unquote", [True, False, None])
def test_unquote_add_route(app, unquote):
async def handler1(_, foo):
return text(foo)
app.add_route(handler1, "/<foo>", unquote=unquote)
value = "" if unquote else r"%E5%95%8A"
_, response = app.test_client.get("/啊")
assert response.text == value
_, response = app.test_client.get(r"/%E5%95%8A")
assert response.text == value
def test_dynamic_add_route(app):
results = []

View File

@@ -7,7 +7,7 @@ import pytest
from sanic_routing.exceptions import NotFound
from sanic import Blueprint
from sanic import Blueprint, Sanic, empty
from sanic.exceptions import InvalidSignal, SanicException
@@ -20,6 +20,31 @@ def test_add_signal(app):
assert len(app.signal_router.routes) == 1
def test_add_signal_method_handler(app):
counter = 0
class TestSanic(Sanic):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_signal(
self.after_routing_signal_handler, "http.routing.after"
)
def after_routing_signal_handler(self, *args, **kwargs):
nonlocal counter
counter += 1
app = TestSanic("Test")
assert len(app.signal_router.routes) == 1
@app.route("/")
async def handler(_):
return empty()
app.test_client.get("/")
assert counter == 1
def test_add_signal_decorator(app):
@app.signal("foo.bar.baz")
def sync_signal(*_):
@@ -289,10 +314,10 @@ async def test_dispatch_signal_triggers_event_on_bp(app):
waiter = bp.event("foo.bar.baz")
assert isawaitable(waiter)
fut = asyncio.ensure_future(do_wait())
fut = do_wait()
for signal in signal_group:
signal.ctx.event.set()
await fut
await asyncio.gather(fut)
assert bp_counter == 1

View File

@@ -2,8 +2,10 @@ import logging
import os
import ssl
import subprocess
import sys
from contextlib import contextmanager
from multiprocessing import Event
from pathlib import Path
from unittest.mock import Mock, patch
from urllib.parse import urlparse
@@ -16,6 +18,7 @@ import sanic.http.tls.creators
from sanic import Sanic
from sanic.application.constants import Mode
from sanic.compat import use_context
from sanic.constants import LocalCertCreator
from sanic.exceptions import SanicException
from sanic.helpers import _default
@@ -264,6 +267,7 @@ def test_cert_sni_list(app):
assert response.text == "sanic.example"
@pytest.mark.xfail
def test_missing_sni(app):
"""The sanic cert does not list 127.0.0.1 and httpx does not send
IP as SNI anyway."""
@@ -282,6 +286,7 @@ def test_missing_sni(app):
assert "Request and response object expected" in str(exc.value)
@pytest.mark.xfail
def test_no_matching_cert(app):
"""The sanic cert does not list 127.0.0.1 and httpx does not send
IP as SNI anyway."""
@@ -301,6 +306,7 @@ def test_no_matching_cert(app):
assert "Request and response object expected" in str(exc.value)
@pytest.mark.xfail
def test_wildcards(app):
ssl_list = [None, localhost_dir, sanic_dir]
@@ -422,7 +428,12 @@ def test_logger_vhosts(caplog):
app.stop()
with caplog.at_level(logging.INFO):
app.run(host="127.0.0.1", port=42102, ssl=[localhost_dir, sanic_dir])
app.run(
host="127.0.0.1",
port=42102,
ssl=[localhost_dir, sanic_dir],
single_process=True,
)
logmsg = [
m for s, l, m in caplog.record_tuples if m.startswith("Certificate")
@@ -636,3 +647,34 @@ def test_sanic_ssl_context_create():
assert sanic_context is context
assert isinstance(sanic_context, SanicSSLContext)
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_ssl_in_multiprocess_mode(app: Sanic, caplog):
ssl_dict = {"cert": localhost_cert, "key": localhost_key}
event = Event()
@app.main_process_start
async def main_start(app: Sanic):
app.shared_ctx.event = event
@app.after_server_start
async def shutdown(app):
app.shared_ctx.event.set()
app.stop()
assert not event.is_set()
with use_context("fork"):
with caplog.at_level(logging.INFO):
app.run(ssl=ssl_dict)
assert event.is_set()
assert (
"sanic.root",
logging.INFO,
"Goin' Fast @ https://127.0.0.1:8000",
) in caplog.record_tuples

View File

@@ -1,8 +1,9 @@
# import asyncio
import logging
import os
import sys
from asyncio import AbstractEventLoop
from asyncio import AbstractEventLoop, sleep
from string import ascii_lowercase
import httpcore
@@ -12,6 +13,7 @@ import pytest
from pytest import LogCaptureFixture
from sanic import Sanic
from sanic.compat import use_context
from sanic.request import Request
from sanic.response import text
@@ -174,19 +176,27 @@ def handler(request: Request):
async def client(app: Sanic, loop: AbstractEventLoop):
try:
async with httpx.AsyncClient(uds=SOCKPATH) as client:
transport = httpx.AsyncHTTPTransport(uds=SOCKPATH)
async with httpx.AsyncClient(transport=transport) as client:
r = await client.get("http://myhost.invalid/")
assert r.status_code == 200
assert r.text == os.path.abspath(SOCKPATH)
finally:
await sleep(0.2)
app.stop()
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_unix_connection_multiple_workers():
app_multi = Sanic(name="test")
app_multi.get("/")(handler)
app_multi.listener("after_server_start")(client)
app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2)
with use_context("fork"):
app_multi = Sanic(name="test")
app_multi.get("/")(handler)
app_multi.listener("after_server_start")(client)
app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2)
# @pytest.mark.xfail(

View File

@@ -1,14 +1,20 @@
import json
try: # no cov
from ujson import dumps
except ModuleNotFoundError: # no cov
from json import dumps # type: ignore
from datetime import datetime
from logging import ERROR, INFO
from socket import AF_INET, SOCK_STREAM, timeout
from unittest.mock import Mock, patch
from urllib.error import URLError
import pytest
from sanic_testing import TestManager
from sanic.cli.inspector_client import InspectorClient
from sanic.helpers import Default
from sanic.log import Colors
from sanic.worker.inspector import Inspector, inspect
from sanic.worker.inspector import Inspector
DATA = {
@@ -20,121 +26,90 @@ DATA = {
},
"workers": {"Worker-Name": {"some": "state"}},
}
SERIALIZED = json.dumps(DATA)
FULL_SERIALIZED = dumps({"result": DATA})
OUT_SERIALIZED = dumps(DATA)
def test_inspector_stop():
inspector = Inspector(Mock(), {}, {}, "", 1)
assert inspector.run is True
inspector.stop()
assert inspector.run is False
class FooInspector(Inspector):
async def foo(self, bar):
return f"bar is {bar}"
@patch("sanic.worker.inspector.sys.stdout.write")
@patch("sanic.worker.inspector.socket")
@pytest.mark.parametrize("command", ("foo", "raw", "pretty"))
def test_send_inspect(socket: Mock, write: Mock, command: str):
socket.return_value = socket
socket.__enter__.return_value = socket
socket.recv.return_value = SERIALIZED.encode()
inspect("localhost", 9999, command)
socket.sendall.assert_called_once_with(command.encode())
socket.recv.assert_called_once_with(4096)
socket.connect.assert_called_once_with(("localhost", 9999))
socket.assert_called_once_with(AF_INET, SOCK_STREAM)
if command == "raw":
write.assert_called_once_with(SERIALIZED)
elif command == "pretty":
write.assert_called()
else:
write.assert_not_called()
@pytest.fixture
def publisher():
publisher = Mock()
return publisher
@patch("sanic.worker.inspector.sys")
@patch("sanic.worker.inspector.socket")
def test_send_inspect_conn_refused(socket: Mock, sys: Mock, caplog):
with caplog.at_level(INFO):
socket.return_value = socket
socket.__enter__.return_value = socket
socket.connect.side_effect = ConnectionRefusedError()
inspect("localhost", 9999, "foo")
@pytest.fixture
def inspector(publisher):
inspector = FooInspector(
publisher, {}, {}, "localhost", 9999, "", Default(), Default()
)
inspector(False)
return inspector
socket.close.assert_called_once()
sys.exit.assert_called_once_with(1)
@pytest.fixture
def http_client(inspector):
manager = TestManager(inspector.app)
return manager.test_client
@pytest.mark.parametrize("command", ("info",))
@patch("sanic.cli.inspector_client.sys.stdout.write")
def test_send_inspect(write, urlopen, command: str):
urlopen.read.return_value = FULL_SERIALIZED.encode()
InspectorClient("localhost", 9999, False, False, None).do(command)
write.assert_called()
write.reset_mock()
InspectorClient("localhost", 9999, False, True, None).do(command)
write.assert_called_with(OUT_SERIALIZED + "\n")
@patch("sanic.cli.inspector_client.sys")
def test_send_inspect_conn_refused(sys: Mock, urlopen):
urlopen.side_effect = URLError("")
InspectorClient("localhost", 9999, False, False, None).do("info")
message = (
f"{Colors.RED}Could not connect to inspector at: "
f"{Colors.YELLOW}('localhost', 9999){Colors.END}\n"
f"{Colors.YELLOW}http://localhost:9999{Colors.END}\n"
"Either the application is not running, or it did not start "
"an inspector instance."
"an inspector instance.\n<urlopen error >\n"
)
assert ("sanic.error", ERROR, message) in caplog.record_tuples
sys.exit.assert_called_once_with(1)
sys.stderr.write.assert_called_once_with(message)
@patch("sanic.worker.inspector.configure_socket")
@pytest.mark.parametrize("action", (b"reload", b"shutdown", b"foo"))
def test_run_inspector(configure_socket: Mock, action: bytes):
sock = Mock()
conn = Mock()
conn.recv.return_value = action
configure_socket.return_value = sock
inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
inspector.reload = Mock() # type: ignore
inspector.shutdown = Mock() # type: ignore
inspector.state_to_json = Mock(return_value="foo") # type: ignore
def accept():
inspector.run = False
return conn, ...
sock.accept = accept
inspector()
configure_socket.assert_called_once_with(
{"host": "localhost", "port": 9999, "unix": None, "backlog": 1}
)
conn.recv.assert_called_with(64)
if action == b"reload":
conn.send.assert_called_with(b"\n")
inspector.reload.assert_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_not_called()
elif action == b"shutdown":
conn.send.assert_called_with(b"\n")
inspector.reload.assert_not_called()
inspector.shutdown.assert_called()
inspector.state_to_json.assert_not_called()
else:
conn.send.assert_called_with(b'"foo"')
inspector.reload.assert_not_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_called()
def test_run_inspector_reload(publisher, http_client):
_, response = http_client.post("/reload")
assert response.status == 200
publisher.send.assert_called_once_with("__ALL_PROCESSES__:")
@patch("sanic.worker.inspector.configure_socket")
def test_accept_timeout(configure_socket: Mock):
sock = Mock()
configure_socket.return_value = sock
inspector = Inspector(Mock(), {}, {}, "localhost", 9999)
inspector.reload = Mock() # type: ignore
inspector.shutdown = Mock() # type: ignore
inspector.state_to_json = Mock(return_value="foo") # type: ignore
def test_run_inspector_reload_zero_downtime(publisher, http_client):
_, response = http_client.post("/reload", json={"zero_downtime": True})
assert response.status == 200
publisher.send.assert_called_once_with("__ALL_PROCESSES__::STARTUP_FIRST")
def accept():
inspector.run = False
raise timeout
sock.accept = accept
def test_run_inspector_shutdown(publisher, http_client):
_, response = http_client.post("/shutdown")
assert response.status == 200
publisher.send.assert_called_once_with("__TERMINATE__")
inspector()
inspector.reload.assert_not_called()
inspector.shutdown.assert_not_called()
inspector.state_to_json.assert_not_called()
def test_run_inspector_scale(publisher, http_client):
_, response = http_client.post("/scale", json={"replicas": 4})
assert response.status == 200
publisher.send.assert_called_once_with("__SCALE__:4")
def test_run_inspector_arbitrary(http_client):
_, response = http_client.post("/foo", json={"bar": 99})
assert response.status == 200
assert response.json == {"meta": {"action": "foo"}, "result": "bar is 99"}
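Taken together, these tests sketch the inspector's new HTTP surface. A hedged example of calling it directly, assuming the default localhost:6457 address shown in the CLI test earlier in this diff and the endpoints and payloads asserted above:

    import json
    from typing import Any, Dict, Optional
    from urllib.request import Request, urlopen

    BASE = "http://localhost:6457"  # assumed default inspector address

    def call(path: str, payload: Optional[Dict[str, Any]] = None,
             api_key: Optional[str] = None) -> int:
        headers = {"Content-Type": "application/json"}
        if api_key:  # only needed when the inspector was started with an API key
            headers["Authorization"] = f"Bearer {api_key}"
        data = json.dumps(payload or {}).encode()
        with urlopen(Request(BASE + path, data=data, headers=headers)) as resp:
            return resp.status  # 200 on success, 401 when a required key is missing

    call("/reload", {"zero_downtime": True})  # manager receives "__ALL_PROCESSES__::STARTUP_FIRST"
    call("/scale", {"replicas": 4})           # manager receives "__SCALE__:4"
    call("/shutdown")                         # manager receives "__TERMINATE__"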
def test_state_to_json():
@@ -142,8 +117,10 @@ def test_state_to_json():
now_iso = now.isoformat()
app_info = {"app": "hello"}
worker_state = {"Test": {"now": now, "nested": {"foo": now}}}
inspector = Inspector(Mock(), app_info, worker_state, "", 0)
state = inspector.state_to_json()
inspector = Inspector(
Mock(), app_info, worker_state, "", 0, "", Default(), Default()
)
state = inspector._state_to_json()
assert state == {
"info": app_info,
@@ -151,17 +128,14 @@ def test_state_to_json():
}
def test_reload():
publisher = Mock()
inspector = Inspector(publisher, {}, {}, "", 0)
inspector.reload()
publisher.send.assert_called_once_with("__ALL_PROCESSES__:")
def test_shutdown():
publisher = Mock()
inspector = Inspector(publisher, {}, {}, "", 0)
inspector.shutdown()
publisher.send.assert_called_once_with("__TERMINATE__")
def test_run_inspector_authentication():
inspector = Inspector(
Mock(), {}, {}, "", 0, "super-secret", Default(), Default()
)(False)
manager = TestManager(inspector.app)
_, response = manager.test_client.get("/")
assert response.status == 401
_, response = manager.test_client.get(
"/", headers={"Authorization": "Bearer super-secret"}
)
assert response.status == 200

View File

@@ -86,6 +86,10 @@ def test_input_is_module():
@patch("sanic.worker.loader.TrustmeCreator")
@patch("sanic.worker.loader.MkcertCreator")
def test_cert_loader(MkcertCreator: Mock, TrustmeCreator: Mock, creator: str):
CertLoader._creators = {
"mkcert": MkcertCreator,
"trustme": TrustmeCreator,
}
MkcertCreator.return_value = MkcertCreator
TrustmeCreator.return_value = TrustmeCreator
data = {

View File

@@ -1,11 +1,21 @@
from signal import SIGINT, SIGKILL
from logging import ERROR, INFO
from signal import SIGINT
from unittest.mock import Mock, call, patch
import pytest
from sanic.compat import OS_IS_WINDOWS
from sanic.exceptions import ServerKilled
from sanic.worker.constants import RestartOrder
from sanic.worker.manager import WorkerManager
if not OS_IS_WINDOWS:
from signal import SIGKILL
else:
SIGKILL = SIGINT
def fake_serve():
...
@@ -13,14 +23,7 @@ def fake_serve():
def test_manager_no_workers():
message = "Cannot serve with no workers"
with pytest.raises(RuntimeError, match=message):
WorkerManager(
0,
fake_serve,
{},
Mock(),
(Mock(), Mock()),
{},
)
WorkerManager(0, fake_serve, {}, Mock(), (Mock(), Mock()), {})
@patch("sanic.worker.process.os")
@@ -29,17 +32,8 @@ def test_terminate(os_mock: Mock):
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
assert manager.terminated is False
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager.terminate()
assert manager.terminated is True
os_mock.kill.assert_called_once_with(1234, SIGINT)
@@ -50,14 +44,7 @@ def test_shutown(os_mock: Mock):
process.is_alive.return_value = True
context = Mock()
context.Process.return_value = process
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
manager.shutdown()
os_mock.kill.assert_called_once_with(1234, SIGINT)
@@ -68,31 +55,36 @@ def test_kill(os_mock: Mock):
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(
1,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
manager.kill()
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
with pytest.raises(ServerKilled):
manager.kill()
os_mock.kill.assert_called_once_with(1234, SIGKILL)
@patch("sanic.worker.process.os")
@patch("sanic.worker.manager.os")
def test_shutdown_signal_send_kill(
manager_os_mock: Mock, process_os_mock: Mock
):
process = Mock()
process.pid = 1234
context = Mock()
context.Process.return_value = process
manager = WorkerManager(1, fake_serve, {}, context, (Mock(), Mock()), {})
assert manager._shutting_down is False
manager.shutdown_signal(SIGINT, None)
assert manager._shutting_down is True
process_os_mock.kill.assert_called_once_with(1234, SIGINT)
manager.shutdown_signal(SIGINT, None)
manager_os_mock.kill.assert_called_once_with(1234, SIGKILL)
def test_restart_all():
p1 = Mock()
p2 = Mock()
context = Mock()
context.Process.side_effect = [p1, p2, p1, p2]
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), Mock()),
{},
)
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), Mock()), {})
assert len(list(manager.transient_processes))
manager.restart()
p1.terminate.assert_called_once()
@@ -127,91 +119,187 @@ def test_restart_all():
)
def test_monitor_all():
@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_all(zero_downtime):
p1 = Mock()
p2 = Mock()
sub = Mock()
sub.recv.side_effect = ["__ALL_PROCESSES__:", ""]
incoming = (
"__ALL_PROCESSES__::STARTUP_FIRST"
if zero_downtime
else "__ALL_PROCESSES__:"
)
sub.recv.side_effect = [incoming, ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()
restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=None, reloaded_files=""
process_names=None,
reloaded_files="",
restart_order=restart_order,
)
def test_monitor_all_with_files():
@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_all_with_files(zero_downtime):
p1 = Mock()
p2 = Mock()
sub = Mock()
sub.recv.side_effect = ["__ALL_PROCESSES__:foo,bar", ""]
incoming = (
"__ALL_PROCESSES__:foo,bar:STARTUP_FIRST"
if zero_downtime
else "__ALL_PROCESSES__:foo,bar"
)
sub.recv.side_effect = [incoming, ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()
restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=None, reloaded_files="foo,bar"
process_names=None,
reloaded_files="foo,bar",
restart_order=restart_order,
)
def test_monitor_one_process():
@pytest.mark.parametrize("zero_downtime", (False, True))
def test_monitor_one_process(zero_downtime):
p1 = Mock()
p1.name = "Testing"
p2 = Mock()
sub = Mock()
sub.recv.side_effect = [f"{p1.name}:foo,bar", ""]
incoming = (
f"{p1.name}:foo,bar:STARTUP_FIRST"
if zero_downtime
else f"{p1.name}:foo,bar"
)
sub.recv.side_effect = [incoming, ""]
context = Mock()
context.Process.side_effect = [p1, p2]
manager = WorkerManager(
2,
fake_serve,
{},
context,
(Mock(), sub),
{},
)
manager = WorkerManager(2, fake_serve, {}, context, (Mock(), sub), {})
manager.restart = Mock() # type: ignore
manager.wait_for_ack = Mock() # type: ignore
manager.monitor()
restart_order = (
RestartOrder.STARTUP_FIRST
if zero_downtime
else RestartOrder.SHUTDOWN_FIRST
)
manager.restart.assert_called_once_with(
process_names=[p1.name], reloaded_files="foo,bar"
process_names=[p1.name],
reloaded_files="foo,bar",
restart_order=restart_order,
)
def test_shutdown_signal():
pub = Mock()
manager = WorkerManager(
1,
fake_serve,
{},
Mock(),
(pub, Mock()),
{},
)
manager = WorkerManager(1, fake_serve, {}, Mock(), (pub, Mock()), {})
manager.shutdown = Mock() # type: ignore
manager.shutdown_signal(SIGINT, None)
pub.send.assert_called_with(None)
manager.shutdown.assert_called_once_with()
def test_shutdown_servers(caplog):
p1 = Mock()
p1.pid = 1234
context = Mock()
context.Process.side_effect = [p1]
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, context, (pub, Mock()), {})
with patch("os.kill") as kill:
with caplog.at_level(ERROR):
manager.shutdown_server()
kill.assert_called_once_with(1234, SIGINT)
kill.reset_mock()
assert not caplog.record_tuples
manager.shutdown_server()
kill.assert_not_called()
assert (
"sanic.error",
ERROR,
"Server shutdown failed because a server was not found.",
) in caplog.record_tuples
def test_shutdown_servers_named():
p1 = Mock()
p1.pid = 1234
p2 = Mock()
p2.pid = 6543
context = Mock()
context.Process.side_effect = [p1, p2]
pub = Mock()
manager = WorkerManager(2, fake_serve, {}, context, (pub, Mock()), {})
with patch("os.kill") as kill:
with pytest.raises(KeyError):
manager.shutdown_server("foo")
manager.shutdown_server("Server-1")
kill.assert_called_once_with(6543, SIGINT)
def test_scale(caplog):
p1 = Mock()
p1.pid = 1234
p2 = Mock()
p2.pid = 3456
p3 = Mock()
p3.pid = 5678
context = Mock()
context.Process.side_effect = [p1, p2, p3]
pub = Mock()
manager = WorkerManager(1, fake_serve, {}, context, (pub, Mock()), {})
assert len(manager.transient) == 1
manager.scale(3)
assert len(manager.transient) == 3
with patch("os.kill") as kill:
manager.scale(2)
assert len(manager.transient) == 2
manager.scale(1)
assert len(manager.transient) == 1
assert kill.call_count == 2
with caplog.at_level(INFO):
manager.scale(1)
assert (
"sanic.root",
INFO,
"No change needed. There are already 1 workers.",
) in caplog.record_tuples
with pytest.raises(ValueError, match=r"Cannot scale to 0 workers\."):
manager.scale(0)

View File

@@ -1,11 +1,14 @@
import sys
from multiprocessing import Event
from os import environ, getpid
from typing import Any, Dict
from typing import Any, Dict, Type, Union
from unittest.mock import Mock
import pytest
from sanic import Sanic
from sanic.compat import use_context
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.state import WorkerState
@@ -28,6 +31,10 @@ def m(monitor_publisher, worker_state):
del environ["SANIC_WORKER_NAME"]
@pytest.mark.skipif(
sys.platform not in ("linux", "darwin"),
reason="This test requires fork context",
)
def test_has_multiplexer_default(app: Sanic):
event = Event()
@@ -41,7 +48,8 @@ def test_has_multiplexer_default(app: Sanic):
app.shared_ctx.event.set()
app.stop()
app.run()
with use_context("fork"):
app.run()
assert event.is_set()
@@ -90,17 +98,17 @@ def test_ack(worker_state: Dict[str, Any], m: WorkerMultiplexer):
def test_restart_self(monitor_publisher: Mock, m: WorkerMultiplexer):
m.restart()
monitor_publisher.send.assert_called_once_with("Test")
monitor_publisher.send.assert_called_once_with("Test:")
def test_restart_foo(monitor_publisher: Mock, m: WorkerMultiplexer):
m.restart("foo")
monitor_publisher.send.assert_called_once_with("foo")
monitor_publisher.send.assert_called_once_with("foo:")
def test_reload_alias(monitor_publisher: Mock, m: WorkerMultiplexer):
m.reload()
monitor_publisher.send.assert_called_once_with("Test")
monitor_publisher.send.assert_called_once_with("Test:")
def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
@@ -108,6 +116,11 @@ def test_terminate(monitor_publisher: Mock, m: WorkerMultiplexer):
monitor_publisher.send.assert_called_once_with("__TERMINATE__")
def test_scale(monitor_publisher: Mock, m: WorkerMultiplexer):
m.scale(99)
monitor_publisher.send.assert_called_once_with("__SCALE__:99")
def test_properties(
monitor_publisher: Mock, worker_state: Dict[str, Any], m: WorkerMultiplexer
):
@@ -117,3 +130,36 @@ def test_properties(
assert m.workers == worker_state
assert m.state == worker_state["Test"]
assert isinstance(m.state, WorkerState)
@pytest.mark.parametrize(
"params,expected",
(
({}, "Test:"),
({"name": "foo"}, "foo:"),
({"all_workers": True}, "__ALL_PROCESSES__:"),
({"zero_downtime": True}, "Test::STARTUP_FIRST"),
({"name": "foo", "all_workers": True}, ValueError),
({"name": "foo", "zero_downtime": True}, "foo::STARTUP_FIRST"),
(
{"all_workers": True, "zero_downtime": True},
"__ALL_PROCESSES__::STARTUP_FIRST",
),
(
{"name": "foo", "all_workers": True, "zero_downtime": True},
ValueError,
),
),
)
def test_restart_params(
monitor_publisher: Mock,
m: WorkerMultiplexer,
params: Dict[str, Any],
expected: Union[str, Type[Exception]],
):
if isinstance(expected, str):
m.restart(**params)
monitor_publisher.send.assert_called_once_with(expected)
else:
with pytest.raises(expected):
m.restart(**params)

View File

@@ -1,13 +1,19 @@
import re
import signal
import threading
from asyncio import Event
from logging import DEBUG
from pathlib import Path
from time import sleep
from unittest.mock import Mock
import pytest
from sanic.app import Sanic
from sanic.worker.constants import ProcessState, RestartOrder
from sanic.worker.loader import AppLoader
from sanic.worker.process import WorkerProcess
from sanic.worker.reloader import Reloader
@@ -67,6 +73,88 @@ def test_iter_files():
assert len_total_files == len_python_files + len_static_files
@pytest.mark.parametrize(
"order,expected",
(
(
RestartOrder.SHUTDOWN_FIRST,
[
"Restarting a process",
"Begin restart termination",
"Starting a process",
],
),
(
RestartOrder.STARTUP_FIRST,
[
"Restarting a process",
"Starting a process",
"Begin restart termination",
"Waiting for process to be acked",
"Process acked. Terminating",
],
),
),
)
def test_default_reload_shutdown_order(monkeypatch, caplog, order, expected):
current_process = Mock()
worker_process = WorkerProcess(
lambda **_: current_process,
"Test",
lambda **_: ...,
{},
{},
)
def start(self):
worker_process.set_state(ProcessState.ACKED)
self._target()
orig = threading.Thread.start
monkeypatch.setattr(threading.Thread, "start", start)
with caplog.at_level(DEBUG):
worker_process.restart(restart_order=order)
ansi = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
def clean(msg: str):
msg, _ = ansi.sub("", msg).split(":", 1)
return msg
debug = [clean(record[2]) for record in caplog.record_tuples]
assert debug == expected
current_process.start.assert_called_once()
current_process.terminate.assert_called_once()
monkeypatch.setattr(threading.Thread, "start", orig)
def test_reload_delayed(monkeypatch):
WorkerProcess.THRESHOLD = 1
current_process = Mock()
worker_process = WorkerProcess(
lambda **_: current_process,
"Test",
lambda **_: ...,
{},
{},
)
def start(self):
sleep(0.2)
self._target()
orig = threading.Thread.start
monkeypatch.setattr(threading.Thread, "start", start)
message = "Worker Test failed to come ack within 0.1 seconds"
with pytest.raises(TimeoutError, match=message):
worker_process.restart(restart_order=RestartOrder.STARTUP_FIRST)
monkeypatch.setattr(threading.Thread, "start", orig)
def test_reloader_triggers_start_stop_listeners(
app: Sanic, app_loader: AppLoader
):

View File

@@ -0,0 +1,25 @@
from unittest.mock import patch
import pytest
from sanic import Sanic
@pytest.mark.parametrize(
"start_method,platform,expected",
(
(None, "linux", "spawn"),
(None, "other", "spawn"),
("fork", "linux", "fork"),
("fork", "other", "fork"),
("forkserver", "linux", "forkserver"),
("forkserver", "other", "forkserver"),
("spawn", "linux", "spawn"),
("spawn", "other", "spawn"),
),
)
def test_get_context(start_method, platform, expected):
if start_method:
Sanic.start_method = start_method
with patch("sys.platform", platform):
assert Sanic._get_startup_method() == expected
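start_method is read as a class attribute here, so an application opts in before run(); a minimal sketch, assuming fork is only honored on POSIX platforms as the skipifs elsewhere in this diff suggest, and that the default remains spawn per the parametrization above:

    from sanic import Sanic, text

    Sanic.start_method = "fork"  # worker processes are forked instead of spawned

    app = Sanic("ForkedApp")

    @app.get("/")
    async def handler(request):
        return text("OK")

    if __name__ == "__main__":
        app.run(workers=2)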

View File

@@ -1,3 +1,5 @@
import logging
from os import environ
from unittest.mock import Mock, patch
@@ -6,6 +8,7 @@ import pytest
from sanic.app import Sanic
from sanic.worker.loader import AppLoader
from sanic.worker.multiplexer import WorkerMultiplexer
from sanic.worker.process import Worker, WorkerProcess
from sanic.worker.serve import worker_serve
@@ -37,22 +40,30 @@ def test_config_app(mock_app: Mock):
mock_app.update_config.assert_called_once_with({"FOO": "BAR"})
def test_bad_process(mock_app: Mock):
environ["SANIC_WORKER_NAME"] = "FOO"
def test_bad_process(mock_app: Mock, caplog):
environ["SANIC_WORKER_NAME"] = (
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL + "-FOO"
)
message = "No restart publisher found in worker process"
with pytest.raises(RuntimeError, match=message):
worker_serve(**args(mock_app))
message = "No worker state found in worker process"
with pytest.raises(RuntimeError, match=message):
worker_serve(**args(mock_app, monitor_publisher=Mock()))
publisher = Mock()
with caplog.at_level(logging.ERROR):
worker_serve(**args(mock_app, monitor_publisher=publisher))
assert ("sanic.error", logging.ERROR, message) in caplog.record_tuples
publisher.send.assert_called_once_with("__TERMINATE_EARLY__")
del environ["SANIC_WORKER_NAME"]
def test_has_multiplexer(app: Sanic):
environ["SANIC_WORKER_NAME"] = "FOO"
environ["SANIC_WORKER_NAME"] = (
Worker.WORKER_PREFIX + WorkerProcess.SERVER_LABEL + "-FOO"
)
Sanic.register_app(app)
with patch("sanic.worker.serve._serve_http_1"):
@@ -91,12 +102,13 @@ def test_serve_app_factory(wm: Mock, mock_app):
@patch("sanic.mixins.startup.WorkerManager")
@patch("sanic.mixins.startup.Inspector")
@pytest.mark.parametrize("config", (True, False))
def test_serve_with_inspector(
Inspector: Mock, WorkerManager: Mock, mock_app: Mock, config: bool
WorkerManager: Mock, mock_app: Mock, config: bool
):
Inspector = Mock()
mock_app.config.INSPECTOR = config
mock_app.inspector_class = Inspector
inspector = Mock()
Inspector.return_value = inspector
WorkerManager.return_value = WorkerManager

View File

@@ -1,11 +1,11 @@
[tox]
envlist = py37, py38, py39, py310, pyNightly, pypy37, {py37,py38,py39,py310,pyNightly,pypy37}-no-ext, lint, check, security, docs, type-checking
envlist = py37, py38, py39, py310, py311, pyNightly, pypy37, {py37,py38,py39,py310,py311,pyNightly,pypy37}-no-ext, lint, check, security, docs, type-checking
[testenv]
usedevelop = true
setenv =
{py37,py38,py39,py310,pyNightly}-no-ext: SANIC_NO_UJSON=1
{py37,py38,py39,py310,pyNightly}-no-ext: SANIC_NO_UVLOOP=1
{py37,py38,py39,py310,py311,pyNightly}-no-ext: SANIC_NO_UJSON=1
{py37,py38,py39,py310,py311,pyNightly}-no-ext: SANIC_NO_UVLOOP=1
extras = test, http3
deps =
httpx==0.23