Compare commits
67 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 008b8ac394 | |
| | 0cfd7b528b | |
| | 35786b4b74 | |
| | c7430d805a | |
| | a62c84a954 | |
| | 4aba74d050 | |
| | ab2cb88cf4 | |
| | 938c49b899 | |
| | 761eef7d96 | |
| | 83511a0ba7 | |
| | cf9ccdae47 | |
| | d81096fdc0 | |
| | 6c8e20a859 | |
| | 6239fa4f56 | |
| | 1b324ae981 | |
| | bedf68a9b2 | |
| | 496e87e4ba | |
| | fa4f85eb32 | |
| | 1b1dfedc74 | |
| | 230941ff4f | |
| | 4658e0f2f3 | |
| | 7c3c532dae | |
| | 7c04c9a227 | |
| | 44973125c1 | |
| | 6aaccd1e8b | |
| | e7001b0074 | |
| | aacbd022cf | |
| | ae1874ce34 | |
| | 8abba597a8 | |
| | 9987893963 | |
| | 638322d905 | |
| | ae40f960ff | |
| | d969fdc19f | |
| | 710024125e | |
| | 9a39aff803 | |
| | 78e912ea45 | |
| | aa6ea5b5a0 | |
| | 48800e657f | |
| | 120f0262f7 | |
| | 4db075ffc1 | |
| | 60b4efad67 | |
| | 319388d78b | |
| | ce71514d71 | |
| | 7833d70d9e | |
| | 16961fab9d | |
| | 861e87347a | |
| | 91f6abaa81 | |
| | d380b52f9a | |
| | d656a06a19 | |
| | 258dbee3b9 | |
| | 6b9287b076 | |
| | dac0514441 | |
| | bffdb3b5c2 | |
| | e908ca8cef | |
| | 801595e24a | |
| | ba9b432993 | |
| | b565072ed9 | |
| | caa1b4d69b | |
| | 865536c5c4 | |
| | 784d5cce52 | |
| | 0fd08c6114 | |
| | cd779b6e4f | |
| | 3430907046 | |
| | 2f776eba85 | |
| | b9cd2ed1f1 | |
| | 3411a12c40 | |
| | 28899356c8 | |
@@ -12,6 +12,11 @@ environment:
      PYTHON_VERSION: "3.7.x"
      PYTHON_ARCH: "64"

    - TOXENV: py38-no-ext
      PYTHON: "C:\\Python38-x64"
      PYTHON_VERSION: "3.8.x"
      PYTHON_ARCH: "64"

init: SET "PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"

install:

27  .travis.yml
@@ -21,23 +21,46 @@ matrix:
      dist: xenial
      sudo: true
      name: "Python 3.7 without Extensions"
    - env: TOX_ENV=py38
      python: 3.8
      dist: xenial
      sudo: true
      name: "Python 3.8 with Extensions"
    - env: TOX_ENV=py38-no-ext
      python: 3.8
      dist: xenial
      sudo: true
      name: "Python 3.8 without Extensions"
    - env: TOX_ENV=type-checking
      python: 3.6
      name: "Python 3.6 Type checks"
    - env: TOX_ENV=type-checking
      python: 3.7
      name: "Python 3.7 Type checks"
    - env: TOX_ENV=type-checking
      python: 3.8
      name: "Python 3.8 Type checks"
    - env: TOX_ENV=lint
      python: 3.6
      name: "Python 3.6 Linter checks"
    - env: TOX_ENV=check
      python: 3.6
      name: "Python 3.6 Package checks"
    - env: TOX_ENV=security
      python: 3.6
      dist: xenial
      sudo: true
      name: "Python 3.6 Bandit security scan"
    - env: TOX_ENV=security
      python: 3.7
      dist: xenial
      sudo: true
      name: "Python 3.7 Bandit security scan"
    - env: TOX_ENV=security
      python: 3.8
      dist: xenial
      sudo: true
      name: "Python 3.8 Bandit security scan"
    - env: TOX_ENV=docs
      python: 3.7
      dist: xenial
@@ -48,14 +71,14 @@ matrix:
      name: "Python nightly with Extensions"
    - env: TOX_ENV=pyNightly-no-ext
      python: 'nightly'
      name: "Python nightly Extensions"
      name: "Python nightly without Extensions"
  allow_failures:
    - env: TOX_ENV=pyNightly
      python: 'nightly'
      name: "Python nightly with Extensions"
    - env: TOX_ENV=pyNightly-no-ext
      python: 'nightly'
      name: "Python nightly Extensions"
      name: "Python nightly without Extensions"
install:
  - pip install -U tox
  - pip install codecov

237  CHANGELOG.rst
@@ -1,3 +1,238 @@
Version 20.6.3
===============

Bugfixes
********

* `#1884 <https://github.com/huge-success/sanic/pull/1884>`_ Revert change to multiprocessing mode


Version 20.6.2
===============

Features
********

* `#1641 <https://github.com/huge-success/sanic/pull/1641>`_ Socket binding implemented properly for IPv6 and UNIX sockets


Version 20.6.1
===============

Features
********

* `#1760 <https://github.com/huge-success/sanic/pull/1760>`_ Add version parameter to websocket routes
* `#1866 <https://github.com/huge-success/sanic/pull/1866>`_ Add ``sanic`` as an entry point command
* `#1880 <https://github.com/huge-success/sanic/pull/1880>`_ Add handler names for websockets for url_for usage

Bugfixes
********

* `#1776 <https://github.com/huge-success/sanic/pull/1776>`_ Bug fix for host parameter issue with lists
* `#1842 <https://github.com/huge-success/sanic/pull/1842>`_ Fix static _handler pickling error
* `#1827 <https://github.com/huge-success/sanic/pull/1827>`_ Fix reloader on OSX py38 and Windows
* `#1848 <https://github.com/huge-success/sanic/pull/1848>`_ Reverse named_response_middleware execution order, to match normal response middleware execution order
* `#1853 <https://github.com/huge-success/sanic/pull/1853>`_ Fix pickle error when attempting to pickle an application which contains websocket routes

Deprecations and Removals
*************************

* `#1739 <https://github.com/huge-success/sanic/pull/1739>`_ Deprecate body_bytes to merge into body

Developer infrastructure
************************

* `#1852 <https://github.com/huge-success/sanic/pull/1852>`_ Fix naming of CI test env on Python nightlies
* `#1857 <https://github.com/huge-success/sanic/pull/1857>`_ Adjust websockets version in setup.py
* `#1869 <https://github.com/huge-success/sanic/pull/1869>`_ Wrap run()'s "protocol" type annotation in Optional[]

Improved Documentation
**********************

* `#1846 <https://github.com/huge-success/sanic/pull/1846>`_ Update docs to clarify response middleware execution order
* `#1865 <https://github.com/huge-success/sanic/pull/1865>`_ Fix rst format issue that was hiding documentation


Version 20.6.0
===============

*Released, but unintentionally omitting PR #1880, so was replaced by 20.6.1*


Version 20.3.0
===============

Features
********

* `#1762 <https://github.com/huge-success/sanic/pull/1762>`_ Add ``srv.start_serving()`` and ``srv.serve_forever()`` to ``AsyncioServer``
* `#1767 <https://github.com/huge-success/sanic/pull/1767>`_ Make Sanic usable on ``hypercorn -k trio myweb.app``
* `#1768 <https://github.com/huge-success/sanic/pull/1768>`_ No tracebacks on normal errors and prettier error pages
* `#1769 <https://github.com/huge-success/sanic/pull/1769>`_ Code cleanup in file responses
* `#1793 <https://github.com/huge-success/sanic/pull/1793>`_ and `#1819 <https://github.com/huge-success/sanic/pull/1819>`_ Upgrade ``str.format()`` to f-strings
* `#1798 <https://github.com/huge-success/sanic/pull/1798>`_ Allow multiple workers on MacOS with Python 3.8
* `#1820 <https://github.com/huge-success/sanic/pull/1820>`_ Do not set content-type and content-length headers in exceptions

Bugfixes
********

* `#1748 <https://github.com/huge-success/sanic/pull/1748>`_ Remove loop argument in ``asyncio.Event`` in Python 3.8
* `#1764 <https://github.com/huge-success/sanic/pull/1764>`_ Allow route decorators to stack up again
* `#1789 <https://github.com/huge-success/sanic/pull/1789>`_ Fix tests using hosts yielding incorrect ``url_for``
* `#1808 <https://github.com/huge-success/sanic/pull/1808>`_ Fix Ctrl+C and tests on Windows

Deprecations and Removals
*************************

* `#1800 <https://github.com/huge-success/sanic/pull/1800>`_ Begin deprecation in way of first-class streaming, removal of ``body_init``, ``body_push``, and ``body_finish``
* `#1801 <https://github.com/huge-success/sanic/pull/1801>`_ Complete deprecation from `#1666 <https://github.com/huge-success/sanic/pull/1666>`_ of dictionary context on ``request`` objects.
* `#1807 <https://github.com/huge-success/sanic/pull/1807>`_ Remove server config args that can be read directly from app
* `#1818 <https://github.com/huge-success/sanic/pull/1818>`_ Complete deprecation of ``app.remove_route`` and ``request.raw_args``

Dependencies
************

* `#1794 <https://github.com/huge-success/sanic/pull/1794>`_ Bump ``httpx`` to 0.11.1
* `#1806 <https://github.com/huge-success/sanic/pull/1806>`_ Import ``ASGIDispatch`` from top-level ``httpx`` (from third-party deprecation)

Developer infrastructure
************************

* `#1833 <https://github.com/huge-success/sanic/pull/1833>`_ Resolve broken documentation builds

Improved Documentation
**********************

* `#1755 <https://github.com/huge-success/sanic/pull/1755>`_ Usage of ``response.empty()``
* `#1778 <https://github.com/huge-success/sanic/pull/1778>`_ Update README
* `#1783 <https://github.com/huge-success/sanic/pull/1783>`_ Fix typo
* `#1784 <https://github.com/huge-success/sanic/pull/1784>`_ Corrected changelog for docs move of MD to RST (`#1691 <https://github.com/huge-success/sanic/pull/1691>`_)
* `#1803 <https://github.com/huge-success/sanic/pull/1803>`_ Update config docs to match DEFAULT_CONFIG
* `#1814 <https://github.com/huge-success/sanic/pull/1814>`_ Update getting_started.rst
* `#1821 <https://github.com/huge-success/sanic/pull/1821>`_ Update to deployment
* `#1822 <https://github.com/huge-success/sanic/pull/1822>`_ Update docs with changes done in 20.3
* `#1834 <https://github.com/huge-success/sanic/pull/1834>`_ Order of listeners


Version 19.12.0
===============

@@ -24,7 +259,7 @@ Bugfixes
Improved Documentation
**********************

- Move docs from RST to MD
- Move docs from MD to RST

  Moved all docs from markdown to restructured text like the rest of the docs to unify the scheme and make it easier in
  the future to update documentation. (`#1691 <https://github.com/huge-success/sanic/issues/1691>`__)

@@ -83,6 +83,9 @@ Installation
|
||||
If you are running on a clean install of Fedora 28 or above, please make sure you have the ``redhat-rpm-config`` package installed if you want to
use ``sanic`` with the ``ujson`` dependency.
|
||||
|
||||
.. note::
|
||||
|
||||
Windows support is currently "experimental" and on a best-effort basis. Multiple workers are also not currently supported on Windows (see `Issue #1517 <https://github.com/huge-success/sanic/issues/1517>`_), but setting ``workers=1`` should launch the server successfully.
|
||||
|
||||
Hello World Example
|
||||
-------------------
|
||||
|
||||
@@ -28,6 +28,7 @@ Guides
|
||||
sanic/debug_mode
|
||||
sanic/testing
|
||||
sanic/deploying
|
||||
sanic/nginx
|
||||
sanic/extensions
|
||||
sanic/examples
|
||||
sanic/changelog
|
||||
|
||||
@@ -28,14 +28,15 @@ using all these methods would look like the following.
|
||||
from sanic.views import HTTPMethodView
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic('some_name')
|
||||
app = Sanic("class_views_example")
|
||||
|
||||
class SimpleView(HTTPMethodView):
|
||||
|
||||
def get(self, request):
|
||||
return text('I am get method')
|
||||
|
||||
def post(self, request):
|
||||
# You can also use async syntax
|
||||
async def post(self, request):
|
||||
return text('I am post method')
|
||||
|
||||
def put(self, request):
|
||||
@@ -49,22 +50,6 @@ using all these methods would look like the following.
|
||||
|
||||
app.add_route(SimpleView.as_view(), '/')
|
||||
|
||||
You can also use `async` syntax.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.views import HTTPMethodView
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic('some_name')
|
||||
|
||||
class SimpleAsyncView(HTTPMethodView):
|
||||
|
||||
async def get(self, request):
|
||||
return text('I am async get method')
|
||||
|
||||
app.add_route(SimpleAsyncView.as_view(), '/')
|
||||
|
||||
URL parameters
|
||||
--------------
|
||||
@@ -154,7 +139,7 @@ lambda:
|
||||
from sanic.views import CompositionView
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic(__name__)
|
||||
app = Sanic("composition_example")
|
||||
|
||||
def get_handler(request):
|
||||
return text('I am a get method')
|
||||
|
||||
@@ -39,13 +39,13 @@ Any variables defined with the `SANIC_` prefix will be applied to the sanic conf
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
app = Sanic(load_env='MYAPP_')
|
||||
app = Sanic(__name__, load_env='MYAPP_')
|
||||
|
||||
Then the above variable would be `MYAPP_REQUEST_TIMEOUT`. If you want to disable loading from environment variables you can set it to `False` instead:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
app = Sanic(load_env=False)
|
||||
app = Sanic(__name__, load_env=False)
|
||||
|
||||
From an Object
|
||||
~~~~~~~~~~~~~~
|
||||
@@ -115,15 +115,25 @@ Out of the box there are just a few predefined values which can be overwritten w
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| KEEP_ALIVE_TIMEOUT | 5 | How long to hold a TCP connection open (sec) |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| WEBSOCKET_MAX_SIZE | 2^20 | Maximum size for incoming messages (bytes) |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| WEBSOCKET_MAX_QUEUE | 32 | Maximum length of the queue that holds incoming messages |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| WEBSOCKET_READ_LIMIT | 2^16 | High-water limit of the buffer for incoming bytes |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| WEBSOCKET_WRITE_LIMIT | 2^16 | High-water limit of the buffer for outgoing bytes |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| GRACEFUL_SHUTDOWN_TIMEOUT | 15.0 | How long to wait to force close non-idle connection (sec) |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| ACCESS_LOG | True | Disable or enable access log |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| PROXIES_COUNT | -1 | The number of proxy servers in front of the app (e.g. nginx; see below) |
|
||||
| FORWARDED_SECRET | None | Used to securely identify a specific proxy server (see below) |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| PROXIES_COUNT | None | The number of proxy servers in front of the app (e.g. nginx; see below) |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| FORWARDED_FOR_HEADER | "X-Forwarded-For" | The name of "X-Forwarded-For" HTTP header that contains client and proxy ip |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
| REAL_IP_HEADER | "X-Real-IP" | The name of "X-Real-IP" HTTP header that contains real client ip |
|
||||
| REAL_IP_HEADER | None | The name of "X-Real-IP" HTTP header that contains real client ip |
|
||||
+---------------------------+-------------------+-----------------------------------------------------------------------------+
|
||||
|
||||
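As a rough sketch (the key names come from the table above; the values shown are only examples), any of these settings can be overridden directly on ``app.config`` before the server starts:

.. code-block:: python

    from sanic import Sanic

    app = Sanic("config_example")

    # Override selected defaults from the table above
    app.config.KEEP_ALIVE_TIMEOUT = 10          # seconds
    app.config.WEBSOCKET_MAX_SIZE = 2 ** 22     # bytes
    app.config.GRACEFUL_SHUTDOWN_TIMEOUT = 5.0  # seconds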
The different Timeout variables:
|
||||
@@ -228,9 +238,7 @@ Proxy config if using ...
|
||||
* a proxy that supports `forwarded`: set `FORWARDED_SECRET` to the value that the proxy inserts in the header
|
||||
* Apache Traffic Server: `CONFIG proxy.config.http.insert_forwarded STRING for|proto|host|by=_secret`
|
||||
* NGHTTPX: `nghttpx --add-forwarded=for,proto,host,by --forwarded-for=ip --forwarded-by=_secret`
|
||||
* NGINX: after `the official instructions <https://www.nginx.com/resources/wiki/start/topics/examples/forwarded/>`_, add anywhere in your config:
|
||||
|
||||
.. proxy_set_header Forwarded "$proxy_add_forwarded;by=\"_$server_name\";proto=$scheme;host=\"$http_host\";path=\"$request_uri\";secret=_secret";
|
||||
* NGINX: :ref:`nginx`.
|
||||
|
||||
* a custom header with client IP: set `REAL_IP_HEADER` to the name of that header
|
||||
* `x-forwarded-for`: set `PROXIES_COUNT` to `1` for a single proxy, or a greater number to allow Sanic to select the correct IP
|
||||
|
||||
@@ -21,7 +21,7 @@ and the Automatic Reloader will be activated.
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic(__name__)
|
||||
|
||||
@app.route('/')
|
||||
async def hello_world(request):
|
||||
@@ -43,7 +43,7 @@ the ``auto_reload`` argument will activate or deactivate the Automatic Reloader.
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic(__name__)
|
||||
|
||||
@app.route('/')
|
||||
async def hello_world(request):
|
||||
|
||||
@@ -1,9 +1,12 @@
Deploying
=========

Deploying Sanic is very simple using one of three options: the inbuilt webserver,
Sanic has three serving options: the inbuilt webserver,
an `ASGI webserver <https://asgi.readthedocs.io/en/latest/implementations.html>`_, or `gunicorn`.
It is also very common to place Sanic behind a reverse proxy, like `nginx`.

Sanic's own webserver is the fastest option, and it can be securely run on
the Internet. Still, it is also very common to place Sanic behind a reverse
proxy, as shown in :ref:`nginx`.

Running via Sanic webserver
---------------------------
@@ -13,6 +16,7 @@ keyword arguments:

- `host` *(default `"127.0.0.1"`)*: Address to host the server on.
- `port` *(default `8000`)*: Port to host the server on.
- `unix` *(default `None`)*: Unix socket name to host the server on (instead of TCP).
- `debug` *(default `False`)*: Enables debug output (slows server).
- `ssl` *(default `None`)*: `SSLContext` for SSL encryption of worker(s).
- `sock` *(default `None`)*: Socket for the server to accept connections from.
@@ -47,7 +51,15 @@ If you like using command line arguments, you can launch a Sanic webserver by
executing the module. For example, if you initialized Sanic as `app` in a file
named `server.py`, you could run the server like so:

.. python -m sanic server.app --host=0.0.0.0 --port=1337 --workers=4
::

    sanic server.app --host=0.0.0.0 --port=1337 --workers=4

It can also be called directly as a module.

::

    python -m sanic server.app --host=0.0.0.0 --port=1337 --workers=4

With this way of running sanic, it is not necessary to invoke `app.run` in your
Python file. If you do, make sure you wrap it so that it only executes when
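For reference, a minimal ``server.py`` sketch of that pattern (the module and variable names mirror the commands above; the app name and handler body are illustrative):

.. code-block:: python

    from sanic import Sanic
    from sanic.response import json

    app = Sanic("server_example")

    @app.route("/")
    async def index(request):
        return json({"hello": "world"})

    if __name__ == "__main__":
        # Only runs for "python server.py"; "sanic server.app ..." and
        # "python -m sanic server.app ..." import `app` and ignore this block.
        app.run(host="0.0.0.0", port=1337, workers=4)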
@@ -85,7 +97,11 @@ before shutdown, and after shutdown. Therefore, in ASGI mode, the startup and sh
|
||||
run consecutively and not actually around the server process beginning and ending (since that
|
||||
is now controlled by the ASGI server). Therefore, it is best to use `after_server_start` and
|
||||
`before_server_stop`.
|
||||
3. ASGI mode is still in "beta" as of Sanic v19.6.
|
||||
|
||||
Sanic has experimental support for running on `Trio <https://trio.readthedocs.io/en/stable/>`_ with::
|
||||
|
||||
hypercorn -k trio myapp:app
|
||||
|
||||
|
||||
Running via Gunicorn
|
||||
--------------------
|
||||
@@ -110,28 +126,6 @@ See the `Gunicorn Docs <http://docs.gunicorn.org/en/latest/settings.html#max-req
|
||||
Other deployment considerations
|
||||
-------------------------------
|
||||
|
||||
Running behind a reverse proxy
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Sanic can be used with a reverse proxy (e.g. nginx). There's a simple example of nginx configuration:
|
||||
|
||||
|
||||
::
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name example.org;
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
If you want to get real client ip, you should configure `X-Real-IP` and `X-Forwarded-For` HTTP headers and set `app.config.PROXIES_COUNT` to `1`; see the configuration page for more information.
|
||||
|
||||
Disable debug logging for performance
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ A simple sanic application with a single ``async`` method with ``text`` and ``js
|
||||
Simple App with ``Sanic Views``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Showcasing the simple mechanism of using :class:`sanic.viewes.HTTPMethodView` as well as a way to extend the same
|
||||
Showcasing the simple mechanism of using :class:`sanic.views.HTTPMethodView` as well as a way to extend the same
|
||||
into providing a custom ``async`` behavior for ``view``.
|
||||
|
||||
.. literalinclude:: ../../examples/simple_async_view.py
|
||||
|
||||
@@ -59,7 +59,7 @@ You can also add an exception handler as such:
|
||||
async def server_error_handler(request, exception):
|
||||
return text("Oops, server error", status=500)
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic("error_handler_example")
|
||||
app.error_handler.add(Exception, server_error_handler)
|
||||
|
||||
In some cases, you might want to add some more error handling
|
||||
@@ -77,7 +77,7 @@ can subclass Sanic's default error handler as such:
|
||||
# Your custom error handling logic...
|
||||
return super().default(request, exception)
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic("custom_error_handler_example")
|
||||
app.error_handler = CustomErrorHandler()
|
||||
|
||||
Useful exceptions
|
||||
|
||||
@@ -8,19 +8,19 @@ syntax, so earlier versions of python won't work.
|
||||
1. Install Sanic
|
||||
----------------
|
||||
|
||||
> If you are running on a clean install of Fedora 28 or above, please make sure you have the ``redhat-rpm-config`` package installed in case if you want to use ``sanic`` with ``ujson`` dependency.
|
||||
If you are running on a clean install of Fedora 28 or above, please make sure you have the ``redhat-rpm-config`` package installed in case if you want to use ``sanic`` with ``ujson`` dependency.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
pip3 install sanic
|
||||
|
||||
To install sanic without `uvloop` or `ujson` using bash, you can provide either or both of these environmental variables
|
||||
using any truthy string like `'y', 'yes', 't', 'true', 'on', '1'` and setting the `SANIC_NO_X` (`X` = `UVLOOP`/`UJSON`)
|
||||
using any truthy string like `'y', 'yes', 't', 'true', 'on', '1'` and setting the `SANIC_NO_X` (with `X` = `UVLOOP`/`UJSON`)
to true will stop that feature's installation.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
SANIC_NO_UVLOOP=true SANIC_NO_UJSON=true pip3 install sanic
|
||||
SANIC_NO_UVLOOP=true SANIC_NO_UJSON=true pip3 install --no-binary :all: sanic
|
||||
|
||||
You can also install Sanic from `conda-forge <https://anaconda.org/conda-forge/sanic>`_
|
||||
|
||||
@@ -37,7 +37,7 @@ You can also install Sanic from `conda-forge <https://anaconda.org/conda-forge/s
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic("hello_example")
|
||||
|
||||
@app.route("/")
|
||||
async def test(request):
|
||||
|
||||
@@ -15,7 +15,7 @@ Sanic aspires to be simple
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic("App Name")
|
||||
|
||||
@app.route("/")
|
||||
async def test(request):
|
||||
|
||||
@@ -17,7 +17,7 @@ A simple example using default settings would be like this:
|
||||
from sanic.log import logger
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic('test')
|
||||
app = Sanic('logging_example')
|
||||
|
||||
@app.route('/')
|
||||
async def test(request):
|
||||
@@ -47,7 +47,7 @@ initialize ``Sanic`` app:
|
||||
|
||||
.. code:: python
|
||||
|
||||
app = Sanic('test', log_config=LOGGING_CONFIG)
|
||||
app = Sanic('logging_example', log_config=LOGGING_CONFIG)
|
||||
|
||||
And to close logging, simply assign access_log=False:
|
||||
|
||||
@@ -100,4 +100,4 @@ Log Context Parameter Parameter Value Datatype
|
||||
|
||||
The default access log format is ``%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: %(request)s %(message)s %(status)d %(byte)d``
|
||||
|
||||
.. _python3 logging API: https://docs.python.org/3/howto/logging.html
|
||||
.. _python3 logging API: https://docs.python.org/3/howto/logging.html
|
||||
|
||||
@@ -14,8 +14,8 @@ There are two types of middleware: request and response. Both are declared
using the `@app.middleware` decorator, with the decorator's parameter being a
string representing its type: `'request'` or `'response'`.

* Request middleware receives only the `request` as argument.
* Response middleware receives both the `request` and `response`.
* Request middleware receives only the `request` as an argument and is executed in the order it was added.
* Response middleware receives both the `request` and `response` and is executed in *reverse* order.

The simplest middleware doesn't modify the request or response at all:

@@ -64,12 +64,12 @@ this.
    app.run(host="0.0.0.0", port=8000)

The three middlewares are executed in order:
The three middlewares are executed in the following order:

1. The first request middleware **add_key** adds a new key `foo` into request context.
2. Request is routed to handler **index**, which gets the key from context and returns a text response.
3. The first response middleware **custom_banner** changes the HTTP response header *Server* to say *Fake-Server*
4. The second response middleware **prevent_xss** adds the HTTP header for preventing Cross-Site-Scripting (XSS) attacks.
3. The second response middleware **prevent_xss** adds the HTTP header for preventing Cross-Site-Scripting (XSS) attacks.
4. The first response middleware **custom_banner** changes the HTTP response header *Server* to say *Fake-Server*

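A minimal sketch of that ordering (the middleware and handler names match the steps above; the bodies are illustrative):

.. code-block:: python

    from sanic import Sanic
    from sanic.response import text

    app = Sanic("middleware_order_example")

    @app.middleware("request")
    async def add_key(request):
        # Request middleware runs in the order it was added
        request.ctx.foo = "bar"

    @app.middleware("response")
    async def custom_banner(request, response):
        # Response middleware runs in reverse order, so this runs last
        response.headers["Server"] = "Fake-Server"

    @app.middleware("response")
    async def prevent_xss(request, response):
        response.headers["x-xss-protection"] = "1; mode=block"

    @app.route("/")
    async def index(request):
        return text(request.ctx.foo)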
Responding early
|
||||
----------------
|
||||
@@ -132,13 +132,24 @@ For example:
|
||||
async def close_db(app, loop):
|
||||
await app.db.close()
|
||||
|
||||
Note:
|
||||
|
||||
The listeners are deconstructed in the reverse order of being constructed.
|
||||
|
||||
For example:
|
||||
|
||||
If the first listener in before_server_start handler setups a database connection,
|
||||
ones registered after it can rely on that connection being alive both when they are started
|
||||
and stopped, because stopping is done in reverse order, and the database connection is
|
||||
torn down last.
|
||||
|
||||
It's also possible to register a listener using the `register_listener` method.
|
||||
This may be useful if you define your listeners in another module besides
|
||||
the one you instantiate your app in.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic(__name__)
|
||||
|
||||
async def setup_db(app, loop):
|
||||
app.db = await db_setup()
|
||||
|
||||
222  docs/sanic/nginx.rst (new file)
@@ -0,0 +1,222 @@
|
||||
|
||||
.. _nginx:
|
||||
|
||||
Nginx Deployment
|
||||
================
|
||||
|
||||
Introduction
|
||||
~~~~~~~~~~~~
|
||||
|
||||
Although Sanic can be run directly on the Internet, it may be useful to use a proxy
server such as Nginx in front of it. This is particularly useful for running
multiple virtual hosts on the same IP, serving NodeJS or other services beside
a single Sanic app, and it also allows for efficient serving of static files.
SSL and HTTP/2 are also easily implemented on such a proxy.
|
||||
|
||||
We are setting the Sanic app to serve only locally at `127.0.0.1:8000`, while the
|
||||
Nginx installation is responsible for providing the service to public Internet
|
||||
on domain `example.com`. Static files will be served from `/var/www/`.
|
||||
|
||||
|
||||
Proxied Sanic app
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
The app needs to be setup with a secret key used to identify a trusted proxy,
|
||||
so that real client IP and other information can be identified. This protects
|
||||
against anyone on the Internet sending fake headers to spoof their IP addresses
|
||||
and other details. Choose any random string and configure it both on the app
|
||||
and in Nginx config.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic("proxied_example")
|
||||
app.config.FORWARDED_SECRET = "YOUR SECRET"
|
||||
|
||||
@app.get("/")
|
||||
def index(request):
|
||||
# This should display external (public) addresses:
|
||||
return text(
|
||||
f"{request.remote_addr} connected to {request.url_for('index')}\n"
|
||||
f"Forwarded: {request.forwarded}\n"
|
||||
)
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(host='127.0.0.1', port=8000, workers=8, access_log=False)
|
||||
|
||||
Since this is going to be a system service, save your code to
|
||||
`/srv/sanicexample/sanicexample.py`.
|
||||
|
||||
For testing, run your app in a terminal.
|
||||
|
||||
|
||||
Nginx configuration
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Quite a lot of configuration is required to allow fast, transparent proxying, but
for the most part it does not need to be modified, so bear with me.
|
||||
|
||||
Upstream servers need to be configured in a separate `upstream` block to enable
|
||||
HTTP keep-alive, which can drastically improve performance, so we use this
|
||||
instead of directly providing an upstream address in `proxy_pass` directive. In
|
||||
this example, the upstream section is named by `server_name`, i.e. the public
|
||||
domain name, which then also gets passed to Sanic in the `Host` header. You may
|
||||
change the naming as you see fit. Multiple servers may also be provided for
|
||||
load balancing and failover.
|
||||
|
||||
Change the two occurrences of `example.com` to your true domain name, and
|
||||
instead of `YOUR SECRET` use the secret you chose for your app.
|
||||
|
||||
::
|
||||
|
||||
upstream example.com {
|
||||
keepalive 100;
|
||||
server 127.0.0.1:8000;
|
||||
#server unix:/tmp/sanic.sock;
|
||||
}
|
||||
|
||||
server {
|
||||
server_name example.com;
|
||||
listen 443 ssl http2 default_server;
|
||||
listen [::]:443 ssl http2 default_server;
|
||||
# Serve static files if found, otherwise proxy to Sanic
|
||||
location / {
|
||||
root /var/www;
|
||||
try_files $uri @sanic;
|
||||
}
|
||||
location @sanic {
|
||||
proxy_pass http://$server_name;
|
||||
# Allow fast streaming HTTP/1.1 pipes (keep-alive, unbuffered)
|
||||
proxy_http_version 1.1;
|
||||
proxy_request_buffering off;
|
||||
proxy_buffering off;
|
||||
# Proxy forwarding (password configured in app.config.FORWARDED_SECRET)
|
||||
proxy_set_header forwarded "$proxy_forwarded;secret=\"YOUR SECRET\"";
|
||||
# Allow websockets
|
||||
proxy_set_header connection "upgrade";
|
||||
proxy_set_header upgrade $http_upgrade;
|
||||
}
|
||||
}
|
||||
|
||||
To avoid cookie visibility issues and inconsistent addresses on search engines,
|
||||
it is a good idea to redirect all visitors to one true domain, always using
|
||||
HTTPS:
|
||||
|
||||
::
|
||||
|
||||
# Redirect all HTTP to HTTPS with no-WWW
|
||||
server {
|
||||
listen 80 default_server;
|
||||
listen [::]:80 default_server;
|
||||
server_name ~^(?:www\.)?(.*)$;
|
||||
return 301 https://$1$request_uri;
|
||||
}
|
||||
|
||||
# Redirect WWW to no-WWW
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
server_name ~^www\.(.*)$;
|
||||
return 301 $scheme://$1$request_uri;
|
||||
}
|
||||
|
||||
The above config sections may be placed in `/etc/nginx/sites-available/default`
|
||||
or in other site configs (be sure to symlink them to `sites-enabled` if you
|
||||
create new ones).
|
||||
|
||||
Make sure that your SSL certificates are configured in the main config, or
|
||||
add the `ssl_certificate` and `ssl_certificate_key` directives to each
|
||||
`server` section that listens on SSL.
|
||||
|
||||
Additionally, copy&paste all of this into `nginx/conf.d/forwarded.conf`:
|
||||
|
||||
::
|
||||
|
||||
# RFC 7239 Forwarded header for Nginx proxy_pass
|
||||
|
||||
# Add within your server or location block:
|
||||
# proxy_set_header forwarded "$proxy_forwarded;secret=\"YOUR SECRET\"";
|
||||
|
||||
# Configure your upstream web server to identify this proxy by that password
|
||||
# because otherwise anyone on the Internet could spoof these headers and fake
|
||||
# their real IP address and other information to your service.
|
||||
|
||||
|
||||
# Provide the full proxy chain in $proxy_forwarded
|
||||
map $proxy_add_forwarded $proxy_forwarded {
|
||||
default "$proxy_add_forwarded;by=\"_$hostname\";proto=$scheme;host=\"$http_host\";path=\"$request_uri\"";
|
||||
}
|
||||
|
||||
# The following mappings are based on
|
||||
# https://www.nginx.com/resources/wiki/start/topics/examples/forwarded/
|
||||
|
||||
map $remote_addr $proxy_forwarded_elem {
|
||||
# IPv4 addresses can be sent as-is
|
||||
~^[0-9.]+$ "for=$remote_addr";
|
||||
|
||||
# IPv6 addresses need to be bracketed and quoted
|
||||
~^[0-9A-Fa-f:.]+$ "for=\"[$remote_addr]\"";
|
||||
|
||||
# Unix domain socket names cannot be represented in RFC 7239 syntax
|
||||
default "for=unknown";
|
||||
}
|
||||
|
||||
map $http_forwarded $proxy_add_forwarded {
|
||||
# If the incoming Forwarded header is syntactically valid, append to it
|
||||
"~^(,[ \\t]*)*([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?(;([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?)*([ \\t]*,([ \\t]*([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?(;([!#$%&'*+.^_`|~0-9A-Za-z-]+=([!#$%&'*+.^_`|~0-9A-Za-z-]+|\"([\\t \\x21\\x23-\\x5B\\x5D-\\x7E\\x80-\\xFF]|\\\\[\\t \\x21-\\x7E\\x80-\\xFF])*\"))?)*)?)*$" "$http_forwarded, $proxy_forwarded_elem";
|
||||
|
||||
# Otherwise, replace it
|
||||
default "$proxy_forwarded_elem";
|
||||
}
|
||||
|
||||
For installs that don't use `conf.d` and `sites-available`, all of the above
|
||||
configs may also be placed inside the `http` section of the main `nginx.conf`.
|
||||
|
||||
Reload Nginx config after changes:
|
||||
|
||||
::
|
||||
|
||||
sudo nginx -s reload
|
||||
|
||||
Now you should be able to connect your app on `https://example.com/`. Any 404
|
||||
errors and such will be handled by Sanic's error pages, and whenever a static
|
||||
file is present at a given path, it will be served by Nginx.
|
||||
|
||||
|
||||
SSL certificates
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
If you haven't already configured valid certificates on your server, now is a
|
||||
good time to do so. Install `certbot` and `python3-certbot-nginx`, then run
|
||||
|
||||
::
|
||||
|
||||
certbot --nginx -d example.com -d www.example.com
|
||||
|
||||
`<https://www.nginx.com/blog/using-free-ssltls-certificates-from-lets-encrypt-with-nginx/>`_
|
||||
|
||||
Running as a service
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This part is for Linux distributions based on `systemd`. Create a unit file
|
||||
`/etc/systemd/system/sanicexample.service`::
|
||||
|
||||
[Unit]
|
||||
Description=Sanic Example
|
||||
|
||||
[Service]
|
||||
User=nobody
|
||||
WorkingDirectory=/srv/sanicexample
|
||||
ExecStart=/usr/bin/env python3 sanicexample.py
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
Then reload service files, start your service and enable it on boot::
|
||||
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl start sanicexample
|
||||
sudo systemctl enable sanicexample
|
||||
@@ -56,7 +56,6 @@ The difference between Request.args and Request.query_args for the queryset `?ke
|
||||
"url": request.url,
|
||||
"query_string": request.query_string,
|
||||
"args": request.args,
|
||||
"raw_args": request.raw_args,
|
||||
"query_args": request.query_args,
|
||||
})
|
||||
|
||||
@@ -72,12 +71,9 @@ The difference between Request.args and Request.query_args for the queryset `?ke
|
||||
"url":"http:\/\/0.0.0.0:8000\/test_request_args?key1=value1&key2=value2&key1=value3",
|
||||
"query_string":"key1=value1&key2=value2&key1=value3",
|
||||
"args":{"key1":["value1","value3"],"key2":["value2"]},
|
||||
"raw_args":{"key1":"value1","key2":"value2"},
|
||||
"query_args":[["key1","value1"],["key2","value2"],["key1","value3"]]
|
||||
}
|
||||
|
||||
- `raw_args` contains only the first entry of `key1`. Will be deprecated in the future versions.
|
||||
|
||||
- `files` (dictionary of `File` objects) - List of files that have a name, body, and type
|
||||
|
||||
.. code-block:: python
|
||||
@@ -206,7 +202,7 @@ The output will be:
Accessing values using `get` and `getlist`
------------------------------------------

The `request.args` returns a subclass of `dict` called `RequestParameters`.
The `request.args` returns a subclass of `dict` called `RequestParameters`.
The key difference when using this object is the distinction between the `get` and `getlist` methods.

- `get(key, default=None)` operates as normal, except that when the value of
@@ -228,14 +224,14 @@ The key difference when using this object is the distinction between the `get` a
    from sanic import Sanic
    from sanic.response import json

    app = Sanic(name="example")
    app = Sanic(__name__)

    @app.route("/")
    def get_handler(request):
        return json({
            "p1": request.args.getlist("p1")
        })

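To make the `get`/`getlist` distinction concrete, a short sketch (query values taken from the example earlier on this page):

.. code-block:: python

    # Given a request to /test_request_args?key1=value1&key2=value2&key1=value3
    request.args.get("key1")      # -> "value1" (only the first value)
    request.args.getlist("key1")  # -> ["value1", "value3"] (all values)
    request.args.get("key3")      # -> None (missing keys fall back to the default)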
Accessing the handler name with the request.endpoint attribute
|
||||
--------------------------------------------------------------
|
||||
|
||||
@@ -247,7 +243,7 @@ route will return "hello".
|
||||
from sanic.response import text
|
||||
from sanic import Sanic
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic(__name__)
|
||||
|
||||
@app.get("/")
|
||||
def hello(request):
|
||||
|
||||
@@ -107,6 +107,19 @@ Response without encoding the body
|
||||
def handle_request(request):
|
||||
return response.raw(b'raw data')
|
||||
|
||||
Empty
|
||||
--------------
|
||||
|
||||
For responding with an empty message as defined by `RFC 2616 <https://tools.ietf.org/search/rfc2616#section-7.2.1>`_
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from sanic import response
|
||||
|
||||
@app.route('/empty')
|
||||
async def handle_request(request):
|
||||
return response.empty()
|
||||
|
||||
Modify headers or status
|
||||
------------------------
|
||||
|
||||
|
||||
@@ -406,7 +406,7 @@ Build URL for static files
==========================

Sanic supports using `url_for` method to build static file urls. In case if the static url
is pointing to a directory, `filename` parameter to the `url_for` can be ignored. q
is pointing to a directory, `filename` parameter to the `url_for` can be ignored.

.. code-block:: python

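    # Illustrative sketch only -- the original snippet is truncated in this diff.
    # Assumes a static directory registered under the default route name "static":
    app.static('/static', './static')

    app.url_for('static', filename='example.png')  # -> '/static/example.png'
    app.url_for('static')                          # directory URL; `filename` may be omitted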
@@ -16,7 +16,7 @@ IPv6 example:
|
||||
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
|
||||
sock.bind(('::', 7777))
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic("ipv6_example")
|
||||
|
||||
|
||||
@app.route("/")
|
||||
@@ -46,7 +46,7 @@ UNIX socket example:
|
||||
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
|
||||
sock.bind(server_socket)
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic("unix_socket_example")
|
||||
|
||||
|
||||
@app.route("/")
|
||||
|
||||
@@ -16,7 +16,7 @@ Sanic allows you to get request data by stream, as below. When the request ends,
|
||||
from sanic.response import stream, text
|
||||
|
||||
bp = Blueprint('blueprint_request_stream')
|
||||
app = Sanic('request_stream')
|
||||
app = Sanic(__name__)
|
||||
|
||||
|
||||
class SimpleView(HTTPMethodView):
|
||||
|
||||
@@ -12,7 +12,7 @@ To setup a WebSocket:
    from sanic.response import json
    from sanic.websocket import WebSocketProtocol

    app = Sanic()
    app = Sanic("websocket_example")

    @app.websocket('/feed')
    async def feed(request, ws):

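For context, a minimal ``feed`` handler of the kind this route expects (an illustrative echo loop, not the original snippet truncated by this diff):

.. code-block:: python

    @app.websocket('/feed')
    async def feed(request, ws):
        while True:
            data = await ws.recv()   # wait for a message from the client
            await ws.send(data)      # echo it back

    if __name__ == "__main__":
        app.run(host="0.0.0.0", port=8000, protocol=WebSocketProtocol)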
@@ -1,19 +0,0 @@
|
||||
name: py36
|
||||
dependencies:
|
||||
- pip=18.1=py36_0
|
||||
- python=3.6=0
|
||||
- setuptools=40.4.3=py36_0
|
||||
- pip:
|
||||
- httptools>=0.0.10
|
||||
- uvloop>=0.5.3
|
||||
- ujson>=1.35
|
||||
- aiofiles>=0.3.0
|
||||
- websockets>=6.0,<7.0
|
||||
- multidict>=4.0,<5.0
|
||||
- sphinx==1.8.3
|
||||
- sphinx_rtd_theme==0.4.2
|
||||
- recommonmark==0.5.0
|
||||
- httpx==0.9.3
|
||||
- sphinxcontrib-asyncio>=0.2.0
|
||||
- docutils==0.14
|
||||
- pygments==2.3.1
|
||||
43  examples/blueprint_middlware_execution_order.py (new file)
@@ -0,0 +1,43 @@
|
||||
from sanic import Sanic, Blueprint
|
||||
from sanic.response import text
|
||||
'''
|
||||
Demonstrates that blueprint request middleware are executed in the order they
|
||||
are added. And blueprint response middleware are executed in _reverse_ order.
|
||||
On a valid request, it should print "1 2 3 6 5 4" to terminal
|
||||
'''
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
bp = Blueprint("bp_"+__name__)
|
||||
|
||||
@bp.middleware('request')
|
||||
def request_middleware_1(request):
|
||||
print('1')
|
||||
|
||||
@bp.middleware('request')
|
||||
def request_middleware_2(request):
|
||||
print('2')
|
||||
|
||||
@bp.middleware('request')
|
||||
def request_middleware_3(request):
|
||||
print('3')
|
||||
|
||||
@bp.middleware('response')
|
||||
def resp_middleware_4(request, response):
|
||||
print('4')
|
||||
|
||||
@bp.middleware('response')
|
||||
def resp_middleware_5(request, response):
|
||||
print('5')
|
||||
|
||||
@bp.middleware('response')
|
||||
def resp_middleware_6(request, response):
|
||||
print('6')
|
||||
|
||||
@bp.route('/')
|
||||
def pop_handler(request):
|
||||
return text('hello world')
|
||||
|
||||
app.blueprint(bp, url_prefix='/bp')
|
||||
|
||||
app.run(host="0.0.0.0", port=8000, debug=True, auto_reload=False)
|
||||
18  examples/delayed_response.py (new file)
@@ -0,0 +1,18 @@
|
||||
from asyncio import sleep
|
||||
|
||||
from sanic import Sanic, response
|
||||
|
||||
app = Sanic(__name__, strict_slashes=True)
|
||||
|
||||
@app.get("/")
|
||||
async def handler(request):
|
||||
return response.redirect("/sleep/3")
|
||||
|
||||
@app.get("/sleep/<t:number>")
|
||||
async def handler2(request, t=0.3):
|
||||
await sleep(t)
|
||||
return response.text(f"Slept {t:.1f} seconds.\n")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(host="0.0.0.0", port=8000)
|
||||
@@ -1,2 +1,9 @@
|
||||
conda:
|
||||
file: environment.yml
|
||||
version: 2
|
||||
python:
|
||||
version: 3.8
|
||||
install:
|
||||
- method: pip
|
||||
path: .
|
||||
extra_requirements:
|
||||
- docs
|
||||
system_packages: true
|
||||
@@ -1,3 +1,6 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from importlib import import_module
|
||||
from typing import Any, Dict, Optional
|
||||
@@ -6,10 +9,11 @@ from sanic.app import Sanic
|
||||
from sanic.log import logger
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
def main():
|
||||
parser = ArgumentParser(prog="sanic")
|
||||
parser.add_argument("--host", dest="host", type=str, default="127.0.0.1")
|
||||
parser.add_argument("--port", dest="port", type=int, default=8000)
|
||||
parser.add_argument("--unix", dest="unix", type=str, default="")
|
||||
parser.add_argument(
|
||||
"--cert", dest="cert", type=str, help="location of certificate for SSL"
|
||||
)
|
||||
@@ -22,18 +26,22 @@ if __name__ == "__main__":
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
module_path = os.path.abspath(os.getcwd())
|
||||
if module_path not in sys.path:
|
||||
sys.path.append(module_path)
|
||||
|
||||
module_parts = args.module.split(".")
|
||||
module_name = ".".join(module_parts[:-1])
|
||||
app_name = module_parts[-1]
|
||||
|
||||
module = import_module(module_name)
|
||||
app = getattr(module, app_name, None)
|
||||
app_name = type(app).__name__
|
||||
|
||||
if not isinstance(app, Sanic):
|
||||
raise ValueError(
|
||||
"Module is not a Sanic app, it is a {}. "
|
||||
"Perhaps you meant {}.app?".format(
|
||||
type(app).__name__, args.module
|
||||
)
|
||||
f"Module is not a Sanic app, it is a {app_name}. "
|
||||
f"Perhaps you meant {args.module}.app?"
|
||||
)
|
||||
if args.cert is not None or args.key is not None:
|
||||
ssl = {
|
||||
@@ -46,15 +54,20 @@ if __name__ == "__main__":
|
||||
app.run(
|
||||
host=args.host,
|
||||
port=args.port,
|
||||
unix=args.unix,
|
||||
workers=args.workers,
|
||||
debug=args.debug,
|
||||
ssl=ssl,
|
||||
)
|
||||
except ImportError as e:
|
||||
logger.error(
|
||||
"No module named {} found.\n"
|
||||
" Example File: project/sanic_server.py -> app\n"
|
||||
" Example Module: project.sanic_server.app".format(e.name)
|
||||
f"No module named {e.name} found.\n"
|
||||
f" Example File: project/sanic_server.py -> app\n"
|
||||
f" Example Module: project.sanic_server.app"
|
||||
)
|
||||
except ValueError:
|
||||
logger.exception("Failed to run app")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -1 +1 @@
|
||||
__version__ = "19.12.0"
|
||||
__version__ = "20.6.3"
|
||||
|
||||
332  sanic/app.py
@@ -81,6 +81,7 @@ class Sanic:
|
||||
self.sock = None
|
||||
self.strict_slashes = strict_slashes
|
||||
self.listeners = defaultdict(list)
|
||||
self.is_stopping = False
|
||||
self.is_running = False
|
||||
self.is_request_stream = False
|
||||
self.websocket_enabled = False
|
||||
@@ -116,24 +117,12 @@ class Sanic:
|
||||
:param task: future, coroutine or awaitable
|
||||
"""
|
||||
try:
|
||||
if callable(task):
|
||||
try:
|
||||
self.loop.create_task(task(self))
|
||||
except TypeError:
|
||||
self.loop.create_task(task())
|
||||
else:
|
||||
self.loop.create_task(task)
|
||||
loop = self.loop # Will raise SanicError if loop is not started
|
||||
self._loop_add_task(task, self, loop)
|
||||
except SanicException:
|
||||
|
||||
@self.listener("before_server_start")
|
||||
def run(app, loop):
|
||||
if callable(task):
|
||||
try:
|
||||
loop.create_task(task(self))
|
||||
except TypeError:
|
||||
loop.create_task(task())
|
||||
else:
|
||||
loop.create_task(task)
|
||||
self.listener("before_server_start")(
|
||||
partial(self._loop_add_task, task)
|
||||
)
|
||||
|
||||
# Decorator
|
||||
def listener(self, event):
|
||||
@@ -194,25 +183,35 @@ class Sanic:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
def response(handler):
|
||||
if isinstance(handler, tuple):
|
||||
# if a handler fn is already wrapped in a route, the handler
|
||||
# variable will be a tuple of (existing routes, handler fn)
|
||||
routes, handler = handler
|
||||
else:
|
||||
routes = []
|
||||
args = list(signature(handler).parameters.keys())
|
||||
|
||||
if not args:
|
||||
handler_name = handler.__name__
|
||||
|
||||
raise ValueError(
|
||||
"Required parameter `request` missing "
|
||||
"in the {0}() route?".format(handler.__name__)
|
||||
f"Required parameter `request` missing "
|
||||
f"in the {handler_name}() route?"
|
||||
)
|
||||
|
||||
if stream:
|
||||
handler.is_stream = stream
|
||||
|
||||
routes = self.router.add(
|
||||
uri=uri,
|
||||
methods=methods,
|
||||
handler=handler,
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
routes.extend(
|
||||
self.router.add(
|
||||
uri=uri,
|
||||
methods=methods,
|
||||
handler=handler,
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
)
|
||||
return routes, handler
|
||||
|
||||
@@ -451,7 +450,13 @@ class Sanic:
|
||||
|
||||
# Decorator
|
||||
def websocket(
|
||||
self, uri, host=None, strict_slashes=None, subprotocols=None, name=None
|
||||
self,
|
||||
uri,
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
subprotocols=None,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""
|
||||
Decorate a function to be registered as a websocket route
|
||||
@@ -476,53 +481,28 @@ class Sanic:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
def response(handler):
|
||||
async def websocket_handler(request, *args, **kwargs):
|
||||
request.app = self
|
||||
if not getattr(handler, "__blueprintname__", False):
|
||||
request.endpoint = handler.__name__
|
||||
else:
|
||||
request.endpoint = (
|
||||
getattr(handler, "__blueprintname__", "")
|
||||
+ handler.__name__
|
||||
)
|
||||
|
||||
pass
|
||||
|
||||
if self.asgi:
|
||||
ws = request.transport.get_websocket_connection()
|
||||
else:
|
||||
try:
|
||||
protocol = request.transport.get_protocol()
|
||||
except AttributeError:
|
||||
# On Python3.5 the Transport classes in asyncio do not
|
||||
# have a get_protocol() method as in uvloop
|
||||
protocol = request.transport._protocol
|
||||
protocol.app = self
|
||||
|
||||
ws = await protocol.websocket_handshake(
|
||||
request, subprotocols
|
||||
)
|
||||
|
||||
# schedule the application handler
|
||||
# its future is kept in self.websocket_tasks in case it
|
||||
# needs to be cancelled due to the server being stopped
|
||||
fut = ensure_future(handler(request, ws, *args, **kwargs))
|
||||
self.websocket_tasks.add(fut)
|
||||
try:
|
||||
await fut
|
||||
except (CancelledError, ConnectionClosed):
|
||||
pass
|
||||
finally:
|
||||
self.websocket_tasks.remove(fut)
|
||||
await ws.close()
|
||||
|
||||
routes = self.router.add(
|
||||
uri=uri,
|
||||
handler=websocket_handler,
|
||||
methods=frozenset({"GET"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
name=name,
|
||||
if isinstance(handler, tuple):
|
||||
# if a handler fn is already wrapped in a route, the handler
|
||||
# variable will be a tuple of (existing routes, handler fn)
|
||||
routes, handler = handler
|
||||
else:
|
||||
routes = []
|
||||
websocket_handler = partial(
|
||||
self._websocket_handler, handler, subprotocols=subprotocols
|
||||
)
|
||||
websocket_handler.__name__ = (
|
||||
"websocket_handler_" + handler.__name__
|
||||
)
|
||||
routes.extend(
|
||||
self.router.add(
|
||||
uri=uri,
|
||||
handler=websocket_handler,
|
||||
methods=frozenset({"GET"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
)
|
||||
return routes, handler
|
||||
|
||||
@@ -535,6 +515,7 @@ class Sanic:
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
subprotocols=None,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""
|
||||
@@ -562,6 +543,7 @@ class Sanic:
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
subprotocols=subprotocols,
|
||||
version=version,
|
||||
name=name,
|
||||
)(handler)
|
||||
|
||||
@@ -574,36 +556,10 @@ class Sanic:
|
||||
if not self.websocket_enabled:
|
||||
# if the server is stopped, we want to cancel any ongoing
|
||||
# websocket tasks, to allow the server to exit promptly
|
||||
@self.listener("before_server_stop")
|
||||
def cancel_websocket_tasks(app, loop):
|
||||
for task in self.websocket_tasks:
|
||||
task.cancel()
|
||||
self.listener("before_server_stop")(self._cancel_websocket_tasks)
|
||||
|
||||
self.websocket_enabled = enable
|
||||
|
||||
def remove_route(self, uri, clean_cache=True, host=None):
|
||||
"""
|
||||
This method provides the app user a mechanism by which an already
|
||||
existing route can be removed from the :class:`Sanic` object
|
||||
|
||||
.. warning::
|
||||
remove_route is deprecated in v19.06 and will be removed
|
||||
from future versions.
|
||||
|
||||
:param uri: URL Path to be removed from the app
|
||||
:param clean_cache: Instruct sanic if it needs to clean up the LRU
|
||||
route cache
|
||||
:param host: IP address or FQDN specific to the host
|
||||
:return: None
|
||||
"""
|
||||
warnings.warn(
|
||||
"remove_route is deprecated and will be removed "
|
||||
"from future versions.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
self.router.remove(uri, clean_cache, host)
|
||||
|
||||
# Decorator
|
||||
def exception(self, *exceptions):
|
||||
"""Decorate a function to be registered as a handler for exceptions
|
||||
@@ -661,7 +617,7 @@ class Sanic:
|
||||
if _rn not in self.named_response_middleware:
|
||||
self.named_response_middleware[_rn] = deque()
|
||||
if middleware not in self.named_response_middleware[_rn]:
|
||||
self.named_response_middleware[_rn].append(middleware)
|
||||
self.named_response_middleware[_rn].appendleft(middleware)
|
||||
|
||||
# Decorator
|
||||
def middleware(self, middleware_or_request):
|
||||
@@ -810,9 +766,17 @@ class Sanic:
|
||||
uri, route = self.router.find_route_by_view_name(view_name, **kw)
|
||||
if not (uri and route):
|
||||
raise URLBuildError(
|
||||
"Endpoint with name `{}` was not found".format(view_name)
|
||||
f"Endpoint with name `{view_name}` was not found"
|
||||
)
|
||||
|
||||
# If the route has host defined, split that off
|
||||
# TODO: Retain netloc and path separately in Route objects
|
||||
host = uri.find("/")
|
||||
if host > 0:
|
||||
host, uri = uri[:host], uri[host:]
|
||||
else:
|
||||
host = None
|
||||
|
||||
if view_name == "static" or view_name.endswith(".static"):
|
||||
filename = kwargs.pop("filename", None)
|
||||
# it's static folder
|
||||
@@ -824,7 +788,7 @@ class Sanic:
|
||||
if filename.startswith("/"):
|
||||
filename = filename[1:]
|
||||
|
||||
uri = "{}/{}".format(folder_, filename)
|
||||
uri = f"{folder_}/{filename}"
|
||||
|
||||
if uri != "/" and uri.endswith("/"):
|
||||
uri = uri[:-1]
|
||||
@@ -845,7 +809,7 @@ class Sanic:
|
||||
|
||||
netloc = kwargs.pop("_server", None)
|
||||
if netloc is None and external:
|
||||
netloc = self.config.get("SERVER_NAME", "")
|
||||
netloc = host or self.config.get("SERVER_NAME", "")
|
||||
|
||||
if external:
|
||||
if not scheme:
|
||||
@@ -860,7 +824,7 @@ class Sanic:
|
||||
for match in matched_params:
|
||||
name, _type, pattern = self.router.parse_parameter_string(match)
|
||||
# we only want to match against each individual parameter
|
||||
specific_pattern = "^{}$".format(pattern)
|
||||
specific_pattern = f"^{pattern}$"
|
||||
supplied_param = None
|
||||
|
||||
if name in kwargs:
|
||||
@@ -868,9 +832,7 @@ class Sanic:
|
||||
del kwargs[name]
|
||||
else:
|
||||
raise URLBuildError(
|
||||
"Required parameter `{}` was not passed to url_for".format(
|
||||
name
|
||||
)
|
||||
f"Required parameter `{name}` was not passed to url_for"
|
||||
)
|
||||
|
||||
supplied_param = str(supplied_param)
|
||||
@@ -880,23 +842,22 @@ class Sanic:
|
||||
|
||||
if not passes_pattern:
|
||||
if _type != str:
|
||||
type_name = _type.__name__
|
||||
|
||||
msg = (
|
||||
'Value "{}" for parameter `{}` does not '
|
||||
"match pattern for type `{}`: {}".format(
|
||||
supplied_param, name, _type.__name__, pattern
|
||||
)
|
||||
f'Value "{supplied_param}" '
|
||||
f"for parameter `{name}` does not "
|
||||
f"match pattern for type `{type_name}`: {pattern}"
|
||||
)
|
||||
else:
|
||||
msg = (
|
||||
'Value "{}" for parameter `{}` '
|
||||
"does not satisfy pattern {}".format(
|
||||
supplied_param, name, pattern
|
||||
)
|
||||
f'Value "{supplied_param}" for parameter `{name}` '
|
||||
f"does not satisfy pattern {pattern}"
|
||||
)
|
||||
raise URLBuildError(msg)
|
||||
|
||||
# replace the parameter in the URL with the supplied value
|
||||
replacement_regex = "(<{}.*?>)".format(name)
|
||||
replacement_regex = f"(<{name}.*?>)"
|
||||
|
||||
out = re.sub(replacement_regex, supplied_param, out)
|
||||
|
||||
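For context, a hedged sketch of how the parameter pattern check above plays out in practice (app and route names are illustrative):

from sanic import Sanic
from sanic.response import text

app = Sanic("urlfor_demo")

@app.route("/posts/<post_id:int>")
async def post_handler(request, post_id):
    return text(f"post {post_id}")

# Substitutes the parameter into the route template, giving "/posts/5".
# A value that fails the `int` pattern raises URLBuildError as shown above.
url = app.url_for("post_handler", post_id=5)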
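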
@@ -997,9 +958,8 @@ class Sanic:
|
||||
)
|
||||
elif self.debug:
|
||||
response = HTTPResponse(
|
||||
"Error while handling error: {}\nStack: {}".format(
|
||||
e, format_exc()
|
||||
),
|
||||
f"Error while "
|
||||
f"handling error: {e}\nStack: {format_exc()}",
|
||||
status=500,
|
||||
)
|
||||
else:
|
||||
@@ -1062,16 +1022,19 @@ class Sanic:
|
||||
self,
|
||||
host: Optional[str] = None,
|
||||
port: Optional[int] = None,
|
||||
*,
|
||||
debug: bool = False,
|
||||
auto_reload: Optional[bool] = None,
|
||||
ssl: Union[dict, SSLContext, None] = None,
|
||||
sock: Optional[socket] = None,
|
||||
workers: int = 1,
|
||||
protocol: Type[Protocol] = None,
|
||||
protocol: Optional[Type[Protocol]] = None,
|
||||
backlog: int = 100,
|
||||
stop_event: Any = None,
|
||||
register_sys_signals: bool = True,
|
||||
access_log: Optional[bool] = None,
|
||||
**kwargs: Any
|
||||
unix: Optional[str] = None,
|
||||
loop: None = None,
|
||||
) -> None:
|
||||
"""Run the HTTP Server and listen until keyboard interrupt or term
|
||||
signal. On termination, drain connections before closing.
|
||||
@@ -1082,6 +1045,9 @@ class Sanic:
|
||||
:type port: int
|
||||
:param debug: Enables debug output (slows server)
|
||||
:type debug: bool
|
||||
:param auto_reload: Reload app whenever its source code is changed.
|
||||
Enabled by default in debug mode.
|
||||
:type auto_reload: bool
|
||||
:param ssl: SSLContext, or location of certificate and key
|
||||
for SSL encryption of worker(s)
|
||||
:type ssl: SSLContext or dict
|
||||
@@ -1101,9 +1067,11 @@ class Sanic:
|
||||
:type register_sys_signals: bool
|
||||
:param access_log: Enables writing access logs (slows server)
|
||||
:type access_log: bool
|
||||
:param unix: Unix socket to listen on instead of TCP port
|
||||
:type unix: str
|
||||
:return: Nothing
|
||||
"""
|
||||
if "loop" in kwargs:
|
||||
if loop is not None:
|
||||
raise TypeError(
|
||||
"loop is not a valid argument. To use an existing loop, "
|
||||
"change to create_server().\nSee more: "
|
||||
@@ -1111,13 +1079,9 @@ class Sanic:
|
||||
"#asynchronous-support"
|
||||
)
|
||||
|
||||
# Default auto_reload to false
|
||||
auto_reload = False
|
||||
# If debug is set, default it to true (unless on windows)
|
||||
if debug and os.name == "posix":
|
||||
auto_reload = True
|
||||
# Allow for overriding either of the defaults
|
||||
auto_reload = kwargs.get("auto_reload", auto_reload)
|
||||
if auto_reload or auto_reload is None and debug:
|
||||
if os.environ.get("SANIC_SERVER_RUNNING") != "true":
|
||||
return reloader_helpers.watchdog(1.0)
|
||||
|
||||
if sock is None:
|
||||
host, port = host or "127.0.0.1", port or 8000
|
||||
@@ -1143,6 +1107,7 @@ class Sanic:
|
||||
debug=debug,
|
||||
ssl=ssl,
|
||||
sock=sock,
|
||||
unix=unix,
|
||||
workers=workers,
|
||||
protocol=protocol,
|
||||
backlog=backlog,
|
||||
@@ -1152,19 +1117,15 @@ class Sanic:
|
||||
|
||||
try:
|
||||
self.is_running = True
|
||||
self.is_stopping = False
|
||||
if workers > 1 and os.name != "posix":
|
||||
logger.warn(
|
||||
f"Multiprocessing is currently not supported on {os.name},"
|
||||
" using workers=1 instead"
|
||||
)
|
||||
workers = 1
|
||||
if workers == 1:
|
||||
if auto_reload and os.name != "posix":
|
||||
# This condition must be removed after implementing
|
||||
# auto reloader for other operating systems.
|
||||
raise NotImplementedError
|
||||
|
||||
if (
|
||||
auto_reload
|
||||
and os.environ.get("SANIC_SERVER_RUNNING") != "true"
|
||||
):
|
||||
reloader_helpers.watchdog(2)
|
||||
else:
|
||||
serve(**server_settings)
|
||||
serve(**server_settings)
|
||||
else:
|
||||
serve_multiple(server_settings, workers)
|
||||
except BaseException:
|
||||
@@ -1178,12 +1139,15 @@ class Sanic:
|
||||
|
||||
def stop(self):
|
||||
"""This kills the Sanic"""
|
||||
get_event_loop().stop()
|
||||
if not self.is_stopping:
|
||||
self.is_stopping = True
|
||||
get_event_loop().stop()
|
||||
|
||||
async def create_server(
|
||||
self,
|
||||
host: Optional[str] = None,
|
||||
port: Optional[int] = None,
|
||||
*,
|
||||
debug: bool = False,
|
||||
ssl: Union[dict, SSLContext, None] = None,
|
||||
sock: Optional[socket] = None,
|
||||
@@ -1191,6 +1155,7 @@ class Sanic:
|
||||
backlog: int = 100,
|
||||
stop_event: Any = None,
|
||||
access_log: Optional[bool] = None,
|
||||
unix: Optional[str] = None,
|
||||
return_asyncio_server=False,
|
||||
asyncio_server_kwargs=None,
|
||||
) -> Optional[AsyncioServer]:
|
||||
@@ -1260,6 +1225,7 @@ class Sanic:
|
||||
debug=debug,
|
||||
ssl=ssl,
|
||||
sock=sock,
|
||||
unix=unix,
|
||||
loop=get_event_loop(),
|
||||
protocol=protocol,
|
||||
backlog=backlog,
|
||||
@@ -1325,6 +1291,7 @@ class Sanic:
|
||||
debug=False,
|
||||
ssl=None,
|
||||
sock=None,
|
||||
unix=None,
|
||||
workers=1,
|
||||
loop=None,
|
||||
protocol=HttpProtocol,
|
||||
@@ -1363,33 +1330,16 @@ class Sanic:
|
||||
|
||||
server_settings = {
|
||||
"protocol": protocol,
|
||||
"request_class": self.request_class,
|
||||
"is_request_stream": self.is_request_stream,
|
||||
"router": self.router,
|
||||
"host": host,
|
||||
"port": port,
|
||||
"sock": sock,
|
||||
"unix": unix,
|
||||
"ssl": ssl,
|
||||
"app": self,
|
||||
"signal": Signal(),
|
||||
"debug": debug,
|
||||
"request_handler": self.handle_request,
|
||||
"error_handler": self.error_handler,
|
||||
"request_timeout": self.config.REQUEST_TIMEOUT,
|
||||
"response_timeout": self.config.RESPONSE_TIMEOUT,
|
||||
"keep_alive_timeout": self.config.KEEP_ALIVE_TIMEOUT,
|
||||
"request_max_size": self.config.REQUEST_MAX_SIZE,
|
||||
"request_buffer_queue_size": self.config.REQUEST_BUFFER_QUEUE_SIZE,
|
||||
"keep_alive": self.config.KEEP_ALIVE,
|
||||
"loop": loop,
|
||||
"register_sys_signals": register_sys_signals,
|
||||
"backlog": backlog,
|
||||
"access_log": self.config.ACCESS_LOG,
|
||||
"websocket_max_size": self.config.WEBSOCKET_MAX_SIZE,
|
||||
"websocket_max_queue": self.config.WEBSOCKET_MAX_QUEUE,
|
||||
"websocket_read_limit": self.config.WEBSOCKET_READ_LIMIT,
|
||||
"websocket_write_limit": self.config.WEBSOCKET_WRITE_LIMIT,
|
||||
"graceful_shutdown_timeout": self.config.GRACEFUL_SHUTDOWN_TIMEOUT,
|
||||
}
|
||||
|
||||
# -------------------------------------------- #
|
||||
@@ -1426,11 +1376,14 @@ class Sanic:
|
||||
server_settings["run_async"] = True
|
||||
|
||||
# Serve
|
||||
if host and port and os.environ.get("SANIC_SERVER_RUNNING") != "true":
|
||||
if host and port:
|
||||
proto = "http"
|
||||
if ssl is not None:
|
||||
proto = "https"
|
||||
logger.info("Goin' Fast @ {}://{}:{}".format(proto, host, port))
|
||||
if unix:
|
||||
logger.info(f"Goin' Fast @ {unix} {proto}://...")
|
||||
else:
|
||||
logger.info(f"Goin' Fast @ {proto}://{host}:{port}")
|
||||
|
||||
return server_settings
|
||||
|
||||
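A minimal sketch of the new unix-socket listener wired in above; the socket path is illustrative:

from sanic import Sanic
from sanic.response import text

app = Sanic("unix_demo")

@app.route("/")
async def index(request):
    return text("served over a unix socket")

if __name__ == "__main__":
    # Listens on the socket file instead of binding a TCP host/port.
    app.run(unix="/tmp/sanic.sock")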
@@ -1438,6 +1391,55 @@ class Sanic:
|
||||
parts = [self.name, *parts]
|
||||
return ".".join(parts)
|
||||
|
||||
@classmethod
|
||||
def _loop_add_task(cls, task, app, loop):
|
||||
if callable(task):
|
||||
try:
|
||||
loop.create_task(task(app))
|
||||
except TypeError:
|
||||
loop.create_task(task())
|
||||
else:
|
||||
loop.create_task(task)
|
||||
|
||||
@classmethod
|
||||
def _cancel_websocket_tasks(cls, app, loop):
|
||||
for task in app.websocket_tasks:
|
||||
task.cancel()
|
||||
|
||||
async def _websocket_handler(
|
||||
self, handler, request, *args, subprotocols=None, **kwargs
|
||||
):
|
||||
request.app = self
|
||||
if not getattr(handler, "__blueprintname__", False):
|
||||
request.endpoint = handler.__name__
|
||||
else:
|
||||
request.endpoint = (
|
||||
getattr(handler, "__blueprintname__", "") + handler.__name__
|
||||
)
|
||||
|
||||
pass
|
||||
|
||||
if self.asgi:
|
||||
ws = request.transport.get_websocket_connection()
|
||||
else:
|
||||
protocol = request.transport.get_protocol()
|
||||
protocol.app = self
|
||||
|
||||
ws = await protocol.websocket_handshake(request, subprotocols)
|
||||
|
||||
# schedule the application handler
|
||||
# its future is kept in self.websocket_tasks in case it
|
||||
# needs to be cancelled due to the server being stopped
|
||||
fut = ensure_future(handler(request, ws, *args, **kwargs))
|
||||
self.websocket_tasks.add(fut)
|
||||
try:
|
||||
await fut
|
||||
except (CancelledError, ConnectionClosed):
|
||||
pass
|
||||
finally:
|
||||
self.websocket_tasks.remove(fut)
|
||||
await ws.close()
|
||||
|
||||
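An illustrative websocket route; the task bookkeeping above ensures handlers like this are cancelled when the server stops:

from sanic import Sanic

app = Sanic("ws_demo")

@app.websocket("/feed")
async def feed(request, ws):
    # Runs until the client disconnects or the server cancels the task.
    while True:
        data = await ws.recv()
        await ws.send(f"echo: {data}")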
# -------------------------------------------------------------------- #
|
||||
# ASGI
|
||||
# -------------------------------------------------------------------- #
|
||||
|
||||
@@ -22,7 +22,7 @@ from sanic.exceptions import InvalidUsage, ServerError
|
||||
from sanic.log import logger
|
||||
from sanic.request import Request
|
||||
from sanic.response import HTTPResponse, StreamingHTTPResponse
|
||||
from sanic.server import StreamBuffer
|
||||
from sanic.server import ConnInfo, StreamBuffer
|
||||
from sanic.websocket import WebSocketConnection
|
||||
|
||||
|
||||
@@ -255,6 +255,7 @@ class ASGIApp:
|
||||
instance.transport,
|
||||
sanic_app,
|
||||
)
|
||||
instance.request.conn_info = ConnInfo(instance.transport)
|
||||
|
||||
if sanic_app.is_request_stream:
|
||||
is_stream_handler = sanic_app.router.is_stream_handler(
|
||||
|
||||
@@ -143,7 +143,7 @@ class Blueprint:
|
||||
if _routes:
|
||||
routes += _routes
|
||||
|
||||
route_names = [route.name for route in routes]
|
||||
route_names = [route.name for route in routes if route]
|
||||
# Middleware
|
||||
for future in self.middlewares:
|
||||
if future.args or future.kwargs:
|
||||
@@ -151,7 +151,7 @@ class Blueprint:
|
||||
future.middleware,
|
||||
route_names,
|
||||
*future.args,
|
||||
**future.kwargs
|
||||
**future.kwargs,
|
||||
)
|
||||
else:
|
||||
app.register_named_middleware(future.middleware, route_names)
|
||||
@@ -283,6 +283,13 @@ class Blueprint:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
def decorator(handler):
|
||||
nonlocal uri
|
||||
nonlocal host
|
||||
nonlocal strict_slashes
|
||||
nonlocal version
|
||||
nonlocal name
|
||||
|
||||
name = f"{self.name}.{name or handler.__name__}"
|
||||
route = FutureRoute(
|
||||
handler, uri, [], host, strict_slashes, False, version, name
|
||||
)
|
||||
@@ -376,7 +383,7 @@ class Blueprint:
|
||||
"""
|
||||
name = kwargs.pop("name", "static")
|
||||
if not name.startswith(self.name + "."):
|
||||
name = "{}.{}".format(self.name, name)
|
||||
name = f"{self.name}.{name}"
|
||||
kwargs.update(name=name)
|
||||
|
||||
strict_slashes = kwargs.get("strict_slashes")
|
||||
|
||||
@@ -1,6 +1,52 @@
|
||||
import asyncio
|
||||
import signal
|
||||
|
||||
from sys import argv
|
||||
|
||||
from multidict import CIMultiDict # type: ignore
|
||||
|
||||
|
||||
class Header(CIMultiDict):
|
||||
def get_all(self, key):
|
||||
return self.getall(key, default=[])
|
||||
|
||||
|
||||
use_trio = argv[0].endswith("hypercorn") and "trio" in argv
|
||||
|
||||
if use_trio:
|
||||
from trio import open_file as open_async, Path # type: ignore
|
||||
|
||||
def stat_async(path):
|
||||
return Path(path).stat()
|
||||
|
||||
|
||||
else:
|
||||
from aiofiles import open as aio_open # type: ignore
|
||||
from aiofiles.os import stat as stat_async # type: ignore # noqa: F401
|
||||
|
||||
async def open_async(file, mode="r", **kwargs):
|
||||
return aio_open(file, mode, **kwargs)
|
||||
|
||||
|
||||
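A small sketch of the compat helpers defined above, assuming the aiofiles code path; the file path is made up:

from sanic.compat import Header, open_async

headers = Header({"Content-Type": "text/plain"})
assert headers.get_all("content-type") == ["text/plain"]  # case-insensitive lookup

async def read_banner():
    # Resolves to trio or aiofiles depending on how the server was launched.
    async with await open_async("/tmp/banner.txt", mode="rb") as f:
        return await f.read()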
def ctrlc_workaround_for_windows(app):
|
||||
async def stay_active(app):
|
||||
"""Asyncio wakeups to allow receiving SIGINT in Python"""
|
||||
while not die:
|
||||
# If someone else stopped the app, just exit
|
||||
if app.is_stopping:
|
||||
return
|
||||
# Windows Python blocks signal handlers while the event loop is
|
||||
# waiting for I/O. Frequent wakeups keep interrupts flowing.
|
||||
await asyncio.sleep(0.1)
|
||||
# Can't be called from signal handler, so call it from here
|
||||
app.stop()
|
||||
|
||||
def ctrlc_handler(sig, frame):
|
||||
nonlocal die
|
||||
if die:
|
||||
raise KeyboardInterrupt("Non-graceful Ctrl+C")
|
||||
die = True
|
||||
|
||||
die = False
|
||||
signal.signal(signal.SIGINT, ctrlc_handler)
|
||||
app.add_task(stay_active)
|
||||
|
||||
@@ -20,7 +20,7 @@ DEFAULT_CONFIG = {
|
||||
"RESPONSE_TIMEOUT": 60, # 60 seconds
|
||||
"KEEP_ALIVE": True,
|
||||
"KEEP_ALIVE_TIMEOUT": 5, # 5 seconds
|
||||
"WEBSOCKET_MAX_SIZE": 2 ** 20, # 1 megabytes
|
||||
"WEBSOCKET_MAX_SIZE": 2 ** 20, # 1 megabyte
|
||||
"WEBSOCKET_MAX_QUEUE": 32,
|
||||
"WEBSOCKET_READ_LIMIT": 2 ** 16,
|
||||
"WEBSOCKET_WRITE_LIMIT": 2 ** 16,
|
||||
@@ -51,7 +51,7 @@ class Config(dict):
|
||||
try:
|
||||
return self[attr]
|
||||
except KeyError as ke:
|
||||
raise AttributeError("Config has no '{}'".format(ke.args[0]))
|
||||
raise AttributeError(f"Config has no '{ke.args[0]}'")
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
self[attr] = value
|
||||
|
||||
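An illustrative use of the config behavior above (the values shown are just examples):

from sanic import Sanic

app = Sanic("config_demo")

# Attribute access maps straight onto the dict; unknown keys raise
# AttributeError: Config has no 'NO_SUCH_KEY'
app.config.WEBSOCKET_MAX_SIZE = 2 ** 21  # raise the 1 MB default to 2 MB
print(app.config.WEBSOCKET_MAX_SIZE)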
117
sanic/errorpages.py
Normal file
@@ -0,0 +1,117 @@
|
||||
import sys
|
||||
|
||||
from traceback import extract_tb
|
||||
|
||||
from sanic.exceptions import SanicException
|
||||
from sanic.helpers import STATUS_CODES
|
||||
from sanic.response import html
|
||||
|
||||
|
||||
# Here, There Be Dragons (custom HTML formatting to follow)
|
||||
|
||||
|
||||
def escape(text):
|
||||
"""Minimal HTML escaping, not for attribute values (unlike html.escape)."""
|
||||
return f"{text}".replace("&", "&").replace("<", "<")
|
||||
|
||||
|
||||
def exception_response(request, exception, debug):
|
||||
status = 500
|
||||
text = (
|
||||
"The server encountered an internal error "
|
||||
"and cannot complete your request."
|
||||
)
|
||||
|
||||
headers = {}
|
||||
if isinstance(exception, SanicException):
|
||||
text = f"{exception}"
|
||||
status = getattr(exception, "status_code", status)
|
||||
headers = getattr(exception, "headers", headers)
|
||||
elif debug:
|
||||
text = f"{exception}"
|
||||
|
||||
status_text = STATUS_CODES.get(status, b"Error Occurred").decode()
|
||||
title = escape(f"{status} — {status_text}")
|
||||
text = escape(text)
|
||||
|
||||
if debug and not getattr(exception, "quiet", False):
|
||||
return html(
|
||||
f"<!DOCTYPE html><meta charset=UTF-8><title>{title}</title>"
|
||||
f"<style>{TRACEBACK_STYLE}</style>\n"
|
||||
f"<h1>⚠️ {title}</h1><p>{text}\n"
|
||||
f"{_render_traceback_html(request, exception)}",
|
||||
status=status,
|
||||
)
|
||||
|
||||
# Keeping it minimal with trailing newline for pretty curl/console output
|
||||
return html(
|
||||
f"<!DOCTYPE html><meta charset=UTF-8><title>{title}</title>"
|
||||
"<style>html { font-family: sans-serif }</style>\n"
|
||||
f"<h1>⚠️ {title}</h1><p>{text}\n",
|
||||
status=status,
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
|
||||
def _render_exception(exception):
|
||||
frames = extract_tb(exception.__traceback__)
|
||||
frame_html = "".join(TRACEBACK_LINE_HTML.format(frame) for frame in frames)
|
||||
return TRACEBACK_WRAPPER_HTML.format(
|
||||
exc_name=escape(exception.__class__.__name__),
|
||||
exc_value=escape(exception),
|
||||
frame_html=frame_html,
|
||||
)
|
||||
|
||||
|
||||
def _render_traceback_html(request, exception):
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
exceptions = []
|
||||
while exc_value:
|
||||
exceptions.append(_render_exception(exc_value))
|
||||
exc_value = exc_value.__cause__
|
||||
|
||||
traceback_html = TRACEBACK_BORDER.join(reversed(exceptions))
|
||||
appname = escape(request.app.name)
|
||||
name = escape(exception.__class__.__name__)
|
||||
value = escape(exception)
|
||||
path = escape(request.path)
|
||||
return (
|
||||
f"<h2>Traceback of {appname} (most recent call last):</h2>"
|
||||
f"{traceback_html}"
|
||||
"<div class=summary><p>"
|
||||
f"<b>{name}: {value}</b> while handling path <code>{path}</code>"
|
||||
)
|
||||
|
||||
|
||||
TRACEBACK_STYLE = """
|
||||
html { font-family: sans-serif }
|
||||
h2 { color: #888; }
|
||||
.tb-wrapper p { margin: 0 }
|
||||
.frame-border { margin: 1rem }
|
||||
.frame-line > * { padding: 0.3rem 0.6rem }
|
||||
.frame-line { margin-bottom: 0.3rem }
|
||||
.frame-code { font-size: 16px; padding-left: 4ch }
|
||||
.tb-wrapper { border: 1px solid #eee }
|
||||
.tb-header { background: #eee; padding: 0.3rem; font-weight: bold }
|
||||
.frame-descriptor { background: #e2eafb; font-size: 14px }
|
||||
"""
|
||||
|
||||
TRACEBACK_WRAPPER_HTML = (
|
||||
"<div class=tb-header>{exc_name}: {exc_value}</div>"
|
||||
"<div class=tb-wrapper>{frame_html}</div>"
|
||||
)
|
||||
|
||||
TRACEBACK_BORDER = (
|
||||
"<div class=frame-border>"
|
||||
"The above exception was the direct cause of the following exception:"
|
||||
"</div>"
|
||||
)
|
||||
|
||||
TRACEBACK_LINE_HTML = (
|
||||
"<div class=frame-line>"
|
||||
"<p class=frame-descriptor>"
|
||||
"File {0.filename}, line <i>{0.lineno}</i>, "
|
||||
"in <code><b>{0.name}</b></code>"
|
||||
"<p class=frame-code><code>{0.line}</code>"
|
||||
"</div>"
|
||||
)
|
||||
@@ -1,133 +1,18 @@
|
||||
from sanic.helpers import STATUS_CODES
|
||||
|
||||
|
||||
TRACEBACK_STYLE = """
|
||||
<style>
|
||||
body {
|
||||
padding: 20px;
|
||||
font-family: Arial, sans-serif;
|
||||
}
|
||||
|
||||
p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.summary {
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
h1 {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
h3 {
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
h3 code {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.frame-line > * {
|
||||
padding: 5px 10px;
|
||||
}
|
||||
|
||||
.frame-line {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.frame-code {
|
||||
font-size: 16px;
|
||||
padding-left: 30px;
|
||||
}
|
||||
|
||||
.tb-wrapper {
|
||||
border: 1px solid #f3f3f3;
|
||||
}
|
||||
|
||||
.tb-header {
|
||||
background-color: #f3f3f3;
|
||||
padding: 5px 10px;
|
||||
}
|
||||
|
||||
.tb-border {
|
||||
padding-top: 20px;
|
||||
}
|
||||
|
||||
.frame-descriptor {
|
||||
background-color: #e2eafb;
|
||||
}
|
||||
|
||||
.frame-descriptor {
|
||||
font-size: 14px;
|
||||
}
|
||||
</style>
|
||||
"""
|
||||
|
||||
TRACEBACK_WRAPPER_HTML = """
|
||||
<html>
|
||||
<head>
|
||||
{style}
|
||||
</head>
|
||||
<body>
|
||||
{inner_html}
|
||||
<div class="summary">
|
||||
<p>
|
||||
<b>{exc_name}: {exc_value}</b>
|
||||
while handling path <code>{path}</code>
|
||||
</p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
TRACEBACK_WRAPPER_INNER_HTML = """
|
||||
<h1>{exc_name}</h1>
|
||||
<h3><code>{exc_value}</code></h3>
|
||||
<div class="tb-wrapper">
|
||||
<p class="tb-header">Traceback (most recent call last):</p>
|
||||
{frame_html}
|
||||
</div>
|
||||
"""
|
||||
|
||||
TRACEBACK_BORDER = """
|
||||
<div class="tb-border">
|
||||
<b><i>
|
||||
The above exception was the direct cause of the
|
||||
following exception:
|
||||
</i></b>
|
||||
</div>
|
||||
"""
|
||||
|
||||
TRACEBACK_LINE_HTML = """
|
||||
<div class="frame-line">
|
||||
<p class="frame-descriptor">
|
||||
File {0.filename}, line <i>{0.lineno}</i>,
|
||||
in <code><b>{0.name}</b></code>
|
||||
</p>
|
||||
<p class="frame-code"><code>{0.line}</code></p>
|
||||
</div>
|
||||
"""
|
||||
|
||||
INTERNAL_SERVER_ERROR_HTML = """
|
||||
<h1>Internal Server Error</h1>
|
||||
<p>
|
||||
The server encountered an internal error and cannot complete
|
||||
your request.
|
||||
</p>
|
||||
"""
|
||||
|
||||
|
||||
_sanic_exceptions = {}
|
||||
|
||||
|
||||
def add_status_code(code):
|
||||
def add_status_code(code, quiet=None):
|
||||
"""
|
||||
Decorator used for adding exceptions to :class:`SanicException`.
|
||||
"""
|
||||
|
||||
def class_decorator(cls):
|
||||
cls.status_code = code
|
||||
if quiet or quiet is None and code != 500:
|
||||
cls.quiet = True
|
||||
_sanic_exceptions[code] = cls
|
||||
return cls
|
||||
|
||||
@@ -135,12 +20,16 @@ def add_status_code(code):
|
||||
|
||||
|
||||
class SanicException(Exception):
|
||||
def __init__(self, message, status_code=None):
|
||||
def __init__(self, message, status_code=None, quiet=None):
|
||||
super().__init__(message)
|
||||
|
||||
if status_code is not None:
|
||||
self.status_code = status_code
|
||||
|
||||
# quiet=None/False/True with None meaning choose by status
|
||||
if quiet or quiet is None and status_code not in (None, 500):
|
||||
self.quiet = True
|
||||
|
||||
|
||||
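A hedged sketch of the new quiet flag; the route is illustrative:

from sanic import Sanic
from sanic.exceptions import SanicException

app = Sanic("quiet_demo")

@app.route("/teapot")
async def teapot(request):
    # quiet=True keeps the traceback out of the error log;
    # by default only 500-class errors stay loud.
    raise SanicException("I am a teapot", status_code=418, quiet=True)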
@add_status_code(404)
|
||||
class NotFound(SanicException):
|
||||
@@ -156,10 +45,7 @@ class InvalidUsage(SanicException):
|
||||
class MethodNotSupported(SanicException):
|
||||
def __init__(self, message, method, allowed_methods):
|
||||
super().__init__(message)
|
||||
self.headers = dict()
|
||||
self.headers["Allow"] = ", ".join(allowed_methods)
|
||||
if method in ["HEAD", "PATCH", "PUT", "DELETE"]:
|
||||
self.headers["Content-Length"] = 0
|
||||
self.headers = {"Allow": ", ".join(allowed_methods)}
|
||||
|
||||
|
||||
@add_status_code(500)
|
||||
@@ -212,10 +98,7 @@ class HeaderNotFound(InvalidUsage):
|
||||
class ContentRangeError(SanicException):
|
||||
def __init__(self, message, content_range):
|
||||
super().__init__(message)
|
||||
self.headers = {
|
||||
"Content-Type": "text/plain",
|
||||
"Content-Range": "bytes */%s" % (content_range.total,),
|
||||
}
|
||||
self.headers = {"Content-Range": f"bytes */{content_range.total}"}
|
||||
|
||||
|
||||
@add_status_code(417)
|
||||
@@ -282,7 +165,7 @@ class Unauthorized(SanicException):
|
||||
challenge = ", ".join(values)
|
||||
|
||||
self.headers = {
|
||||
"WWW-Authenticate": "{} {}".format(scheme, challenge).rstrip()
|
||||
"WWW-Authenticate": f"{scheme} {challenge}".rstrip()
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,21 +1,13 @@
|
||||
import sys
|
||||
|
||||
from traceback import extract_tb, format_exc
|
||||
from traceback import format_exc
|
||||
|
||||
from sanic.errorpages import exception_response
|
||||
from sanic.exceptions import (
|
||||
INTERNAL_SERVER_ERROR_HTML,
|
||||
TRACEBACK_BORDER,
|
||||
TRACEBACK_LINE_HTML,
|
||||
TRACEBACK_STYLE,
|
||||
TRACEBACK_WRAPPER_HTML,
|
||||
TRACEBACK_WRAPPER_INNER_HTML,
|
||||
ContentRangeError,
|
||||
HeaderNotFound,
|
||||
InvalidRangeType,
|
||||
SanicException,
|
||||
)
|
||||
from sanic.log import logger
|
||||
from sanic.response import html, text
|
||||
from sanic.response import text
|
||||
|
||||
|
||||
class ErrorHandler:
|
||||
@@ -40,35 +32,6 @@ class ErrorHandler:
|
||||
self.cached_handlers = {}
|
||||
self.debug = False
|
||||
|
||||
def _render_exception(self, exception):
|
||||
frames = extract_tb(exception.__traceback__)
|
||||
|
||||
frame_html = []
|
||||
for frame in frames:
|
||||
frame_html.append(TRACEBACK_LINE_HTML.format(frame))
|
||||
|
||||
return TRACEBACK_WRAPPER_INNER_HTML.format(
|
||||
exc_name=exception.__class__.__name__,
|
||||
exc_value=exception,
|
||||
frame_html="".join(frame_html),
|
||||
)
|
||||
|
||||
def _render_traceback_html(self, exception, request):
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
exceptions = []
|
||||
|
||||
while exc_value:
|
||||
exceptions.append(self._render_exception(exc_value))
|
||||
exc_value = exc_value.__cause__
|
||||
|
||||
return TRACEBACK_WRAPPER_HTML.format(
|
||||
style=TRACEBACK_STYLE,
|
||||
exc_name=exception.__class__.__name__,
|
||||
exc_value=exception,
|
||||
inner_html=TRACEBACK_BORDER.join(reversed(exceptions)),
|
||||
path=request.path,
|
||||
)
|
||||
|
||||
def add(self, exception, handler):
|
||||
"""
|
||||
Add a new exception handler to an already existing handler object.
|
||||
@@ -166,27 +129,17 @@ class ErrorHandler:
|
||||
:class:`Exception`
|
||||
:return:
|
||||
"""
|
||||
self.log(format_exc())
|
||||
try:
|
||||
url = repr(request.url)
|
||||
except AttributeError:
|
||||
url = "unknown"
|
||||
quiet = getattr(exception, "quiet", False)
|
||||
if quiet is False:
|
||||
try:
|
||||
url = repr(request.url)
|
||||
except AttributeError:
|
||||
url = "unknown"
|
||||
|
||||
response_message = "Exception occurred while handling uri: %s"
|
||||
logger.exception(response_message, url)
|
||||
self.log(format_exc())
|
||||
logger.exception("Exception occurred while handling uri: %s", url)
|
||||
|
||||
if issubclass(type(exception), SanicException):
|
||||
return text(
|
||||
"Error: {}".format(exception),
|
||||
status=getattr(exception, "status_code", 500),
|
||||
headers=getattr(exception, "headers", dict()),
|
||||
)
|
||||
elif self.debug:
|
||||
html_output = self._render_traceback_html(exception, request)
|
||||
|
||||
return html(html_output, status=500)
|
||||
else:
|
||||
return html(INTERNAL_SERVER_ERROR_HTML, status=500)
|
||||
return exception_response(request, exception, self.debug)
|
||||
|
||||
|
||||
class ContentRangeHandler:
|
||||
|
||||
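For reference, a minimal sketch of overriding the default error page with an app-level handler (names are illustrative); anything not caught this way still falls through to exception_response():

from sanic import Sanic
from sanic.exceptions import NotFound
from sanic.response import text

app = Sanic("errors_demo")

@app.exception(NotFound)
async def handle_404(request, exception):
    return text(f"Nothing at {request.url}", status=404)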
@@ -3,6 +3,8 @@ import re
|
||||
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
|
||||
from urllib.parse import unquote
|
||||
|
||||
from sanic.helpers import STATUS_CODES
|
||||
|
||||
|
||||
HeaderIterable = Iterable[Tuple[str, Any]] # Values convertible to str
|
||||
Options = Dict[str, Union[int, str]] # key=value fields in various headers
|
||||
@@ -180,3 +182,19 @@ def format_http1(headers: HeaderIterable) -> bytes:
|
||||
- Values are converted into strings if necessary.
|
||||
"""
|
||||
return "".join(f"{name}: {val}\r\n" for name, val in headers).encode()
|
||||
|
||||
|
||||
def format_http1_response(
|
||||
status: int, headers: HeaderIterable, body=b""
|
||||
) -> bytes:
|
||||
"""Format a full HTTP/1.1 response.
|
||||
|
||||
- If `body` is included, content-length must be specified in headers.
|
||||
"""
|
||||
headerbytes = format_http1(headers)
|
||||
return b"HTTP/1.1 %d %b\r\n%b\r\n%b" % (
|
||||
status,
|
||||
STATUS_CODES.get(status, b"UNKNOWN"),
|
||||
headerbytes,
|
||||
body,
|
||||
)
|
||||
|
||||
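A quick sketch of format_http1_response as defined above:

from sanic.headers import format_http1_response

body = b"hello"
headers = [("Content-Type", "text/plain"), ("Content-Length", len(body))]
raw = format_http1_response(200, headers, body)
# -> b'HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 5\r\n\r\nhello'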
@@ -3,7 +3,6 @@ import signal
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from multiprocessing import Process
|
||||
from time import sleep
|
||||
|
||||
|
||||
@@ -35,101 +34,26 @@ def _iter_module_files():
|
||||
|
||||
def _get_args_for_reloading():
|
||||
"""Returns the executable."""
|
||||
rv = [sys.executable]
|
||||
main_module = sys.modules["__main__"]
|
||||
mod_spec = getattr(main_module, "__spec__", None)
|
||||
if sys.argv[0] in ("", "-c"):
|
||||
raise RuntimeError(
|
||||
f"Autoreloader cannot work with argv[0]={sys.argv[0]!r}"
|
||||
)
|
||||
if mod_spec:
|
||||
# Parent exe was launched as a module rather than a script
|
||||
rv.extend(["-m", mod_spec.name])
|
||||
if len(sys.argv) > 1:
|
||||
rv.extend(sys.argv[1:])
|
||||
else:
|
||||
rv.extend(sys.argv)
|
||||
return rv
|
||||
return [sys.executable, "-m", mod_spec.name] + sys.argv[1:]
|
||||
return [sys.executable] + sys.argv
|
||||
|
||||
|
||||
def restart_with_reloader():
|
||||
"""Create a new process and a subprocess in it with the same arguments as
|
||||
this one.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
args = _get_args_for_reloading()
|
||||
new_environ = os.environ.copy()
|
||||
new_environ["SANIC_SERVER_RUNNING"] = "true"
|
||||
cmd = " ".join(args)
|
||||
worker_process = Process(
|
||||
target=subprocess.call,
|
||||
args=(cmd,),
|
||||
kwargs={"cwd": cwd, "shell": True, "env": new_environ},
|
||||
return subprocess.Popen(
|
||||
_get_args_for_reloading(),
|
||||
env={**os.environ, "SANIC_SERVER_RUNNING": "true"},
|
||||
)
|
||||
worker_process.start()
|
||||
return worker_process
|
||||
|
||||
|
||||
def kill_process_children_unix(pid):
|
||||
"""Find and kill child processes of a process (maximum two level).
|
||||
|
||||
:param pid: PID of parent process (process ID)
|
||||
:return: Nothing
|
||||
"""
|
||||
root_process_path = "/proc/{pid}/task/{pid}/children".format(pid=pid)
|
||||
if not os.path.isfile(root_process_path):
|
||||
return
|
||||
with open(root_process_path) as children_list_file:
|
||||
children_list_pid = children_list_file.read().split()
|
||||
|
||||
for child_pid in children_list_pid:
|
||||
children_proc_path = "/proc/%s/task/%s/children" % (
|
||||
child_pid,
|
||||
child_pid,
|
||||
)
|
||||
if not os.path.isfile(children_proc_path):
|
||||
continue
|
||||
with open(children_proc_path) as children_list_file_2:
|
||||
children_list_pid_2 = children_list_file_2.read().split()
|
||||
for _pid in children_list_pid_2:
|
||||
try:
|
||||
os.kill(int(_pid), signal.SIGTERM)
|
||||
except ProcessLookupError:
|
||||
continue
|
||||
try:
|
||||
os.kill(int(child_pid), signal.SIGTERM)
|
||||
except ProcessLookupError:
|
||||
continue
|
||||
|
||||
|
||||
def kill_process_children_osx(pid):
|
||||
"""Find and kill child processes of a process.
|
||||
|
||||
:param pid: PID of parent process (process ID)
|
||||
:return: Nothing
|
||||
"""
|
||||
subprocess.run(["pkill", "-P", str(pid)])
|
||||
|
||||
|
||||
def kill_process_children(pid):
|
||||
"""Find and kill child processes of a process.
|
||||
|
||||
:param pid: PID of parent process (process ID)
|
||||
:return: Nothing
|
||||
"""
|
||||
if sys.platform == "darwin":
|
||||
kill_process_children_osx(pid)
|
||||
elif sys.platform == "linux":
|
||||
kill_process_children_unix(pid)
|
||||
else:
|
||||
pass # should signal error here
|
||||
|
||||
|
||||
def kill_program_completly(proc):
|
||||
"""Kill worker and it's child processes and exit.
|
||||
|
||||
:param proc: worker process (process ID)
|
||||
:return: Nothing
|
||||
"""
|
||||
kill_process_children(proc.pid)
|
||||
proc.terminate()
|
||||
os._exit(0)
|
||||
|
||||
|
||||
def watchdog(sleep_interval):
|
||||
@@ -138,30 +62,42 @@ def watchdog(sleep_interval):
|
||||
:param sleep_interval: interval in seconds.
|
||||
:return: Nothing
|
||||
"""
|
||||
|
||||
def interrupt_self(*args):
|
||||
raise KeyboardInterrupt
|
||||
|
||||
mtimes = {}
|
||||
signal.signal(signal.SIGTERM, interrupt_self)
|
||||
if os.name == "nt":
|
||||
signal.signal(signal.SIGBREAK, interrupt_self)
|
||||
|
||||
worker_process = restart_with_reloader()
|
||||
signal.signal(
|
||||
signal.SIGTERM, lambda *args: kill_program_completly(worker_process)
|
||||
)
|
||||
signal.signal(
|
||||
signal.SIGINT, lambda *args: kill_program_completly(worker_process)
|
||||
)
|
||||
while True:
|
||||
for filename in _iter_module_files():
|
||||
try:
|
||||
mtime = os.stat(filename).st_mtime
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
old_time = mtimes.get(filename)
|
||||
if old_time is None:
|
||||
mtimes[filename] = mtime
|
||||
continue
|
||||
elif mtime > old_time:
|
||||
kill_process_children(worker_process.pid)
|
||||
try:
|
||||
while True:
|
||||
need_reload = False
|
||||
|
||||
for filename in _iter_module_files():
|
||||
try:
|
||||
mtime = os.stat(filename).st_mtime
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
old_time = mtimes.get(filename)
|
||||
if old_time is None:
|
||||
mtimes[filename] = mtime
|
||||
elif mtime > old_time:
|
||||
mtimes[filename] = mtime
|
||||
need_reload = True
|
||||
|
||||
if need_reload:
|
||||
worker_process.terminate()
|
||||
worker_process.wait()
|
||||
worker_process = restart_with_reloader()
|
||||
mtimes[filename] = mtime
|
||||
break
|
||||
|
||||
sleep(sleep_interval)
|
||||
sleep(sleep_interval)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
worker_process.terminate()
|
||||
worker_process.wait()
|
||||
|
||||
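For context, the reworked watchdog above is typically engaged like this (module layout is illustrative):

from sanic import Sanic
from sanic.response import text

app = Sanic("reload_demo")

@app.route("/")
async def index(request):
    return text("edit this file and save to trigger a restart")

if __name__ == "__main__":
    # auto_reload follows debug by default; the watchdog re-launches the worker
    # whenever an imported module file changes on disk.
    app.run(debug=True, auto_reload=True)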
221
sanic/request.py
@@ -1,6 +1,5 @@
|
||||
import asyncio
|
||||
import email.utils
|
||||
import warnings
|
||||
|
||||
from collections import defaultdict, namedtuple
|
||||
from http.cookies import SimpleCookie
|
||||
@@ -56,6 +55,14 @@ class StreamBuffer:
|
||||
self._queue.task_done()
|
||||
return payload
|
||||
|
||||
async def __aiter__(self):
|
||||
"""Support `async for data in request.stream`"""
|
||||
while True:
|
||||
data = await self.read()
|
||||
if not data:
|
||||
break
|
||||
yield data
|
||||
|
||||
async def put(self, payload):
|
||||
await self._queue.put(payload)
|
||||
|
||||
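An illustrative streaming handler using the new async-for support on request.stream:

from sanic import Sanic
from sanic.response import text

app = Sanic("stream_demo")

@app.post("/upload", stream=True)
async def upload(request):
    received = 0
    async for chunk in request.stream:
        received += len(chunk)
    return text(f"received {received} bytes")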
@@ -80,6 +87,7 @@ class Request:
|
||||
"_socket",
|
||||
"app",
|
||||
"body",
|
||||
"conn_info",
|
||||
"ctx",
|
||||
"endpoint",
|
||||
"headers",
|
||||
@@ -110,6 +118,7 @@ class Request:
|
||||
|
||||
# Init but do not inhale
|
||||
self.body_init()
|
||||
self.conn_info = None
|
||||
self.ctx = SimpleNamespace()
|
||||
self.parsed_forwarded = None
|
||||
self.parsed_json = None
|
||||
@@ -123,44 +132,37 @@ class Request:
|
||||
self.endpoint = None
|
||||
|
||||
def __repr__(self):
|
||||
return "<{0}: {1} {2}>".format(
|
||||
self.__class__.__name__, self.method, self.path
|
||||
)
|
||||
|
||||
def get(self, key, default=None):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
return self.ctx.__dict__.get(key, default)
|
||||
|
||||
def __contains__(self, key):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
return key in self.ctx.__dict__
|
||||
|
||||
def __getitem__(self, key):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
return self.ctx.__dict__[key]
|
||||
|
||||
def __delitem__(self, key):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
del self.ctx.__dict__[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
setattr(self.ctx, key, value)
|
||||
class_name = self.__class__.__name__
|
||||
return f"<{class_name}: {self.method} {self.path}>"
|
||||
|
||||
def body_init(self):
|
||||
""".. deprecated:: 20.3"""
|
||||
self.body = []
|
||||
|
||||
def body_push(self, data):
|
||||
""".. deprecated:: 20.3"""
|
||||
self.body.append(data)
|
||||
|
||||
def body_finish(self):
|
||||
""".. deprecated:: 20.3"""
|
||||
self.body = b"".join(self.body)
|
||||
|
||||
async def receive_body(self):
|
||||
"""Receive request.body, if not already received.
|
||||
|
||||
Streaming handlers may call this to receive the full body.
|
||||
|
||||
This is added as a compatibility shim in Sanic 20.3 because future
|
||||
versions of Sanic will make all requests streaming and will use this
|
||||
function instead of the non-async body_init/push/finish functions.
|
||||
|
||||
Please make an issue if your code depends on the old functionality and
|
||||
cannot be upgraded to the new API.
|
||||
"""
|
||||
if not self.stream:
|
||||
return
|
||||
self.body = b"".join([data async for data in self.stream])
|
||||
|
||||
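A hedged sketch of the receive_body() shim described above, for a streaming route that still wants the whole body:

from sanic import Sanic
from sanic.response import text

app = Sanic("body_demo")

@app.post("/echo", stream=True)
async def echo(request):
    # Drain the stream into request.body instead of iterating it chunk by chunk.
    await request.receive_body()
    return text(request.body.decode(errors="replace"))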
@property
|
||||
def json(self):
|
||||
if self.parsed_json is None:
|
||||
@@ -282,18 +284,6 @@ class Request:
|
||||
|
||||
args = property(get_args)
|
||||
|
||||
@property
|
||||
def raw_args(self) -> dict:
|
||||
if self.app.debug: # pragma: no cover
|
||||
warnings.simplefilter("default")
|
||||
warnings.warn(
|
||||
"Use of raw_args will be deprecated in "
|
||||
"the future versions. Please use args or query_args "
|
||||
"properties instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
return {k: v[0] for k, v in self.args.items()}
|
||||
|
||||
def get_query_args(
|
||||
self,
|
||||
keep_blank_values: bool = False,
|
||||
@@ -361,56 +351,55 @@ class Request:
|
||||
self._cookies = {}
|
||||
return self._cookies
|
||||
|
||||
@property
|
||||
def content_type(self):
|
||||
return self.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)
|
||||
|
||||
@property
|
||||
def match_info(self):
|
||||
"""return matched info after resolving route"""
|
||||
return self.app.router.get(self)[2]
|
||||
|
||||
# Transport properties (obtained from local interface only)
|
||||
|
||||
@property
|
||||
def ip(self):
|
||||
"""
|
||||
:return: peer ip of the socket
|
||||
"""
|
||||
if not hasattr(self, "_socket"):
|
||||
self._get_address()
|
||||
return self._ip
|
||||
return self.conn_info.client if self.conn_info else ""
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
"""
|
||||
:return: peer port of the socket
|
||||
"""
|
||||
if not hasattr(self, "_socket"):
|
||||
self._get_address()
|
||||
return self._port
|
||||
return self.conn_info.client_port if self.conn_info else 0
|
||||
|
||||
@property
|
||||
def socket(self):
|
||||
if not hasattr(self, "_socket"):
|
||||
self._get_address()
|
||||
return self._socket
|
||||
|
||||
def _get_address(self):
|
||||
self._socket = self.transport.get_extra_info("peername") or (
|
||||
None,
|
||||
None,
|
||||
)
|
||||
self._ip = self._socket[0]
|
||||
self._port = self._socket[1]
|
||||
return self.conn_info.peername if self.conn_info else (None, None)
|
||||
|
||||
@property
|
||||
def server_name(self):
|
||||
"""
|
||||
Attempt to get the server's external hostname in this order:
|
||||
`config.SERVER_NAME`, proxied or direct Host headers
|
||||
:func:`Request.host`
|
||||
def path(self) -> str:
|
||||
"""Path of the local HTTP request."""
|
||||
return self._parsed_url.path.decode("utf-8")
|
||||
|
||||
:return: the server name without port number
|
||||
:rtype: str
|
||||
"""
|
||||
server_name = self.app.config.get("SERVER_NAME")
|
||||
if server_name:
|
||||
host = server_name.split("//", 1)[-1].split("/", 1)[0]
|
||||
return parse_host(host)[0]
|
||||
return parse_host(self.host)[0]
|
||||
# Proxy properties (using SERVER_NAME/forwarded/request/transport info)
|
||||
|
||||
@property
|
||||
def forwarded(self):
|
||||
"""
|
||||
Active proxy information obtained from request headers, as specified in
|
||||
Sanic configuration.
|
||||
|
||||
Field names by, for, proto, host, port and path are normalized.
|
||||
- for and by IPv6 addresses are bracketed
|
||||
- port (int) is only set by port headers, not from host.
|
||||
- path is url-unencoded
|
||||
|
||||
Additional values may be available from new style Forwarded headers.
|
||||
"""
|
||||
if self.parsed_forwarded is None:
|
||||
self.parsed_forwarded = (
|
||||
parse_forwarded(self.headers, self.app.config)
|
||||
@@ -420,50 +409,30 @@ class Request:
|
||||
return self.parsed_forwarded
|
||||
|
||||
@property
|
||||
def server_port(self):
|
||||
def remote_addr(self) -> str:
|
||||
"""
|
||||
Attempt to get the server's external port number in this order:
|
||||
`config.SERVER_NAME`, proxied or direct Host headers
|
||||
:func:`Request.host`,
|
||||
actual port used by the transport layer socket.
|
||||
:return: server port
|
||||
:rtype: int
|
||||
"""
|
||||
if self.forwarded:
|
||||
return self.forwarded.get("port") or (
|
||||
80 if self.scheme in ("http", "ws") else 443
|
||||
)
|
||||
return (
|
||||
parse_host(self.host)[1]
|
||||
or self.transport.get_extra_info("sockname")[1]
|
||||
)
|
||||
|
||||
@property
|
||||
def remote_addr(self):
|
||||
"""Attempt to return the original client ip based on `forwarded`,
|
||||
`x-forwarded-for` or `x-real-ip`. If HTTP headers are unavailable or
|
||||
untrusted, returns an empty string.
|
||||
|
||||
:return: original client ip.
|
||||
Client IP address, if available.
|
||||
1. proxied remote address `self.forwarded['for']`
|
||||
2. local remote address `self.ip`
|
||||
:return: IPv4, bracketed IPv6, UNIX socket name or arbitrary string
|
||||
"""
|
||||
if not hasattr(self, "_remote_addr"):
|
||||
self._remote_addr = self.forwarded.get("for", "")
|
||||
self._remote_addr = self.forwarded.get("for", "") # or self.ip
|
||||
return self._remote_addr
|
||||
|
||||
@property
|
||||
def scheme(self):
|
||||
def scheme(self) -> str:
|
||||
"""
|
||||
Attempt to get the request scheme.
|
||||
Seeking the value in this order:
|
||||
`forwarded` header, `x-forwarded-proto` header,
|
||||
`x-scheme` header, the sanic app itself.
|
||||
|
||||
Determine request scheme.
|
||||
1. `config.SERVER_NAME` if in full URL format
|
||||
2. proxied proto/scheme
|
||||
3. local connection protocol
|
||||
:return: http|https|ws|wss or arbitrary value given by the headers.
|
||||
:rtype: str
|
||||
"""
|
||||
forwarded_proto = self.forwarded.get("proto")
|
||||
if forwarded_proto:
|
||||
return forwarded_proto
|
||||
if "//" in self.app.config.get("SERVER_NAME", ""):
|
||||
return self.app.config.SERVER_NAME.split("//")[0]
|
||||
if "proto" in self.forwarded:
|
||||
return self.forwarded["proto"]
|
||||
|
||||
if (
|
||||
self.app.websocket_enabled
|
||||
@@ -479,25 +448,41 @@ class Request:
|
||||
return scheme
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
def host(self) -> str:
|
||||
"""
|
||||
:return: proxied or direct Host header. Hostname and port number may be
|
||||
separated by sanic.headers.parse_host(request.host).
|
||||
The currently effective server 'host' (hostname or hostname:port).
|
||||
1. `config.SERVER_NAME` overrides any client headers
|
||||
2. proxied host of original request
|
||||
3. request host header
|
||||
hostname and port may be separated by
|
||||
`sanic.headers.parse_host(request.host)`.
|
||||
:return: the first matching host found, or empty string
|
||||
"""
|
||||
return self.forwarded.get("host", self.headers.get("Host", ""))
|
||||
server_name = self.app.config.get("SERVER_NAME")
|
||||
if server_name:
|
||||
return server_name.split("//", 1)[-1].split("/", 1)[0]
|
||||
return self.forwarded.get("host") or self.headers.get("host", "")
|
||||
|
||||
@property
|
||||
def content_type(self):
|
||||
return self.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)
|
||||
def server_name(self) -> str:
|
||||
"""The hostname the client connected to, by `request.host`."""
|
||||
return parse_host(self.host)[0] or ""
|
||||
|
||||
@property
|
||||
def match_info(self):
|
||||
"""return matched info after resolving route"""
|
||||
return self.app.router.get(self)[2]
|
||||
def server_port(self) -> int:
|
||||
"""
|
||||
The port the client connected to, by forwarded `port` or
|
||||
`request.host`.
|
||||
|
||||
Default port is returned as 80 and 443 based on `request.scheme`.
|
||||
"""
|
||||
port = self.forwarded.get("port") or parse_host(self.host)[1]
|
||||
return port or (80 if self.scheme in ("http", "ws") else 443)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return self._parsed_url.path.decode("utf-8")
|
||||
def server_path(self) -> str:
|
||||
"""Full path of current URL. Uses proxied or local path."""
|
||||
return self.forwarded.get("path") or self.path
|
||||
|
||||
@property
|
||||
def query_string(self):
|
||||
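An illustrative endpoint showing the reorganized host/port/scheme properties above; the returned values depend on SERVER_NAME, proxy headers and the transport:

from sanic import Sanic
from sanic.response import json

app = Sanic("proxy_demo")

@app.route("/whoami")
async def whoami(request):
    return json({
        "host": request.host,                # SERVER_NAME, forwarded host, or Host header
        "server_name": request.server_name,
        "server_port": request.server_port,
        "scheme": request.scheme,
        "remote_addr": request.remote_addr,  # empty unless a trusted proxy header is present
    })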
@@ -538,7 +523,7 @@ class Request:
|
||||
):
|
||||
netloc = host
|
||||
else:
|
||||
netloc = "{}:{}".format(host, port)
|
||||
netloc = f"{host}:{port}"
|
||||
|
||||
return self.app.url_for(
|
||||
view_name, _external=True, _scheme=scheme, _server=netloc, **kwargs
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import warnings
|
||||
|
||||
from functools import partial
|
||||
from mimetypes import guess_type
|
||||
from os import path
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from aiofiles import open as open_async # type: ignore
|
||||
|
||||
from sanic.compat import Header
|
||||
from sanic.compat import Header, open_async
|
||||
from sanic.cookies import CookieJar
|
||||
from sanic.headers import format_http1
|
||||
from sanic.helpers import STATUS_CODES, has_message_body, remove_entity_headers
|
||||
from sanic.headers import format_http1, format_http1_response
|
||||
from sanic.helpers import has_message_body, remove_entity_headers
|
||||
|
||||
|
||||
try:
|
||||
@@ -23,12 +23,7 @@ except ImportError:
|
||||
|
||||
class BaseHTTPResponse:
|
||||
def _encode_body(self, data):
|
||||
try:
|
||||
# Try to encode it regularly
|
||||
return data.encode()
|
||||
except AttributeError:
|
||||
# Convert it to a str if you can't
|
||||
return str(data).encode()
|
||||
return data.encode() if hasattr(data, "encode") else data
|
||||
|
||||
def _parse_headers(self):
|
||||
return format_http1(self.headers.items())
|
||||
@@ -39,6 +34,32 @@ class BaseHTTPResponse:
|
||||
self._cookies = CookieJar(self.headers)
|
||||
return self._cookies
|
||||
|
||||
def get_headers(
|
||||
self,
|
||||
version="1.1",
|
||||
keep_alive=False,
|
||||
keep_alive_timeout=None,
|
||||
body=b"",
|
||||
):
|
||||
""".. deprecated:: 20.3:
|
||||
This function is not public API and will be removed."""
|
||||
|
||||
# self.headers get priority over content_type
|
||||
if self.content_type and "Content-Type" not in self.headers:
|
||||
self.headers["Content-Type"] = self.content_type
|
||||
|
||||
if keep_alive:
|
||||
self.headers["Connection"] = "keep-alive"
|
||||
if keep_alive_timeout is not None:
|
||||
self.headers["Keep-Alive"] = keep_alive_timeout
|
||||
else:
|
||||
self.headers["Connection"] = "close"
|
||||
|
||||
if self.status in (304, 412):
|
||||
self.headers = remove_entity_headers(self.headers)
|
||||
|
||||
return format_http1_response(self.status, self.headers.items(), body)
|
||||
|
||||
|
||||
class StreamingHTTPResponse(BaseHTTPResponse):
|
||||
__slots__ = (
|
||||
@@ -56,7 +77,7 @@ class StreamingHTTPResponse(BaseHTTPResponse):
|
||||
streaming_fn,
|
||||
status=200,
|
||||
headers=None,
|
||||
content_type="text/plain",
|
||||
content_type="text/plain; charset=utf-8",
|
||||
chunked=True,
|
||||
):
|
||||
self.content_type = content_type
|
||||
@@ -65,14 +86,14 @@ class StreamingHTTPResponse(BaseHTTPResponse):
|
||||
self.headers = Header(headers or {})
|
||||
self.chunked = chunked
|
||||
self._cookies = None
|
||||
self.protocol = None
|
||||
|
||||
async def write(self, data):
|
||||
"""Writes a chunk of data to the streaming response.
|
||||
|
||||
:param data: bytes-ish data to be written.
|
||||
:param data: str or bytes-ish data to be written.
|
||||
"""
|
||||
if type(data) != bytes:
|
||||
data = self._encode_body(data)
|
||||
data = self._encode_body(data)
|
||||
|
||||
if self.chunked:
|
||||
await self.protocol.push_data(b"%x\r\n%b\r\n" % (len(data), data))
|
||||
@@ -104,33 +125,11 @@ class StreamingHTTPResponse(BaseHTTPResponse):
|
||||
def get_headers(
|
||||
self, version="1.1", keep_alive=False, keep_alive_timeout=None
|
||||
):
|
||||
# This is all returned in a kind-of funky way
|
||||
# We tried to make this as fast as possible in pure python
|
||||
timeout_header = b""
|
||||
if keep_alive and keep_alive_timeout is not None:
|
||||
timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout
|
||||
|
||||
if self.chunked and version == "1.1":
|
||||
self.headers["Transfer-Encoding"] = "chunked"
|
||||
self.headers.pop("Content-Length", None)
|
||||
self.headers["Content-Type"] = self.headers.get(
|
||||
"Content-Type", self.content_type
|
||||
)
|
||||
|
||||
headers = self._parse_headers()
|
||||
|
||||
if self.status == 200:
|
||||
status = b"OK"
|
||||
else:
|
||||
status = STATUS_CODES.get(self.status)
|
||||
|
||||
return (b"HTTP/%b %d %b\r\n" b"%b" b"%b\r\n") % (
|
||||
version.encode(),
|
||||
self.status,
|
||||
status,
|
||||
timeout_header,
|
||||
headers,
|
||||
)
|
||||
return super().get_headers(version, keep_alive, keep_alive_timeout)
|
||||
|
||||
|
||||
class HTTPResponse(BaseHTTPResponse):
|
||||
@@ -145,23 +144,18 @@ class HTTPResponse(BaseHTTPResponse):
|
||||
body_bytes=b"",
|
||||
):
|
||||
self.content_type = content_type
|
||||
|
||||
if body is not None:
|
||||
self.body = self._encode_body(body)
|
||||
else:
|
||||
self.body = body_bytes
|
||||
|
||||
self.body = body_bytes if body is None else self._encode_body(body)
|
||||
self.status = status
|
||||
self.headers = Header(headers or {})
|
||||
self._cookies = None
|
||||
|
||||
def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
|
||||
# This is all returned in a kind-of funky way
|
||||
# We tried to make this as fast as possible in pure python
|
||||
timeout_header = b""
|
||||
if keep_alive and keep_alive_timeout is not None:
|
||||
timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout
|
||||
if body_bytes:
|
||||
warnings.warn(
|
||||
"Parameter `body_bytes` is deprecated, use `body` instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
|
||||
body = b""
|
||||
if has_message_body(self.status):
|
||||
body = self.body
|
||||
@@ -169,31 +163,7 @@ class HTTPResponse(BaseHTTPResponse):
|
||||
"Content-Length", len(self.body)
|
||||
)
|
||||
|
||||
# self.headers get priority over content_type
|
||||
if self.content_type and "Content-Type" not in self.headers:
|
||||
self.headers["Content-Type"] = self.content_type
|
||||
|
||||
if self.status in (304, 412):
|
||||
self.headers = remove_entity_headers(self.headers)
|
||||
|
||||
headers = self._parse_headers()
|
||||
|
||||
if self.status == 200:
|
||||
status = b"OK"
|
||||
else:
|
||||
status = STATUS_CODES.get(self.status, b"UNKNOWN RESPONSE")
|
||||
|
||||
return (
|
||||
b"HTTP/%b %d %b\r\n" b"Connection: %b\r\n" b"%b" b"%b\r\n" b"%b"
|
||||
) % (
|
||||
version.encode(),
|
||||
self.status,
|
||||
status,
|
||||
b"keep-alive" if keep_alive else b"close",
|
||||
timeout_header,
|
||||
headers,
|
||||
body,
|
||||
)
|
||||
return self.get_headers(version, keep_alive, keep_alive_timeout, body)
|
||||
|
||||
@property
|
||||
def cookies(self):
|
||||
@@ -202,16 +172,14 @@ class HTTPResponse(BaseHTTPResponse):
|
||||
return self._cookies
|
||||
|
||||
|
||||
def empty(
|
||||
status=204, headers=None,
|
||||
):
|
||||
def empty(status=204, headers=None):
|
||||
"""
|
||||
Returns an empty response to the client.
|
||||
|
||||
:param status Response code.
|
||||
:param headers Custom Headers.
|
||||
"""
|
||||
return HTTPResponse(body_bytes=b"", status=status, headers=headers,)
|
||||
return HTTPResponse(body=b"", status=status, headers=headers)
|
||||
|
||||
|
||||
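An illustrative use of the updated empty() helper:

from sanic import Sanic
from sanic.response import empty

app = Sanic("empty_demo")

@app.delete("/items/<item_id:int>")
async def delete_item(request, item_id):
    # 204 No Content; the response body stays blank.
    return empty()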
def json(
|
||||
@@ -220,7 +188,7 @@ def json(
|
||||
headers=None,
|
||||
content_type="application/json",
|
||||
dumps=json_dumps,
|
||||
**kwargs
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Returns response object with body in json format.
|
||||
@@ -249,6 +217,21 @@ def text(
|
||||
:param headers: Custom Headers.
|
||||
:param content_type: the content type (string) of the response
|
||||
"""
|
||||
if not isinstance(body, str):
|
||||
warnings.warn(
|
||||
"Types other than str will be deprecated in future versions for"
|
||||
f" response.text, got type {type(body).__name__})",
|
||||
DeprecationWarning,
|
||||
)
|
||||
# Type conversions are deprecated and quite b0rked but still supported for
|
||||
# text() until applications get fixed. This try-except should be removed.
|
||||
try:
|
||||
# Avoid repr(body).encode() b0rkage for body that is already encoded.
|
||||
# memoryview used only to test bytes-ishness.
|
||||
with memoryview(body):
|
||||
pass
|
||||
except TypeError:
|
||||
body = f"{body}" # no-op if body is already str
|
||||
return HTTPResponse(
|
||||
body, status=status, headers=headers, content_type=content_type
|
||||
)
|
||||
@@ -266,10 +249,7 @@ def raw(
|
||||
:param content_type: the content type (string) of the response.
|
||||
"""
|
||||
return HTTPResponse(
|
||||
body_bytes=body,
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=content_type,
|
||||
body=body, status=status, headers=headers, content_type=content_type,
|
||||
)
|
||||
|
||||
|
||||
@@ -277,10 +257,14 @@ def html(body, status=200, headers=None):
|
||||
"""
|
||||
Returns response object with body in html format.
|
||||
|
||||
:param body: Response data to be encoded.
|
||||
:param body: str or bytes-ish, or an object with __html__ or _repr_html_.
|
||||
:param status: Response code.
|
||||
:param headers: Custom Headers.
|
||||
"""
|
||||
if hasattr(body, "__html__"):
|
||||
body = body.__html__()
|
||||
elif hasattr(body, "_repr_html_"):
|
||||
body = body._repr_html_()
|
||||
return HTTPResponse(
|
||||
body,
|
||||
status=status,
|
||||
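A hedged sketch of the new __html__ support in html(); the class is made up:

from sanic import Sanic
from sanic.response import html

app = Sanic("html_demo")

class Greeting:
    def __html__(self):
        return "<h1>Hello!</h1>"

@app.route("/")
async def index(request):
    return html(Greeting())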
@@ -308,29 +292,27 @@ async def file(
|
||||
headers = headers or {}
|
||||
if filename:
|
||||
headers.setdefault(
|
||||
"Content-Disposition", 'attachment; filename="{}"'.format(filename)
|
||||
"Content-Disposition", f'attachment; filename="{filename}"'
|
||||
)
|
||||
filename = filename or path.split(location)[-1]
|
||||
|
||||
async with open_async(location, mode="rb") as _file:
|
||||
async with await open_async(location, mode="rb") as f:
|
||||
if _range:
|
||||
await _file.seek(_range.start)
|
||||
out_stream = await _file.read(_range.size)
|
||||
headers["Content-Range"] = "bytes %s-%s/%s" % (
|
||||
_range.start,
|
||||
_range.end,
|
||||
_range.total,
|
||||
)
|
||||
await f.seek(_range.start)
|
||||
out_stream = await f.read(_range.size)
|
||||
headers[
|
||||
"Content-Range"
|
||||
] = f"bytes {_range.start}-{_range.end}/{_range.total}"
|
||||
status = 206
|
||||
else:
|
||||
out_stream = await _file.read()
|
||||
out_stream = await f.read()
|
||||
|
||||
mime_type = mime_type or guess_type(filename)[0] or "text/plain"
|
||||
return HTTPResponse(
|
||||
body=out_stream,
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=mime_type,
|
||||
body_bytes=out_stream,
|
||||
)
|
||||
|
||||
|
||||
@@ -357,43 +339,36 @@ async def file_stream(
|
||||
headers = headers or {}
|
||||
if filename:
|
||||
headers.setdefault(
|
||||
"Content-Disposition", 'attachment; filename="{}"'.format(filename)
|
||||
"Content-Disposition", f'attachment; filename="{filename}"'
|
||||
)
|
||||
filename = filename or path.split(location)[-1]
|
||||
mime_type = mime_type or guess_type(filename)[0] or "text/plain"
|
||||
if _range:
|
||||
start = _range.start
|
||||
end = _range.end
|
||||
total = _range.total
|
||||
|
||||
_file = await open_async(location, mode="rb")
|
||||
headers["Content-Range"] = f"bytes {start}-{end}/{total}"
|
||||
status = 206
|
||||
|
||||
async def _streaming_fn(response):
|
||||
nonlocal _file, chunk_size
|
||||
try:
|
||||
async with await open_async(location, mode="rb") as f:
|
||||
if _range:
|
||||
chunk_size = min((_range.size, chunk_size))
|
||||
await _file.seek(_range.start)
|
||||
await f.seek(_range.start)
|
||||
to_send = _range.size
|
||||
while to_send > 0:
|
||||
content = await _file.read(chunk_size)
|
||||
content = await f.read(min((_range.size, chunk_size)))
|
||||
if len(content) < 1:
|
||||
break
|
||||
to_send -= len(content)
|
||||
await response.write(content)
|
||||
else:
|
||||
while True:
|
||||
content = await _file.read(chunk_size)
|
||||
content = await f.read(chunk_size)
|
||||
if len(content) < 1:
|
||||
break
|
||||
await response.write(content)
|
||||
finally:
|
||||
await _file.close()
|
||||
return # Returning from this fn closes the stream
|
||||
|
||||
mime_type = mime_type or guess_type(filename)[0] or "text/plain"
|
||||
if _range:
|
||||
headers["Content-Range"] = "bytes %s-%s/%s" % (
|
||||
_range.start,
|
||||
_range.end,
|
||||
_range.total,
|
||||
)
|
||||
status = 206
|
||||
return StreamingHTTPResponse(
|
||||
streaming_fn=_streaming_fn,
|
||||
status=status,
|
||||
|
||||
@@ -109,7 +109,7 @@ class Router:
|
||||
name, pattern = parameter_string.split(":", 1)
|
||||
if not name:
|
||||
raise ValueError(
|
||||
"Invalid parameter syntax: {}".format(parameter_string)
|
||||
f"Invalid parameter syntax: {parameter_string}"
|
||||
)
|
||||
|
||||
default = (str, pattern)
|
||||
@@ -143,7 +143,7 @@ class Router:
|
||||
routes = []
|
||||
if version is not None:
|
||||
version = re.escape(str(version).strip("/").lstrip("v"))
|
||||
uri = "/".join(["/v{}".format(version), uri.lstrip("/")])
|
||||
uri = "/".join([f"/v{version}", uri.lstrip("/")])
|
||||
# add regular version
|
||||
routes.append(self._add(uri, methods, handler, host, name))
|
||||
|
||||
@@ -203,8 +203,8 @@ class Router:
|
||||
else:
|
||||
if not isinstance(host, Iterable):
|
||||
raise ValueError(
|
||||
"Expected either string or Iterable of "
|
||||
"host strings, not {!r}".format(host)
|
||||
f"Expected either string or Iterable of "
|
||||
f"host strings, not {host!r}"
|
||||
)
|
||||
|
||||
for host_ in host:
|
||||
@@ -225,8 +225,7 @@ class Router:
|
||||
|
||||
if name in parameter_names:
|
||||
raise ParameterNameConflicts(
|
||||
"Multiple parameter named <{name}> "
|
||||
"in route uri {uri}".format(name=name, uri=uri)
|
||||
f"Multiple parameter named <{name}> " f"in route uri {uri}"
|
||||
)
|
||||
parameter_names.add(name)
|
||||
|
||||
@@ -240,23 +239,23 @@ class Router:
|
||||
elif re.search(r"/", pattern):
|
||||
properties["unhashable"] = True
|
||||
|
||||
return "({})".format(pattern)
|
||||
return f"({pattern})"
|
||||
|
||||
pattern_string = re.sub(self.parameter_pattern, add_parameter, uri)
|
||||
pattern = re.compile(r"^{}$".format(pattern_string))
|
||||
pattern = re.compile(fr"^{pattern_string}$")
|
||||
|
||||
def merge_route(route, methods, handler):
|
||||
# merge to the existing route when possible.
|
||||
if not route.methods or not methods:
|
||||
# method-unspecified routes are not mergeable.
|
||||
raise RouteExists("Route already registered: {}".format(uri))
|
||||
raise RouteExists(f"Route already registered: {uri}")
|
||||
elif route.methods.intersection(methods):
|
||||
# already existing method is not overloadable.
|
||||
duplicated = methods.intersection(route.methods)
|
||||
duplicated_methods = ",".join(list(duplicated))
|
||||
|
||||
raise RouteExists(
|
||||
"Route already registered: {} [{}]".format(
|
||||
uri, ",".join(list(duplicated))
|
||||
)
|
||||
f"Route already registered: {uri} [{duplicated_methods}]"
|
||||
)
|
||||
if isinstance(route.handler, CompositionView):
|
||||
view = route.handler
|
||||
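For context on what merge_route guards against: registering the same URI and method twice is rejected with RouteExists. A hedged sketch of how that surfaces to application code on this Sanic line (names are illustrative):

    from sanic import Sanic
    from sanic.response import text
    from sanic.router import RouteExists

    app = Sanic("duplicate_route_demo")

    @app.route("/item", methods=["GET"])
    def first(request):
        return text("one")

    try:
        @app.route("/item", methods=["GET"])
        def second(request):
            return text("two")
    except RouteExists as e:
        print(e)  # Route already registered: /item [GET]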
@@ -296,9 +295,9 @@ class Router:
|
||||
name = name.split("_static_", 1)[-1]
|
||||
|
||||
if hasattr(handler, "__blueprintname__"):
|
||||
handler_name = "{}.{}".format(
|
||||
handler.__blueprintname__, name or handler.__name__
|
||||
)
|
||||
bp_name = handler.__blueprintname__
|
||||
|
||||
handler_name = f"{bp_name}.{name or handler.__name__}"
|
||||
else:
|
||||
handler_name = name or getattr(handler, "__name__", None)
|
||||
|
||||
@@ -352,37 +351,6 @@ class Router:
|
||||
else:
|
||||
return -1, None
|
||||
|
||||
def remove(self, uri, clean_cache=True, host=None):
|
||||
if host is not None:
|
||||
uri = host + uri
|
||||
try:
|
||||
route = self.routes_all.pop(uri)
|
||||
for handler_name, pairs in self.routes_names.items():
|
||||
if pairs[0] == uri:
|
||||
self.routes_names.pop(handler_name)
|
||||
break
|
||||
|
||||
for handler_name, pairs in self.routes_static_files.items():
|
||||
if pairs[0] == uri:
|
||||
self.routes_static_files.pop(handler_name)
|
||||
break
|
||||
|
||||
except KeyError:
|
||||
raise RouteDoesNotExist("Route was not registered: {}".format(uri))
|
||||
|
||||
if route in self.routes_always_check:
|
||||
self.routes_always_check.remove(route)
|
||||
elif (
|
||||
url_hash(uri) in self.routes_dynamic
|
||||
and route in self.routes_dynamic[url_hash(uri)]
|
||||
):
|
||||
self.routes_dynamic[url_hash(uri)].remove(route)
|
||||
else:
|
||||
self.routes_static.pop(uri)
|
||||
|
||||
if clean_cache:
|
||||
self._get.cache_clear()
|
||||
|
||||
@lru_cache(maxsize=ROUTER_CACHE_SIZE)
|
||||
def find_route_by_view_name(self, view_name, name=None):
|
||||
"""Find a route in the router based on the specified view name.
|
||||
@@ -442,7 +410,7 @@ class Router:
|
||||
# Check against known static routes
|
||||
route = self.routes_static.get(url)
|
||||
method_not_supported = MethodNotSupported(
|
||||
"Method {} not allowed for URL {}".format(method, url),
|
||||
f"Method {method} not allowed for URL {url}",
|
||||
method=method,
|
||||
allowed_methods=self.get_supported_methods(url),
|
||||
)
|
||||
@@ -472,7 +440,7 @@ class Router:
|
||||
# Route was found but the methods didn't match
|
||||
if route_found:
|
||||
raise method_not_supported
|
||||
raise NotFound("Requested URL {} not found".format(url))
|
||||
raise NotFound(f"Requested URL {url} not found")
|
||||
|
||||
kwargs = {
|
||||
p.name: p.cast(value)
|
||||
|
||||
sanic/server.py
@@ -1,20 +1,24 @@
import asyncio
import multiprocessing
import os
import secrets
import socket
import stat
import sys
import traceback

from collections import deque
from functools import partial
from inspect import isawaitable
from multiprocessing import Process
from ipaddress import ip_address
from signal import SIG_IGN, SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from socket import SO_REUSEADDR, SOL_SOCKET, socket
from time import time

from httptools import HttpRequestParser # type: ignore
from httptools.parser.errors import HttpParserError # type: ignore

from sanic.compat import Header
from sanic.compat import Header, ctrlc_workaround_for_windows
from sanic.exceptions import (
HeaderExpectationFailed,
InvalidUsage,
@@ -36,11 +40,48 @@ try:
except ImportError:
pass

OS_IS_WINDOWS = os.name == "nt"


class Signal:
stopped = False


class ConnInfo:
"""Local and remote addresses and SSL status info."""

__slots__ = (
"sockname",
"peername",
"server",
"server_port",
"client",
"client_port",
"ssl",
)

def __init__(self, transport, unix=None):
self.ssl = bool(transport.get_extra_info("sslcontext"))
self.server = self.client = ""
self.server_port = self.client_port = 0
self.peername = None
self.sockname = addr = transport.get_extra_info("sockname")
if isinstance(addr, str): # UNIX socket
self.server = unix or addr
return
# IPv4 (ip, port) or IPv6 (ip, port, flowinfo, scopeid)
if isinstance(addr, tuple):
self.server = addr[0] if len(addr) == 2 else f"[{addr[0]}]"
self.server_port = addr[1]
# self.server gets non-standard port appended
if addr[1] != (443 if self.ssl else 80):
self.server = f"{self.server}:{addr[1]}"
self.peername = addr = transport.get_extra_info("peername")
if isinstance(addr, tuple):
self.client = addr[0] if len(addr) == 2 else f"[{addr[0]}]"
self.client_port = addr[1]


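The formatting rules ConnInfo applies can be summarised outside the class; a small sketch under the assumption that addr is whatever transport.get_extra_info() returned (a tuple for TCP, a string for a UNIX socket):

    def format_endpoint(addr, ssl=False):
        # Mirrors ConnInfo: UNIX path kept as-is, IPv6 bracketed, default port omitted.
        if isinstance(addr, str):
            return addr
        host = addr[0] if len(addr) == 2 else f"[{addr[0]}]"
        if addr[1] != (443 if ssl else 80):
            host = f"{host}:{addr[1]}"
        return host

    assert format_endpoint(("127.0.0.1", 8000)) == "127.0.0.1:8000"
    assert format_endpoint(("::1", 443, 0, 0), ssl=True) == "[::1]"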
class HttpProtocol(asyncio.Protocol):
|
||||
"""
|
||||
This class provides a basic HTTP implementation of the sanic framework.
|
||||
@@ -54,6 +95,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
"transport",
|
||||
"connections",
|
||||
"signal",
|
||||
"conn_info",
|
||||
# request params
|
||||
"parser",
|
||||
"request",
|
||||
@@ -68,7 +110,6 @@ class HttpProtocol(asyncio.Protocol):
|
||||
"request_buffer_queue_size",
|
||||
"request_class",
|
||||
"is_request_stream",
|
||||
"router",
|
||||
"error_handler",
|
||||
# enable or disable access log purpose
|
||||
"access_log",
|
||||
@@ -86,7 +127,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
"_keep_alive",
|
||||
"_header_fragment",
|
||||
"state",
|
||||
"_debug",
|
||||
"_unix",
|
||||
"_body_chunks",
|
||||
)
|
||||
|
||||
@@ -95,46 +136,38 @@ class HttpProtocol(asyncio.Protocol):
|
||||
*,
|
||||
loop,
|
||||
app,
|
||||
request_handler,
|
||||
error_handler,
|
||||
signal=Signal(),
|
||||
connections=None,
|
||||
request_timeout=60,
|
||||
response_timeout=60,
|
||||
keep_alive_timeout=5,
|
||||
request_max_size=None,
|
||||
request_buffer_queue_size=100,
|
||||
request_class=None,
|
||||
access_log=True,
|
||||
keep_alive=True,
|
||||
is_request_stream=False,
|
||||
router=None,
|
||||
state=None,
|
||||
debug=False,
|
||||
**kwargs
|
||||
unix=None,
|
||||
**kwargs,
|
||||
):
|
||||
asyncio.set_event_loop(loop)
|
||||
self.loop = loop
|
||||
deprecated_loop = self.loop if sys.version_info < (3, 7) else None
|
||||
self.app = app
|
||||
self.transport = None
|
||||
self.conn_info = None
|
||||
self.request = None
|
||||
self.parser = None
|
||||
self.url = None
|
||||
self.headers = None
|
||||
self.router = router
|
||||
self.signal = signal
|
||||
self.access_log = access_log
|
||||
self.access_log = self.app.config.ACCESS_LOG
|
||||
self.connections = connections if connections is not None else set()
|
||||
self.request_handler = request_handler
|
||||
self.error_handler = error_handler
|
||||
self.request_timeout = request_timeout
|
||||
self.request_buffer_queue_size = request_buffer_queue_size
|
||||
self.response_timeout = response_timeout
|
||||
self.keep_alive_timeout = keep_alive_timeout
|
||||
self.request_max_size = request_max_size
|
||||
self.request_class = request_class or Request
|
||||
self.is_request_stream = is_request_stream
|
||||
self.request_handler = self.app.handle_request
|
||||
self.error_handler = self.app.error_handler
|
||||
self.request_timeout = self.app.config.REQUEST_TIMEOUT
|
||||
self.request_buffer_queue_size = (
|
||||
self.app.config.REQUEST_BUFFER_QUEUE_SIZE
|
||||
)
|
||||
self.response_timeout = self.app.config.RESPONSE_TIMEOUT
|
||||
self.keep_alive_timeout = self.app.config.KEEP_ALIVE_TIMEOUT
|
||||
self.request_max_size = self.app.config.REQUEST_MAX_SIZE
|
||||
self.request_class = self.app.request_class or Request
|
||||
self.is_request_stream = self.app.is_request_stream
|
||||
self._is_stream_handler = False
|
||||
self._not_paused = asyncio.Event(loop=loop)
|
||||
self._not_paused = asyncio.Event(loop=deprecated_loop)
|
||||
self._total_request_size = 0
|
||||
self._request_timeout_handler = None
|
||||
self._response_timeout_handler = None
|
||||
@@ -143,12 +176,12 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self._last_response_time = None
|
||||
self._request_handler_task = None
|
||||
self._request_stream_task = None
|
||||
self._keep_alive = keep_alive
|
||||
self._keep_alive = self.app.config.KEEP_ALIVE
|
||||
self._header_fragment = b""
|
||||
self.state = state if state else {}
|
||||
if "requests_count" not in self.state:
|
||||
self.state["requests_count"] = 0
|
||||
self._debug = debug
|
||||
self._unix = unix
|
||||
self._not_paused.set()
|
||||
self._body_chunks = deque()
|
||||
|
||||
@@ -177,6 +210,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.request_timeout, self.request_timeout_callback
|
||||
)
|
||||
self.transport = transport
|
||||
self.conn_info = ConnInfo(transport, unix=self._unix)
|
||||
self._last_request_time = time()
|
||||
|
||||
def connection_lost(self, exc):
|
||||
@@ -276,7 +310,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.parser.feed_data(data)
|
||||
except HttpParserError:
|
||||
message = "Bad Request"
|
||||
if self._debug:
|
||||
if self.app.debug:
|
||||
message += "\n" + traceback.format_exc()
|
||||
self.write_error(InvalidUsage(message))
|
||||
|
||||
@@ -314,6 +348,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
transport=self.transport,
|
||||
app=self.app,
|
||||
)
|
||||
self.request.conn_info = self.conn_info
|
||||
# Remove any existing KeepAlive handler here,
|
||||
# It will be recreated if required on the new request.
|
||||
if self._keep_alive_timeout_handler:
|
||||
@@ -324,7 +359,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.expect_handler()
|
||||
|
||||
if self.is_request_stream:
|
||||
self._is_stream_handler = self.router.is_stream_handler(
|
||||
self._is_stream_handler = self.app.router.is_stream_handler(
|
||||
self.request
|
||||
)
|
||||
if self._is_stream_handler:
|
||||
@@ -343,9 +378,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
else:
|
||||
self.write_error(
|
||||
HeaderExpectationFailed(
|
||||
"Unknown Expect: {expect}".format(expect=expect)
|
||||
)
|
||||
HeaderExpectationFailed(f"Unknown Expect: {expect}")
|
||||
)
|
||||
|
||||
def on_body(self, body):
|
||||
@@ -452,13 +485,9 @@ class HttpProtocol(asyncio.Protocol):
|
||||
extra["host"] = "UNKNOWN"
|
||||
if self.request is not None:
|
||||
if self.request.ip:
|
||||
extra["host"] = "{0}:{1}".format(
|
||||
self.request.ip, self.request.port
|
||||
)
|
||||
extra["host"] = f"{self.request.ip}:{self.request.port}"
|
||||
|
||||
extra["request"] = "{0} {1}".format(
|
||||
self.request.method, self.request.url
|
||||
)
|
||||
extra["request"] = f"{self.request.method} {self.request.url}"
|
||||
else:
|
||||
extra["request"] = "nil"
|
||||
|
||||
@@ -488,16 +517,14 @@ class HttpProtocol(asyncio.Protocol):
|
||||
)
|
||||
self.write_error(ServerError("Invalid response type"))
|
||||
except RuntimeError:
|
||||
if self._debug:
|
||||
if self.app.debug:
|
||||
logger.error(
|
||||
"Connection lost before response written @ %s",
|
||||
self.request.ip,
|
||||
)
|
||||
keep_alive = False
|
||||
except Exception as e:
|
||||
self.bail_out(
|
||||
"Writing response failed, connection closed {}".format(repr(e))
|
||||
)
|
||||
self.bail_out(f"Writing response failed, connection closed {e!r}")
|
||||
finally:
|
||||
if not keep_alive:
|
||||
self.transport.close()
|
||||
@@ -541,16 +568,14 @@ class HttpProtocol(asyncio.Protocol):
|
||||
)
|
||||
self.write_error(ServerError("Invalid response type"))
|
||||
except RuntimeError:
|
||||
if self._debug:
|
||||
if self.app.debug:
|
||||
logger.error(
|
||||
"Connection lost before response written @ %s",
|
||||
self.request.ip,
|
||||
)
|
||||
keep_alive = False
|
||||
except Exception as e:
|
||||
self.bail_out(
|
||||
"Writing response failed, connection closed {}".format(repr(e))
|
||||
)
|
||||
self.bail_out(f"Writing response failed, connection closed {e!r}")
|
||||
finally:
|
||||
if not keep_alive:
|
||||
self.transport.close()
|
||||
@@ -574,14 +599,14 @@ class HttpProtocol(asyncio.Protocol):
|
||||
version = self.request.version if self.request else "1.1"
|
||||
self.transport.write(response.output(version))
|
||||
except RuntimeError:
|
||||
if self._debug:
|
||||
if self.app.debug:
|
||||
logger.error(
|
||||
"Connection lost before error written @ %s",
|
||||
self.request.ip if self.request else "Unknown",
|
||||
)
|
||||
except Exception as e:
|
||||
self.bail_out(
|
||||
"Writing error failed, connection closed {}".format(repr(e)),
|
||||
f"Writing error failed, connection closed {e!r}",
|
||||
from_error=True,
|
||||
)
|
||||
finally:
|
||||
@@ -642,7 +667,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
|
||||
:return: boolean - True if closed, false if staying open
|
||||
"""
|
||||
if not self.parser:
|
||||
if not self.parser and self.transport is not None:
|
||||
self.transport.close()
|
||||
return True
|
||||
return False
|
||||
@@ -731,6 +756,26 @@ class AsyncioServer:
|
||||
task = asyncio.ensure_future(coro, loop=self.loop)
|
||||
return task
|
||||
|
||||
def start_serving(self):
|
||||
if self.server:
|
||||
try:
|
||||
return self.server.start_serving()
|
||||
except AttributeError:
|
||||
raise NotImplementedError(
|
||||
"server.start_serving not available in this version "
|
||||
"of asyncio or uvloop."
|
||||
)
|
||||
|
||||
def serve_forever(self):
|
||||
if self.server:
|
||||
try:
|
||||
return self.server.serve_forever()
|
||||
except AttributeError:
|
||||
raise NotImplementedError(
|
||||
"server.serve_forever not available in this version "
|
||||
"of asyncio or uvloop."
|
||||
)
|
||||
|
||||
def __await__(self):
|
||||
"""Starts the asyncio server, returns AsyncServerCoro"""
|
||||
task = asyncio.ensure_future(self.serve_coro)
|
||||
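The new start_serving/serve_forever pass-throughs only work where the underlying asyncio (3.7+) or uvloop server exposes them; a sketch of the deferred-start flow, mirroring the test added further down (the port number is arbitrary):

    import asyncio
    from sanic import Sanic
    from sanic.response import text

    app = Sanic("deferred_start_demo")

    @app.route("/")
    async def index(request):
        return text("ok")

    loop = asyncio.get_event_loop()
    srv = loop.run_until_complete(
        app.create_server(
            port=43125,
            return_asyncio_server=True,
            asyncio_server_kwargs=dict(start_serving=False),
        )
    )
    assert srv.is_serving() is False
    loop.run_until_complete(srv.start_serving())
    assert srv.is_serving() is True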
@@ -744,20 +789,13 @@ def serve(
|
||||
host,
|
||||
port,
|
||||
app,
|
||||
request_handler,
|
||||
error_handler,
|
||||
before_start=None,
|
||||
after_start=None,
|
||||
before_stop=None,
|
||||
after_stop=None,
|
||||
debug=False,
|
||||
request_timeout=60,
|
||||
response_timeout=60,
|
||||
keep_alive_timeout=5,
|
||||
ssl=None,
|
||||
sock=None,
|
||||
request_max_size=None,
|
||||
request_buffer_queue_size=100,
|
||||
unix=None,
|
||||
reuse_port=False,
|
||||
loop=None,
|
||||
protocol=HttpProtocol,
|
||||
@@ -767,25 +805,13 @@ def serve(
|
||||
run_async=False,
|
||||
connections=None,
|
||||
signal=Signal(),
|
||||
request_class=None,
|
||||
access_log=True,
|
||||
keep_alive=True,
|
||||
is_request_stream=False,
|
||||
router=None,
|
||||
websocket_max_size=None,
|
||||
websocket_max_queue=None,
|
||||
websocket_read_limit=2 ** 16,
|
||||
websocket_write_limit=2 ** 16,
|
||||
state=None,
|
||||
graceful_shutdown_timeout=15.0,
|
||||
asyncio_server_kwargs=None,
|
||||
):
|
||||
"""Start asynchronous HTTP Server on an individual process.
|
||||
|
||||
:param host: Address to host on
|
||||
:param port: Port to host on
|
||||
:param request_handler: Sanic request handler with middleware
|
||||
:param error_handler: Sanic error handler with middleware
|
||||
:param before_start: function to be executed before the server starts
|
||||
listening. Takes arguments `app` instance and `loop`
|
||||
:param after_start: function to be executed after the server starts
|
||||
@@ -796,35 +822,13 @@ def serve(
|
||||
:param after_stop: function to be executed when a stop signal is
|
||||
received after it is respected. Takes arguments
|
||||
`app` instance and `loop`
|
||||
:param debug: enables debug output (slows server)
|
||||
:param request_timeout: time in seconds
|
||||
:param response_timeout: time in seconds
|
||||
:param keep_alive_timeout: time in seconds
|
||||
:param ssl: SSLContext
|
||||
:param sock: Socket for the server to accept connections from
|
||||
:param request_max_size: size in bytes, `None` for no limit
|
||||
:param unix: Unix socket to listen on instead of TCP port
|
||||
:param reuse_port: `True` for multiple workers
|
||||
:param loop: asyncio compatible event loop
|
||||
:param protocol: subclass of asyncio protocol class
|
||||
:param run_async: bool: Do not create a new event loop for the server,
|
||||
and return an AsyncServer object rather than running it
|
||||
:param request_class: Request class to use
|
||||
:param access_log: disable/enable access log
|
||||
:param websocket_max_size: enforces the maximum size for
|
||||
incoming messages in bytes.
|
||||
:param websocket_max_queue: sets the maximum length of the queue
|
||||
that holds incoming messages.
|
||||
:param websocket_read_limit: sets the high-water limit of the buffer for
|
||||
incoming bytes, the low-water limit is half
|
||||
the high-water limit.
|
||||
:param websocket_write_limit: sets the high-water limit of the buffer for
|
||||
outgoing bytes, the low-water limit is a
|
||||
quarter of the high-water limit.
|
||||
:param is_request_stream: disable/enable Request.stream
|
||||
:param request_buffer_queue_size: streaming request buffer queue size
|
||||
:param router: Router object
|
||||
:param graceful_shutdown_timeout: How long take to Force close non-idle
|
||||
connection
|
||||
:param asyncio_server_kwargs: key-value args for asyncio/uvloop
|
||||
create_server method
|
||||
:return: Nothing
|
||||
@@ -834,8 +838,8 @@ def serve(
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
if debug:
|
||||
loop.set_debug(debug)
|
||||
if app.debug:
|
||||
loop.set_debug(app.debug)
|
||||
|
||||
app.asgi = False
|
||||
|
||||
@@ -846,47 +850,34 @@ def serve(
|
||||
connections=connections,
|
||||
signal=signal,
|
||||
app=app,
|
||||
request_handler=request_handler,
|
||||
error_handler=error_handler,
|
||||
request_timeout=request_timeout,
|
||||
response_timeout=response_timeout,
|
||||
keep_alive_timeout=keep_alive_timeout,
|
||||
request_max_size=request_max_size,
|
||||
request_buffer_queue_size=request_buffer_queue_size,
|
||||
request_class=request_class,
|
||||
access_log=access_log,
|
||||
keep_alive=keep_alive,
|
||||
is_request_stream=is_request_stream,
|
||||
router=router,
|
||||
websocket_max_size=websocket_max_size,
|
||||
websocket_max_queue=websocket_max_queue,
|
||||
websocket_read_limit=websocket_read_limit,
|
||||
websocket_write_limit=websocket_write_limit,
|
||||
state=state,
|
||||
debug=debug,
|
||||
unix=unix,
|
||||
)
|
||||
asyncio_server_kwargs = (
|
||||
asyncio_server_kwargs if asyncio_server_kwargs else {}
|
||||
)
|
||||
# UNIX sockets are always bound by us (to preserve semantics between modes)
|
||||
if unix:
|
||||
sock = bind_unix_socket(unix, backlog=backlog)
|
||||
server_coroutine = loop.create_server(
|
||||
server,
|
||||
host,
|
||||
port,
|
||||
None if sock else host,
|
||||
None if sock else port,
|
||||
ssl=ssl,
|
||||
reuse_port=reuse_port,
|
||||
sock=sock,
|
||||
backlog=backlog,
|
||||
**asyncio_server_kwargs
|
||||
**asyncio_server_kwargs,
|
||||
)
|
||||
|
||||
if run_async:
|
||||
return AsyncioServer(
|
||||
loop,
|
||||
server_coroutine,
|
||||
connections,
|
||||
after_start,
|
||||
before_stop,
|
||||
after_stop,
|
||||
loop=loop,
|
||||
serve_coro=server_coroutine,
|
||||
connections=connections,
|
||||
after_start=after_start,
|
||||
before_stop=before_stop,
|
||||
after_stop=after_stop,
|
||||
)
|
||||
|
||||
trigger_events(before_start, loop)
|
||||
@@ -905,15 +896,11 @@ def serve(
|
||||
|
||||
# Register signals for graceful termination
|
||||
if register_sys_signals:
|
||||
_singals = (SIGTERM,) if run_multiple else (SIGINT, SIGTERM)
|
||||
for _signal in _singals:
|
||||
try:
|
||||
loop.add_signal_handler(_signal, loop.stop)
|
||||
except NotImplementedError:
|
||||
logger.warning(
|
||||
"Sanic tried to use loop.add_signal_handler "
|
||||
"but it is not implemented on this platform."
|
||||
)
|
||||
if OS_IS_WINDOWS:
|
||||
ctrlc_workaround_for_windows(app)
|
||||
else:
|
||||
for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]:
|
||||
loop.add_signal_handler(_signal, app.stop)
|
||||
pid = os.getpid()
|
||||
try:
|
||||
logger.info("Starting worker [%s]", pid)
|
||||
@@ -937,8 +924,9 @@ def serve(
# We should provide graceful_shutdown_timeout,
# instead of letting connections hang forever.
# Let's roughly calculate time.
graceful = app.config.GRACEFUL_SHUTDOWN_TIMEOUT
start_shutdown = 0
while connections and (start_shutdown < graceful_shutdown_timeout):
while connections and (start_shutdown < graceful):
loop.run_until_complete(asyncio.sleep(0.1))
start_shutdown = start_shutdown + 0.1
|
||||
|
||||
@@ -951,12 +939,91 @@ def serve(
|
||||
else:
|
||||
conn.close()
|
||||
|
||||
_shutdown = asyncio.gather(*coros, loop=loop)
|
||||
_shutdown = asyncio.gather(*coros)
|
||||
loop.run_until_complete(_shutdown)
|
||||
|
||||
trigger_events(after_stop, loop)
|
||||
|
||||
loop.close()
|
||||
remove_unix_socket(unix)
|
||||
|
||||
|
||||
def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket:
"""Create TCP server socket.
:param host: IPv4, IPv6 or hostname may be specified
:param port: TCP port number
:param backlog: Maximum number of connections to queue
:return: socket.socket object
"""
try: # IP address: family must be specified for IPv6 at least
ip = ip_address(host)
host = str(ip)
sock = socket.socket(
socket.AF_INET6 if ip.version == 6 else socket.AF_INET
)
except ValueError: # Hostname, may become AF_INET or AF_INET6
sock = socket.socket()
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(backlog)
return sock


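A minimal usage sketch of bind_socket, assuming it is imported from sanic.server (host and port here are arbitrary): a literal IP picks the address family up front, a hostname falls back to the default family.

    from sanic.server import bind_socket

    sock = bind_socket("127.0.0.1", 0, backlog=100)  # port 0 = any free port
    print(sock.getsockname())  # e.g. ('127.0.0.1', 54321)
    sock.close()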
def bind_unix_socket(path: str, *, mode=0o666, backlog=100) -> socket.socket:
"""Create unix socket.
:param path: filesystem path
:param backlog: Maximum number of connections to queue
:return: socket.socket object
"""
"""Open or atomically replace existing socket with zero downtime."""
# Sanitise and pre-verify socket path
path = os.path.abspath(path)
folder = os.path.dirname(path)
if not os.path.isdir(folder):
raise FileNotFoundError(f"Socket folder does not exist: {folder}")
try:
if not stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode):
raise FileExistsError(f"Existing file is not a socket: {path}")
except FileNotFoundError:
pass
# Create new socket with a random temporary name
tmp_path = f"{path}.{secrets.token_urlsafe()}"
sock = socket.socket(socket.AF_UNIX)
try:
# Critical section begins (filename races)
sock.bind(tmp_path)
try:
os.chmod(tmp_path, mode)
# Start listening before rename to avoid connection failures
sock.listen(backlog)
os.rename(tmp_path, path)
except: # noqa: E722
try:
os.unlink(tmp_path)
finally:
raise
except: # noqa: E722
try:
sock.close()
finally:
raise
return sock


def remove_unix_socket(path: str) -> None:
"""Remove dead unix socket during server exit."""
if not path:
return
try:
if stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode):
# Is it actually dead (doesn't belong to a new server instance)?
with socket.socket(socket.AF_UNIX) as testsock:
try:
testsock.connect(path)
except ConnectionRefusedError:
os.unlink(path)
except FileNotFoundError:
pass


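Taken together, the two helpers above give an atomic-replace lifecycle: the socket is bound under a random temporary name, starts listening, and only then is renamed over the target path, so clients never observe a bound-but-not-yet-listening file. A hedged sketch, assuming a writable /tmp and the helpers imported from sanic.server:

    from sanic.server import bind_unix_socket, remove_unix_socket

    path = "/tmp/sanic_demo.sock"
    sock = bind_unix_socket(path, mode=0o666, backlog=100)
    print(sock.getsockname())  # /tmp/sanic_demo.sock
    sock.close()
    remove_unix_socket(path)   # unlinks only if nothing is accepting on it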
def serve_multiple(server_settings, workers):
@@ -971,11 +1038,17 @@ def serve_multiple(server_settings, workers):
server_settings["reuse_port"] = True
server_settings["run_multiple"] = True

# Handling when custom socket is not provided.
if server_settings.get("sock") is None:
sock = socket()
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
sock.bind((server_settings["host"], server_settings["port"]))
# Create a listening socket or use the one in settings
sock = server_settings.get("sock")
unix = server_settings["unix"]
backlog = server_settings["backlog"]
if unix:
sock = bind_unix_socket(unix, backlog=backlog)
server_settings["unix"] = unix
if sock is None:
sock = bind_socket(
server_settings["host"], server_settings["port"], backlog=backlog
)
sock.set_inheritable(True)
server_settings["sock"] = sock
server_settings["host"] = None
@@ -990,9 +1063,10 @@ def serve_multiple(server_settings, workers):

signal_func(SIGINT, lambda s, f: sig_handler(s, f))
signal_func(SIGTERM, lambda s, f: sig_handler(s, f))
mp = multiprocessing.get_context("fork")

for _ in range(workers):
process = Process(target=serve, kwargs=server_settings)
process = mp.Process(target=serve, kwargs=server_settings)
process.daemon = True
process.start()
processes.append(process)
@@ -1003,4 +1077,6 @@ def serve_multiple(server_settings, workers):
# the above processes will block this until they're stopped
for process in processes:
process.terminate()
server_settings.get("sock").close()

sock.close()
remove_unix_socket(unix)

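The worker model above hinges on one listener being bound in the parent, marked inheritable, and then shared by every forked process; stripped of Sanic specifics, the pattern looks roughly like this (the worker body and counts are made up):

    import multiprocessing
    import socket

    def worker(listener):
        conn, _ = listener.accept()  # all workers accept on the same socket
        conn.close()

    listener = socket.socket()
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(("127.0.0.1", 0))
    listener.listen(100)
    listener.set_inheritable(True)  # survives the fork into worker processes

    mp = multiprocessing.get_context("fork")  # same start method serve_multiple uses
    workers = [mp.Process(target=worker, args=(listener,), daemon=True) for _ in range(2)]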
sanic/static.py
@@ -1,11 +1,11 @@
|
||||
from functools import partial, wraps
|
||||
from mimetypes import guess_type
|
||||
from os import path
|
||||
from re import sub
|
||||
from time import gmtime, strftime
|
||||
from urllib.parse import unquote
|
||||
|
||||
from aiofiles.os import stat # type: ignore
|
||||
|
||||
from sanic.compat import stat_async
|
||||
from sanic.exceptions import (
|
||||
ContentRangeError,
|
||||
FileNotFound,
|
||||
@@ -16,6 +16,89 @@ from sanic.handlers import ContentRangeHandler
|
||||
from sanic.response import HTTPResponse, file, file_stream
|
||||
|
||||
|
||||
async def _static_request_handler(
|
||||
file_or_directory,
|
||||
use_modified_since,
|
||||
use_content_range,
|
||||
stream_large_files,
|
||||
request,
|
||||
content_type=None,
|
||||
file_uri=None,
|
||||
):
|
||||
# Using this to determine if the URL is trying to break out of the path
|
||||
# served. os.path.realpath seems to be very slow
|
||||
if file_uri and "../" in file_uri:
|
||||
raise InvalidUsage("Invalid URL")
|
||||
# Merge served directory and requested file if provided
|
||||
# Strip all / that in the beginning of the URL to help prevent python
|
||||
# from herping a derp and treating the uri as an absolute path
|
||||
root_path = file_path = file_or_directory
|
||||
if file_uri:
|
||||
file_path = path.join(file_or_directory, sub("^[/]*", "", file_uri))
|
||||
|
||||
# URL decode the path sent by the browser otherwise we won't be able to
|
||||
# match filenames which got encoded (filenames with spaces etc)
|
||||
file_path = path.abspath(unquote(file_path))
|
||||
if not file_path.startswith(path.abspath(unquote(root_path))):
|
||||
raise FileNotFound(
|
||||
"File not found", path=file_or_directory, relative_url=file_uri
|
||||
)
|
||||
try:
|
||||
headers = {}
|
||||
# Check if the client has been sent this file before
|
||||
# and it has not been modified since
|
||||
stats = None
|
||||
if use_modified_since:
|
||||
stats = await stat_async(file_path)
|
||||
modified_since = strftime(
|
||||
"%a, %d %b %Y %H:%M:%S GMT", gmtime(stats.st_mtime)
|
||||
)
|
||||
if request.headers.get("If-Modified-Since") == modified_since:
|
||||
return HTTPResponse(status=304)
|
||||
headers["Last-Modified"] = modified_since
|
||||
_range = None
|
||||
if use_content_range:
|
||||
_range = None
|
||||
if not stats:
|
||||
stats = await stat_async(file_path)
|
||||
headers["Accept-Ranges"] = "bytes"
|
||||
headers["Content-Length"] = str(stats.st_size)
|
||||
if request.method != "HEAD":
|
||||
try:
|
||||
_range = ContentRangeHandler(request, stats)
|
||||
except HeaderNotFound:
|
||||
pass
|
||||
else:
|
||||
del headers["Content-Length"]
|
||||
for key, value in _range.headers.items():
|
||||
headers[key] = value
|
||||
headers["Content-Type"] = (
|
||||
content_type or guess_type(file_path)[0] or "text/plain"
|
||||
)
|
||||
if request.method == "HEAD":
|
||||
return HTTPResponse(headers=headers)
|
||||
else:
|
||||
if stream_large_files:
|
||||
if type(stream_large_files) == int:
|
||||
threshold = stream_large_files
|
||||
else:
|
||||
threshold = 1024 * 1024
|
||||
|
||||
if not stats:
|
||||
stats = await stat_async(file_path)
|
||||
if stats.st_size >= threshold:
|
||||
return await file_stream(
|
||||
file_path, headers=headers, _range=_range
|
||||
)
|
||||
return await file(file_path, headers=headers, _range=_range)
|
||||
except ContentRangeError:
|
||||
raise
|
||||
except Exception:
|
||||
raise FileNotFound(
|
||||
"File not found", path=file_or_directory, relative_url=file_uri
|
||||
)
|
||||
|
||||
|
||||
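The guard in _static_request_handler reduces to a prefix check on absolute, URL-decoded paths; the same idea in isolation (the directory and requested URI here are hypothetical):

    from os import path
    from urllib.parse import unquote

    root = "/srv/static"
    requested = "../etc/passwd"  # would escape the served directory

    file_path = path.abspath(unquote(path.join(root, requested.lstrip("/"))))
    if not file_path.startswith(path.abspath(unquote(root))):
        raise PermissionError("refusing to serve a file outside the static root")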
def register(
|
||||
app,
|
||||
uri,
|
||||
@@ -57,85 +140,20 @@ def register(
|
||||
if not path.isfile(file_or_directory):
|
||||
uri += "<file_uri:" + pattern + ">"
|
||||
|
||||
async def _handler(request, file_uri=None):
|
||||
# Using this to determine if the URL is trying to break out of the path
|
||||
# served. os.path.realpath seems to be very slow
|
||||
if file_uri and "../" in file_uri:
|
||||
raise InvalidUsage("Invalid URL")
|
||||
# Merge served directory and requested file if provided
|
||||
# Strip all / that in the beginning of the URL to help prevent python
|
||||
# from herping a derp and treating the uri as an absolute path
|
||||
root_path = file_path = file_or_directory
|
||||
if file_uri:
|
||||
file_path = path.join(
|
||||
file_or_directory, sub("^[/]*", "", file_uri)
|
||||
)
|
||||
|
||||
# URL decode the path sent by the browser otherwise we won't be able to
|
||||
# match filenames which got encoded (filenames with spaces etc)
|
||||
file_path = path.abspath(unquote(file_path))
|
||||
if not file_path.startswith(path.abspath(unquote(root_path))):
|
||||
raise FileNotFound(
|
||||
"File not found", path=file_or_directory, relative_url=file_uri
|
||||
)
|
||||
try:
|
||||
headers = {}
|
||||
# Check if the client has been sent this file before
|
||||
# and it has not been modified since
|
||||
stats = None
|
||||
if use_modified_since:
|
||||
stats = await stat(file_path)
|
||||
modified_since = strftime(
|
||||
"%a, %d %b %Y %H:%M:%S GMT", gmtime(stats.st_mtime)
|
||||
)
|
||||
if request.headers.get("If-Modified-Since") == modified_since:
|
||||
return HTTPResponse(status=304)
|
||||
headers["Last-Modified"] = modified_since
|
||||
_range = None
|
||||
if use_content_range:
|
||||
_range = None
|
||||
if not stats:
|
||||
stats = await stat(file_path)
|
||||
headers["Accept-Ranges"] = "bytes"
|
||||
headers["Content-Length"] = str(stats.st_size)
|
||||
if request.method != "HEAD":
|
||||
try:
|
||||
_range = ContentRangeHandler(request, stats)
|
||||
except HeaderNotFound:
|
||||
pass
|
||||
else:
|
||||
del headers["Content-Length"]
|
||||
for key, value in _range.headers.items():
|
||||
headers[key] = value
|
||||
headers["Content-Type"] = (
|
||||
content_type or guess_type(file_path)[0] or "text/plain"
|
||||
)
|
||||
if request.method == "HEAD":
|
||||
return HTTPResponse(headers=headers)
|
||||
else:
|
||||
if stream_large_files:
|
||||
if type(stream_large_files) == int:
|
||||
threshold = stream_large_files
|
||||
else:
|
||||
threshold = 1024 * 1024
|
||||
|
||||
if not stats:
|
||||
stats = await stat(file_path)
|
||||
if stats.st_size >= threshold:
|
||||
return await file_stream(
|
||||
file_path, headers=headers, _range=_range
|
||||
)
|
||||
return await file(file_path, headers=headers, _range=_range)
|
||||
except ContentRangeError:
|
||||
raise
|
||||
except Exception:
|
||||
raise FileNotFound(
|
||||
"File not found", path=file_or_directory, relative_url=file_uri
|
||||
)
|
||||
|
||||
# special prefix for static files
|
||||
if not name.startswith("_static_"):
|
||||
name = "_static_{}".format(name)
|
||||
name = f"_static_{name}"
|
||||
|
||||
_handler = wraps(_static_request_handler)(
|
||||
partial(
|
||||
_static_request_handler,
|
||||
file_or_directory,
|
||||
use_modified_since,
|
||||
use_content_range,
|
||||
stream_large_files,
|
||||
content_type=content_type,
|
||||
)
|
||||
)
|
||||
|
||||
app.route(
|
||||
uri,
|
||||
|
||||
@@ -12,7 +12,7 @@ from sanic.response import text
|
||||
|
||||
ASGI_HOST = "mockserver"
|
||||
HOST = "127.0.0.1"
|
||||
PORT = 42101
|
||||
PORT = None
|
||||
|
||||
|
||||
class SanicTestClient:
|
||||
@@ -23,7 +23,7 @@ class SanicTestClient:
|
||||
self.host = host
|
||||
|
||||
def get_new_session(self):
|
||||
return httpx.Client()
|
||||
return httpx.AsyncClient(verify=False)
|
||||
|
||||
async def _local_request(self, method, url, *args, **kwargs):
|
||||
logger.info(url)
|
||||
@@ -38,20 +38,22 @@ class SanicTestClient:
|
||||
|
||||
try:
|
||||
response = await getattr(session, method.lower())(
|
||||
url, verify=False, *args, **kwargs
|
||||
url, *args, **kwargs
|
||||
)
|
||||
except NameError:
|
||||
raise Exception(response.status_code)
|
||||
|
||||
response.body = await response.aread()
|
||||
response.status = response.status_code
|
||||
response.content_type = response.headers.get("content-type")
|
||||
|
||||
# response can be decoded as json after response._content
|
||||
# is set by response.aread()
|
||||
try:
|
||||
response.json = response.json()
|
||||
except (JSONDecodeError, UnicodeDecodeError):
|
||||
response.json = None
|
||||
|
||||
response.body = await response.read()
|
||||
response.status = response.status_code
|
||||
response.content_type = response.headers.get("content-type")
|
||||
|
||||
if raw_cookies:
|
||||
response.raw_cookies = {}
|
||||
|
||||
@@ -93,7 +95,7 @@ class SanicTestClient:
|
||||
|
||||
if self.port:
|
||||
server_kwargs = dict(
|
||||
host=host or self.host, port=self.port, **server_kwargs
|
||||
host=host or self.host, port=self.port, **server_kwargs,
|
||||
)
|
||||
host, port = host or self.host, self.port
|
||||
else:
|
||||
@@ -101,17 +103,19 @@ class SanicTestClient:
|
||||
sock.bind((host or self.host, 0))
|
||||
server_kwargs = dict(sock=sock, **server_kwargs)
|
||||
host, port = sock.getsockname()
|
||||
self.port = port
|
||||
|
||||
if uri.startswith(
|
||||
("http:", "https:", "ftp:", "ftps://", "//", "ws:", "wss:")
|
||||
):
|
||||
url = uri
|
||||
else:
|
||||
uri = uri if uri.startswith("/") else "/{uri}".format(uri=uri)
|
||||
uri = uri if uri.startswith("/") else f"/{uri}"
|
||||
scheme = "ws" if method == "websocket" else "http"
|
||||
url = "{scheme}://{host}:{port}{uri}".format(
|
||||
scheme=scheme, host=host, port=port, uri=uri
|
||||
)
|
||||
url = f"{scheme}://{host}:{port}{uri}"
|
||||
# Tests construct URLs using PORT = None, which means random port not
|
||||
# known until this function is called, so fix that here
|
||||
url = url.replace(":None/", f":{port}/")
|
||||
|
||||
@self.app.listener("after_server_start")
|
||||
async def _collect_response(sanic, loop):
|
||||
@@ -129,7 +133,7 @@ class SanicTestClient:
|
||||
self.app.listeners["after_server_start"].pop()
|
||||
|
||||
if exceptions:
|
||||
raise ValueError("Exception during request: {}".format(exceptions))
|
||||
raise ValueError(f"Exception during request: {exceptions}")
|
||||
|
||||
if gather_request:
|
||||
try:
|
||||
@@ -137,17 +141,13 @@ class SanicTestClient:
|
||||
return request, response
|
||||
except BaseException: # noqa
|
||||
raise ValueError(
|
||||
"Request and response object expected, got ({})".format(
|
||||
results
|
||||
)
|
||||
f"Request and response object expected, got ({results})"
|
||||
)
|
||||
else:
|
||||
try:
|
||||
return results[-1]
|
||||
except BaseException: # noqa
|
||||
raise ValueError(
|
||||
"Request object expected, got ({})".format(results)
|
||||
)
|
||||
raise ValueError(f"Request object expected, got ({results})")
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test("get", *args, **kwargs)
|
||||
@@ -185,15 +185,15 @@ async def app_call_with_return(self, scope, receive, send):
|
||||
return await asgi_app()
|
||||
|
||||
|
||||
class SanicASGIDispatch(httpx.dispatch.ASGIDispatch):
|
||||
class SanicASGIDispatch(httpx.ASGIDispatch):
|
||||
pass
|
||||
|
||||
|
||||
class SanicASGITestClient(httpx.Client):
|
||||
class SanicASGITestClient(httpx.AsyncClient):
|
||||
def __init__(
|
||||
self,
|
||||
app,
|
||||
base_url: str = "http://{}".format(ASGI_HOST),
|
||||
base_url: str = f"http://{ASGI_HOST}",
|
||||
suppress_exceptions: bool = False,
|
||||
) -> None:
|
||||
app.__class__.__call__ = app_call_with_return
|
||||
@@ -201,7 +201,7 @@ class SanicASGITestClient(httpx.Client):
|
||||
|
||||
self.app = app
|
||||
|
||||
dispatch = SanicASGIDispatch(app=app, client=(ASGI_HOST, PORT))
|
||||
dispatch = SanicASGIDispatch(app=app, client=(ASGI_HOST, PORT or 0))
|
||||
super().__init__(dispatch=dispatch, base_url=base_url)
|
||||
|
||||
self.last_request = None
|
||||
@@ -224,7 +224,7 @@ class SanicASGITestClient(httpx.Client):
|
||||
async def websocket(self, uri, subprotocols=None, *args, **kwargs):
|
||||
scheme = "ws"
|
||||
path = uri
|
||||
root_path = "{}://{}".format(scheme, ASGI_HOST)
|
||||
root_path = f"{scheme}://{ASGI_HOST}"
|
||||
|
||||
headers = kwargs.get("headers", {})
|
||||
headers.setdefault("connection", "upgrade")
|
||||
|
||||
@@ -96,14 +96,10 @@ class CompositionView:
|
||||
handler.is_stream = stream
|
||||
for method in methods:
|
||||
if method not in HTTP_METHODS:
|
||||
raise InvalidUsage(
|
||||
"{} is not a valid HTTP method.".format(method)
|
||||
)
|
||||
raise InvalidUsage(f"{method} is not a valid HTTP method.")
|
||||
|
||||
if method in self.handlers:
|
||||
raise InvalidUsage(
|
||||
"Method {} is already registered.".format(method)
|
||||
)
|
||||
raise InvalidUsage(f"Method {method} is already registered.")
|
||||
self.handlers[method] = handler
|
||||
|
||||
def __call__(self, request, *args, **kwargs):
|
||||
|
||||
@@ -113,7 +113,7 @@ class WebSocketProtocol(HttpProtocol):
|
||||
|
||||
# hook up the websocket protocol
|
||||
self.websocket = WebSocketCommonProtocol(
|
||||
timeout=self.websocket_timeout,
|
||||
close_timeout=self.websocket_timeout,
|
||||
max_size=self.websocket_max_size,
|
||||
max_queue=self.websocket_max_queue,
|
||||
read_limit=self.websocket_read_limit,
|
||||
|
||||
setup.py
@@ -5,7 +5,6 @@ import codecs
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from distutils.util import strtobool
|
||||
|
||||
from setuptools import setup
|
||||
@@ -39,9 +38,7 @@ def open_local(paths, mode="r", encoding="utf8"):
|
||||
|
||||
with open_local(["sanic", "__version__.py"], encoding="latin1") as fp:
|
||||
try:
|
||||
version = re.findall(
|
||||
r"^__version__ = \"([^']+)\"\r?$", fp.read(), re.M
|
||||
)[0]
|
||||
version = re.findall(r"^__version__ = \"([^']+)\"\r?$", fp.read(), re.M)[0]
|
||||
except IndexError:
|
||||
raise RuntimeError("Unable to determine version.")
|
||||
|
||||
@@ -68,12 +65,12 @@ setup_kwargs = {
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
],
|
||||
"entry_points": {"console_scripts": ["sanic = sanic.__main__:main"]},
|
||||
}
|
||||
|
||||
env_dependency = (
|
||||
'; sys_platform != "win32" ' 'and implementation_name == "cpython"'
|
||||
)
|
||||
env_dependency = '; sys_platform != "win32" ' 'and implementation_name == "cpython"'
|
||||
ujson = "ujson>=1.35" + env_dependency
|
||||
uvloop = "uvloop>=0.5.3" + env_dependency
|
||||
|
||||
@@ -82,9 +79,9 @@ requirements = [
|
||||
uvloop,
|
||||
ujson,
|
||||
"aiofiles>=0.3.0",
|
||||
"websockets>=7.0,<9.0",
|
||||
"websockets>=8.1,<9.0",
|
||||
"multidict>=4.0,<5.0",
|
||||
"httpx==0.9.3",
|
||||
"httpx==0.11.1",
|
||||
]
|
||||
|
||||
tests_require = [
|
||||
|
||||
@@ -103,7 +103,7 @@ def sanic_router():
|
||||
for method, route in route_details:
|
||||
try:
|
||||
router._add(
|
||||
uri="/{}".format(route),
|
||||
uri=f"/{route}",
|
||||
methods=frozenset({method}),
|
||||
host="localhost",
|
||||
handler=_handler,
|
||||
|
||||
@@ -6,6 +6,7 @@ from inspect import isawaitable
|
||||
|
||||
import pytest
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.exceptions import SanicException
|
||||
from sanic.response import text
|
||||
|
||||
@@ -44,10 +45,11 @@ def test_create_asyncio_server(app):
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 7), reason="requires python3.7 or higher"
|
||||
)
|
||||
def test_asyncio_server_start_serving(app):
|
||||
def test_asyncio_server_no_start_serving(app):
|
||||
if not uvloop_installed():
|
||||
loop = asyncio.get_event_loop()
|
||||
asyncio_srv_coro = app.create_server(
|
||||
port=43123,
|
||||
return_asyncio_server=True,
|
||||
asyncio_server_kwargs=dict(start_serving=False),
|
||||
)
|
||||
@@ -55,6 +57,26 @@ def test_asyncio_server_start_serving(app):
|
||||
assert srv.is_serving() is False
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 7), reason="requires python3.7 or higher"
|
||||
)
|
||||
def test_asyncio_server_start_serving(app):
|
||||
if not uvloop_installed():
|
||||
loop = asyncio.get_event_loop()
|
||||
asyncio_srv_coro = app.create_server(
|
||||
port=43124,
|
||||
return_asyncio_server=True,
|
||||
asyncio_server_kwargs=dict(start_serving=False),
|
||||
)
|
||||
srv = loop.run_until_complete(asyncio_srv_coro)
|
||||
assert srv.is_serving() is False
|
||||
loop.run_until_complete(srv.start_serving())
|
||||
assert srv.is_serving() is True
|
||||
wait_close = srv.close()
|
||||
loop.run_until_complete(wait_close)
|
||||
# Looks like we can't easily test `serve_forever()`
|
||||
|
||||
|
||||
def test_app_loop_not_running(app):
|
||||
with pytest.raises(SanicException) as excinfo:
|
||||
app.loop
|
||||
@@ -106,8 +128,8 @@ def test_app_handle_request_handler_is_none(app, monkeypatch):
|
||||
request, response = app.test_client.get("/test")
|
||||
|
||||
assert (
|
||||
response.text
|
||||
== "Error: 'None' was returned while requesting a handler from the router"
|
||||
"'None' was returned while requesting a handler from the router"
|
||||
in response.text
|
||||
)
|
||||
|
||||
|
||||
@@ -166,9 +188,7 @@ def test_handle_request_with_nested_exception_debug(app, monkeypatch):
|
||||
request, response = app.test_client.get("/", debug=True)
|
||||
assert response.status == 500
|
||||
assert response.text.startswith(
|
||||
"Error while handling error: {}\nStack: Traceback (most recent call last):\n".format(
|
||||
err_msg
|
||||
)
|
||||
f"Error while handling error: {err_msg}\nStack: Traceback (most recent call last):\n"
|
||||
)
|
||||
|
||||
|
||||
@@ -188,10 +208,17 @@ def test_handle_request_with_nested_sanic_exception(app, monkeypatch, caplog):
|
||||
|
||||
with caplog.at_level(logging.ERROR):
|
||||
request, response = app.test_client.get("/")
|
||||
port = request.server_port
|
||||
assert port > 0
|
||||
assert response.status == 500
|
||||
assert response.text == "Error: Mock SanicException"
|
||||
assert "Mock SanicException" in response.text
|
||||
assert (
|
||||
"sanic.root",
|
||||
logging.ERROR,
|
||||
"Exception occurred while handling uri: 'http://127.0.0.1:42101/'",
|
||||
f"Exception occurred while handling uri: 'http://127.0.0.1:{port}/'",
|
||||
) in caplog.record_tuples
|
||||
|
||||
|
||||
def test_app_name_required():
|
||||
with pytest.deprecated_call():
|
||||
Sanic()
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import asyncio
|
||||
import sys
|
||||
|
||||
from collections import deque, namedtuple
|
||||
|
||||
@@ -81,7 +82,12 @@ def test_listeners_triggered(app):
|
||||
with pytest.warns(UserWarning):
|
||||
server.run()
|
||||
|
||||
for task in asyncio.Task.all_tasks():
|
||||
all_tasks = (
|
||||
asyncio.Task.all_tasks()
|
||||
if sys.version_info < (3, 7)
|
||||
else asyncio.all_tasks(asyncio.get_event_loop())
|
||||
)
|
||||
for task in all_tasks:
|
||||
task.cancel()
|
||||
|
||||
assert before_server_start
|
||||
@@ -126,7 +132,12 @@ def test_listeners_triggered_async(app):
|
||||
with pytest.warns(UserWarning):
|
||||
server.run()
|
||||
|
||||
for task in asyncio.Task.all_tasks():
|
||||
all_tasks = (
|
||||
asyncio.Task.all_tasks()
|
||||
if sys.version_info < (3, 7)
|
||||
else asyncio.all_tasks(asyncio.get_event_loop())
|
||||
)
|
||||
for task in all_tasks:
|
||||
task.cancel()
|
||||
|
||||
assert before_server_start
|
||||
@@ -221,7 +232,7 @@ async def test_request_class_custom():
|
||||
class MyCustomRequest(Request):
|
||||
pass
|
||||
|
||||
app = Sanic(request_class=MyCustomRequest)
|
||||
app = Sanic(name=__name__, request_class=MyCustomRequest)
|
||||
|
||||
@app.get("/custom")
|
||||
def custom_request(request):
|
||||
|
||||
@@ -18,4 +18,4 @@ def test_bad_request_response(app):
|
||||
|
||||
app.run(host="127.0.0.1", port=42101, debug=False)
|
||||
assert lines[0] == b"HTTP/1.1 400 Bad Request\r\n"
|
||||
assert lines[-1] == b"Error: Bad Request"
|
||||
assert b"Bad Request" in lines[-1]
|
||||
|
||||
@@ -40,9 +40,9 @@ def test_bp_group_with_additional_route_params(app: Sanic):
|
||||
)
|
||||
def blueprint_2_named_method(request: Request, param):
|
||||
if request.method == "DELETE":
|
||||
return text("DELETE_{}".format(param))
|
||||
return text(f"DELETE_{param}")
|
||||
elif request.method == "PATCH":
|
||||
return text("PATCH_{}".format(param))
|
||||
return text(f"PATCH_{param}")
|
||||
|
||||
blueprint_group = Blueprint.group(
|
||||
blueprint_1, blueprint_2, url_prefix="/api"
|
||||
|
||||
@@ -46,19 +46,19 @@ def test_versioned_routes_get(app, method):
|
||||
func = getattr(bp, method)
|
||||
if callable(func):
|
||||
|
||||
@func("/{}".format(method), version=1)
|
||||
@func(f"/{method}", version=1)
|
||||
def handler(request):
|
||||
return text("OK")
|
||||
|
||||
else:
|
||||
print(func)
|
||||
raise Exception("{} is not callable".format(func))
|
||||
raise Exception(f"{func} is not callable")
|
||||
|
||||
app.blueprint(bp)
|
||||
|
||||
client_method = getattr(app.test_client, method)
|
||||
|
||||
request, response = client_method("/v1/{}".format(method))
|
||||
request, response = client_method(f"/v1/{method}")
|
||||
assert response.status == 200
|
||||
|
||||
|
||||
@@ -252,8 +252,90 @@ def test_several_bp_with_host(app):
|
||||
assert response.text == "Hello3"
|
||||
|
||||
|
||||
def test_bp_with_host_list(app):
|
||||
bp = Blueprint(
|
||||
"test_bp_host",
|
||||
url_prefix="/test1",
|
||||
host=["example.com", "sub.example.com"],
|
||||
)
|
||||
|
||||
@bp.route("/")
|
||||
def handler1(request):
|
||||
return text("Hello")
|
||||
|
||||
@bp.route("/", host=["sub1.example.com"])
|
||||
def handler2(request):
|
||||
return text("Hello subdomain!")
|
||||
|
||||
app.blueprint(bp)
|
||||
headers = {"Host": "example.com"}
|
||||
request, response = app.test_client.get("/test1/", headers=headers)
|
||||
assert response.text == "Hello"
|
||||
|
||||
headers = {"Host": "sub.example.com"}
|
||||
request, response = app.test_client.get("/test1/", headers=headers)
|
||||
assert response.text == "Hello"
|
||||
|
||||
headers = {"Host": "sub1.example.com"}
|
||||
request, response = app.test_client.get("/test1/", headers=headers)
|
||||
|
||||
assert response.text == "Hello subdomain!"
|
||||
|
||||
|
||||
def test_several_bp_with_host_list(app):
|
||||
bp = Blueprint(
|
||||
"test_text",
|
||||
url_prefix="/test",
|
||||
host=["example.com", "sub.example.com"],
|
||||
)
|
||||
bp2 = Blueprint(
|
||||
"test_text2",
|
||||
url_prefix="/test",
|
||||
host=["sub1.example.com", "sub2.example.com"],
|
||||
)
|
||||
|
||||
@bp.route("/")
|
||||
def handler(request):
|
||||
return text("Hello")
|
||||
|
||||
@bp2.route("/")
|
||||
def handler1(request):
|
||||
return text("Hello2")
|
||||
|
||||
@bp2.route("/other/")
|
||||
def handler2(request):
|
||||
return text("Hello3")
|
||||
|
||||
app.blueprint(bp)
|
||||
app.blueprint(bp2)
|
||||
|
||||
assert bp.host == ["example.com", "sub.example.com"]
|
||||
headers = {"Host": "example.com"}
|
||||
request, response = app.test_client.get("/test/", headers=headers)
|
||||
assert response.text == "Hello"
|
||||
|
||||
assert bp.host == ["example.com", "sub.example.com"]
|
||||
headers = {"Host": "sub.example.com"}
|
||||
request, response = app.test_client.get("/test/", headers=headers)
|
||||
assert response.text == "Hello"
|
||||
|
||||
assert bp2.host == ["sub1.example.com", "sub2.example.com"]
|
||||
headers = {"Host": "sub1.example.com"}
|
||||
request, response = app.test_client.get("/test/", headers=headers)
|
||||
assert response.text == "Hello2"
|
||||
request, response = app.test_client.get("/test/other/", headers=headers)
|
||||
assert response.text == "Hello3"
|
||||
|
||||
assert bp2.host == ["sub1.example.com", "sub2.example.com"]
|
||||
headers = {"Host": "sub2.example.com"}
|
||||
request, response = app.test_client.get("/test/", headers=headers)
|
||||
assert response.text == "Hello2"
|
||||
request, response = app.test_client.get("/test/other/", headers=headers)
|
||||
assert response.text == "Hello3"
|
||||
|
||||
|
||||
def test_bp_middleware(app):
|
||||
blueprint = Blueprint("test_middleware")
|
||||
blueprint = Blueprint("test_bp_middleware")
|
||||
|
||||
@blueprint.middleware("response")
|
||||
async def process_response(request, response):
|
||||
@@ -271,6 +353,46 @@ def test_bp_middleware(app):
|
||||
assert response.text == "FAIL"
|
||||
|
||||
|
||||
def test_bp_middleware_order(app):
|
||||
blueprint = Blueprint("test_bp_middleware_order")
|
||||
order = list()
|
||||
|
||||
@blueprint.middleware("request")
|
||||
def mw_1(request):
|
||||
order.append(1)
|
||||
|
||||
@blueprint.middleware("request")
|
||||
def mw_2(request):
|
||||
order.append(2)
|
||||
|
||||
@blueprint.middleware("request")
|
||||
def mw_3(request):
|
||||
order.append(3)
|
||||
|
||||
@blueprint.middleware("response")
|
||||
def mw_4(request, response):
|
||||
order.append(6)
|
||||
|
||||
@blueprint.middleware("response")
|
||||
def mw_5(request, response):
|
||||
order.append(5)
|
||||
|
||||
@blueprint.middleware("response")
|
||||
def mw_6(request, response):
|
||||
order.append(4)
|
||||
|
||||
@blueprint.route("/")
|
||||
def process_response(request):
|
||||
return text("OK")
|
||||
|
||||
app.blueprint(blueprint)
|
||||
order.clear()
|
||||
request, response = app.test_client.get("/")
|
||||
|
||||
assert response.status == 200
|
||||
assert order == [1, 2, 3, 4, 5, 6]
|
||||
|
||||
|
||||
def test_bp_exception_handler(app):
|
||||
blueprint = Blueprint("test_middleware")
|
||||
|
||||
@@ -553,9 +675,7 @@ def test_bp_group_with_default_url_prefix(app):
|
||||
from uuid import uuid4
|
||||
|
||||
resource_id = str(uuid4())
|
||||
request, response = app.test_client.get(
|
||||
"/api/v1/resources/{0}".format(resource_id)
|
||||
)
|
||||
request, response = app.test_client.get(f"/api/v1/resources/{resource_id}")
|
||||
assert response.json == {"resource_id": resource_id}
|
||||
|
||||
|
||||
@@ -669,9 +789,9 @@ def test_duplicate_blueprint(app):
|
||||
app.blueprint(bp1)
|
||||
|
||||
assert str(excinfo.value) == (
|
||||
'A blueprint with the name "{}" is already registered. '
|
||||
f'A blueprint with the name "{bp_name}" is already registered. '
|
||||
"Blueprint names must be unique."
|
||||
).format(bp_name)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("debug", [True, False, None])
|
||||
|
||||
@@ -44,42 +44,42 @@ def test_load_from_object_string_exception(app):
|
||||
|
||||
def test_auto_load_env():
|
||||
environ["SANIC_TEST_ANSWER"] = "42"
|
||||
app = Sanic()
|
||||
app = Sanic(name=__name__)
|
||||
assert app.config.TEST_ANSWER == 42
|
||||
del environ["SANIC_TEST_ANSWER"]
|
||||
|
||||
|
||||
def test_auto_load_bool_env():
|
||||
environ["SANIC_TEST_ANSWER"] = "True"
|
||||
app = Sanic()
|
||||
app = Sanic(name=__name__)
|
||||
assert app.config.TEST_ANSWER == True
|
||||
del environ["SANIC_TEST_ANSWER"]
|
||||
|
||||
|
||||
def test_dont_load_env():
|
||||
environ["SANIC_TEST_ANSWER"] = "42"
|
||||
app = Sanic(load_env=False)
|
||||
app = Sanic(name=__name__, load_env=False)
|
||||
assert getattr(app.config, "TEST_ANSWER", None) is None
|
||||
del environ["SANIC_TEST_ANSWER"]
|
||||
|
||||
|
||||
def test_load_env_prefix():
|
||||
environ["MYAPP_TEST_ANSWER"] = "42"
|
||||
app = Sanic(load_env="MYAPP_")
|
||||
app = Sanic(name=__name__, load_env="MYAPP_")
|
||||
assert app.config.TEST_ANSWER == 42
|
||||
del environ["MYAPP_TEST_ANSWER"]
|
||||
|
||||
|
||||
def test_load_env_prefix_float_values():
|
||||
environ["MYAPP_TEST_ROI"] = "2.3"
|
||||
app = Sanic(load_env="MYAPP_")
|
||||
app = Sanic(name=__name__, load_env="MYAPP_")
|
||||
assert app.config.TEST_ROI == 2.3
|
||||
del environ["MYAPP_TEST_ROI"]
|
||||
|
||||
|
||||
def test_load_env_prefix_string_value():
|
||||
environ["MYAPP_TEST_TOKEN"] = "somerandomtesttoken"
|
||||
app = Sanic(load_env="MYAPP_")
|
||||
app = Sanic(name=__name__, load_env="MYAPP_")
|
||||
assert app.config.TEST_TOKEN == "somerandomtesttoken"
|
||||
del environ["MYAPP_TEST_TOKEN"]
|
||||
|
||||
|
||||
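These config tests lean on Sanic's environment loading: any variable with the configured prefix is copied onto app.config, coerced to int, float, or bool where that applies. A condensed sketch mirroring the tests above (the variable names are invented):

    from os import environ
    from sanic import Sanic

    environ["MYAPP_TIMEOUT"] = "60"    # digits coerce to int
    environ["MYAPP_VERBOSE"] = "True"  # "True"/"False" coerce to bool

    app = Sanic(name=__name__, load_env="MYAPP_")
    assert app.config.TIMEOUT == 60
    assert app.config.VERBOSE == True  # as in test_auto_load_bool_env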
@@ -15,7 +15,8 @@ from sanic.response import text
|
||||
def test_cookies(app):
|
||||
@app.route("/")
|
||||
def handler(request):
|
||||
response = text("Cookies are: {}".format(request.cookies["test"]))
|
||||
cookie_value = request.cookies["test"]
|
||||
response = text(f"Cookies are: {cookie_value}")
|
||||
response.cookies["right_back"] = "at you"
|
||||
return response
|
||||
|
||||
@@ -31,7 +32,8 @@ def test_cookies(app):
|
||||
async def test_cookies_asgi(app):
|
||||
@app.route("/")
|
||||
def handler(request):
|
||||
response = text("Cookies are: {}".format(request.cookies["test"]))
|
||||
cookie_value = request.cookies["test"]
|
||||
response = text(f"Cookies are: {cookie_value}")
|
||||
response.cookies["right_back"] = "at you"
|
||||
return response
|
||||
|
||||
@@ -52,7 +54,7 @@ def test_false_cookies_encoded(app, httponly, expected):
|
||||
response = text("hello cookies")
|
||||
response.cookies["hello"] = "world"
|
||||
response.cookies["hello"]["httponly"] = httponly
|
||||
return text(response.cookies["hello"].encode("utf8"))
|
||||
return text(response.cookies["hello"].encode("utf8").decode())
|
||||
|
||||
request, response = app.test_client.get("/")
|
||||
|
||||
@@ -78,7 +80,8 @@ def test_false_cookies(app, httponly, expected):
|
||||
def test_http2_cookies(app):
|
||||
@app.route("/")
|
||||
async def handler(request):
|
||||
response = text("Cookies are: {}".format(request.cookies["test"]))
|
||||
cookie_value = request.cookies["test"]
|
||||
response = text(f"Cookies are: {cookie_value}")
|
||||
return response
|
||||
|
||||
headers = {"cookie": "test=working!"}
|
||||
|
||||
@@ -17,12 +17,12 @@ def test_create_task(app):
|
||||
|
||||
@app.route("/early")
|
||||
def not_set(request):
|
||||
return text(e.is_set())
|
||||
return text(str(e.is_set()))
|
||||
|
||||
@app.route("/late")
|
||||
async def set(request):
|
||||
await asyncio.sleep(0.1)
|
||||
return text(e.is_set())
|
||||
return text(str(e.is_set()))
|
||||
|
||||
request, response = app.test_client.get("/early")
|
||||
assert response.body == b"False"
|
||||
|
||||
@@ -20,7 +20,7 @@ class CustomRequest(Request):
|
||||
|
||||
|
||||
def test_custom_request():
|
||||
app = Sanic(request_class=CustomRequest)
|
||||
app = Sanic(name=__name__, request_class=CustomRequest)
|
||||
|
||||
@app.route("/post", methods=["POST"])
|
||||
async def post_handler(request):
|
||||
|
||||
@@ -172,7 +172,7 @@ def test_handled_unhandled_exception(exception_app):
|
||||
request, response = exception_app.test_client.get("/divide_by_zero")
|
||||
assert response.status == 500
|
||||
soup = BeautifulSoup(response.body, "html.parser")
|
||||
assert soup.h1.text == "Internal Server Error"
|
||||
assert "Internal Server Error" in soup.h1.text
|
||||
|
||||
message = " ".join(soup.p.text.split())
|
||||
assert message == (
|
||||
@@ -218,4 +218,4 @@ def test_abort(exception_app):
|
||||
|
||||
request, response = exception_app.test_client.get("/abort/message")
|
||||
assert response.status == 500
|
||||
assert response.text == "Error: Abort"
|
||||
assert "Abort" in response.text
|
||||
|
||||
@@ -43,7 +43,7 @@ def handler_6(request, arg):
|
||||
try:
|
||||
foo = 1 / arg
|
||||
except Exception as e:
|
||||
raise e from ValueError("{}".format(arg))
|
||||
raise e from ValueError(f"{arg}")
|
||||
return text(foo)
|
||||
|
||||
|
||||
@@ -86,7 +86,7 @@ def test_html_traceback_output_in_debug_mode():
|
||||
|
||||
summary_text = " ".join(soup.select(".summary")[0].text.split())
|
||||
assert (
|
||||
"NameError: name 'bar' " "is not defined while handling path /4"
|
||||
"NameError: name 'bar' is not defined while handling path /4"
|
||||
) == summary_text
|
||||
|
||||
|
||||
@@ -112,7 +112,7 @@ def test_chained_exception_handler():
|
||||
|
||||
summary_text = " ".join(soup.select(".summary")[0].text.split())
|
||||
assert (
|
||||
"ZeroDivisionError: division by zero " "while handling path /6/0"
|
||||
"ZeroDivisionError: division by zero while handling path /6/0"
|
||||
) == summary_text
|
||||
|
||||
|
||||
|
||||
@@ -7,17 +7,34 @@ import httpx
|
||||
|
||||
from sanic import Sanic, server
|
||||
from sanic.response import text
|
||||
from sanic.testing import HOST, PORT, SanicTestClient
|
||||
from sanic.testing import HOST, SanicTestClient
|
||||
|
||||
|
||||
CONFIG_FOR_TESTS = {"KEEP_ALIVE_TIMEOUT": 2, "KEEP_ALIVE": True}
|
||||
|
||||
old_conn = None
|
||||
PORT = 42101 # test_keep_alive_timeout_reuse doesn't work with random port
|
||||
|
||||
|
||||
class ReusableSanicConnectionPool(
|
||||
httpx.dispatch.connection_pool.ConnectionPool
|
||||
):
|
||||
@property
|
||||
def cert(self):
|
||||
return self.ssl.cert
|
||||
|
||||
@property
|
||||
def verify(self):
|
||||
return self.ssl.verify
|
||||
|
||||
@property
|
||||
def trust_env(self):
|
||||
return self.ssl.trust_env
|
||||
|
||||
@property
|
||||
def http2(self):
|
||||
return self.ssl.http2
|
||||
|
||||
async def acquire_connection(self, origin, timeout):
|
||||
global old_conn
|
||||
connection = self.pop_connection(origin)
|
||||
@@ -26,14 +43,17 @@ class ReusableSanicConnectionPool(
|
||||
pool_timeout = None if timeout is None else timeout.pool_timeout
|
||||
|
||||
await self.max_connections.acquire(timeout=pool_timeout)
|
||||
ssl_config = httpx.config.SSLConfig(
|
||||
cert=self.cert,
|
||||
verify=self.verify,
|
||||
trust_env=self.trust_env,
|
||||
http2=self.http2,
|
||||
)
|
||||
connection = httpx.dispatch.connection.HTTPConnection(
|
||||
origin,
|
||||
verify=self.verify,
|
||||
cert=self.cert,
|
||||
http2=self.http2,
|
||||
ssl=ssl_config,
|
||||
backend=self.backend,
|
||||
release_func=self.release_connection,
|
||||
trust_env=self.trust_env,
|
||||
uds=self.uds,
|
||||
)
|
||||
|
||||
@@ -49,7 +69,7 @@ class ReusableSanicConnectionPool(
|
||||
return connection
|
||||
|
||||
|
||||
class ResusableSanicSession(httpx.Client):
|
||||
class ResusableSanicSession(httpx.AsyncClient):
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
dispatch = ReusableSanicConnectionPool()
|
||||
super().__init__(dispatch=dispatch, *args, **kwargs)
|
||||
@@ -97,11 +117,9 @@ class ReuseableSanicTestClient(SanicTestClient):
|
||||
):
|
||||
url = uri
|
||||
else:
|
||||
uri = uri if uri.startswith("/") else "/{uri}".format(uri=uri)
|
||||
uri = uri if uri.startswith("/") else f"/{uri}"
|
||||
scheme = "http"
|
||||
url = "{scheme}://{host}:{port}{uri}".format(
|
||||
scheme=scheme, host=HOST, port=PORT, uri=uri
|
||||
)
|
||||
url = f"{scheme}://{HOST}:{PORT}{uri}"
|
||||
|
||||
@self.app.listener("after_server_start")
|
||||
async def _collect_response(loop):
|
||||
@@ -134,7 +152,7 @@ class ReuseableSanicTestClient(SanicTestClient):
|
||||
self.app.listeners["after_server_start"].pop()
|
||||
|
||||
if exceptions:
|
||||
raise ValueError("Exception during request: {}".format(exceptions))
|
||||
raise ValueError(f"Exception during request: {exceptions}")
|
||||
|
||||
if gather_request:
|
||||
self.app.request_middleware.pop()
|
||||
@@ -143,17 +161,13 @@ class ReuseableSanicTestClient(SanicTestClient):
|
||||
return request, response
|
||||
except Exception:
|
||||
raise ValueError(
|
||||
"Request and response object expected, got ({})".format(
|
||||
results
|
||||
)
|
||||
f"Request and response object expected, got ({results})"
|
||||
)
|
||||
else:
|
||||
try:
|
||||
return results[-1]
|
||||
except Exception:
|
||||
raise ValueError(
|
||||
"Request object expected, got ({})".format(results)
|
||||
)
|
||||
raise ValueError(f"Request object expected, got ({results})")
|
||||
|
||||
def kill_server(self):
|
||||
try:
|
||||
@@ -163,7 +177,7 @@ class ReuseableSanicTestClient(SanicTestClient):
|
||||
self._server = None
|
||||
|
||||
if self._session:
|
||||
self._loop.run_until_complete(self._session.close())
|
||||
self._loop.run_until_complete(self._session.aclose())
|
||||
self._session = None
|
||||
|
||||
except Exception as e3:
|
||||
@@ -182,7 +196,7 @@ class ReuseableSanicTestClient(SanicTestClient):
|
||||
self._session = self.get_new_session()
|
||||
try:
|
||||
response = await getattr(self._session, method.lower())(
|
||||
url, verify=False, timeout=request_keepalive, *args, **kwargs
|
||||
url, timeout=request_keepalive, *args, **kwargs
|
||||
)
|
||||
except NameError:
|
||||
raise Exception(response.status_code)
|
||||
@@ -192,7 +206,7 @@ class ReuseableSanicTestClient(SanicTestClient):
|
||||
except (JSONDecodeError, UnicodeDecodeError):
|
||||
response.json = None
|
||||
|
||||
response.body = await response.read()
|
||||
response.body = await response.aread()
|
||||
response.status = response.status_code
|
||||
response.content_type = response.headers.get("content-type")
|
||||
|
||||
|
||||
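Note: the keep-alive test client above tracks an httpx API migration — the synchronous-looking Client / read() / close() calls give way to AsyncClient, aread() and aclose(). A rough sketch of the call pattern the updated helper relies on (method names taken from the diff, not pinned to a specific httpx release):

```python
import asyncio

import httpx  # assumed to be a version exposing the aread()/aclose() API used in the diff


async def fetch(url: str) -> bytes:
    client = httpx.AsyncClient()
    try:
        response = await client.get(url)
        body = await response.aread()  # was: response.read()
        return body
    finally:
        await client.aclose()  # was: client.close()


# asyncio.run(fetch("http://127.0.0.1:8000/"))  # hypothetical local server
```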
@@ -1,4 +1,5 @@
|
||||
import logging
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from importlib import reload
|
||||
@@ -12,6 +13,7 @@ import sanic
|
||||
from sanic import Sanic
|
||||
from sanic.log import LOGGING_CONFIG_DEFAULTS, logger
|
||||
from sanic.response import text
|
||||
from sanic.testing import SanicTestClient
|
||||
|
||||
|
||||
logging_format = """module: %(module)s; \
|
||||
@@ -127,7 +129,7 @@ def test_log_connection_lost(app, debug, monkeypatch):
|
||||
def test_logger(caplog):
|
||||
rand_string = str(uuid.uuid4())
|
||||
|
||||
app = Sanic()
|
||||
app = Sanic(name=__name__)
|
||||
|
||||
@app.get("/")
|
||||
def log_info(request):
|
||||
@@ -137,15 +139,67 @@ def test_logger(caplog):
|
||||
with caplog.at_level(logging.INFO):
|
||||
request, response = app.test_client.get("/")
|
||||
|
||||
port = request.server_port
|
||||
|
||||
# Note: testing with random port doesn't show the banner because it doesn't
|
||||
# define host and port. This test supports both modes.
|
||||
if caplog.record_tuples[0] == (
|
||||
"sanic.root",
|
||||
logging.INFO,
|
||||
f"Goin' Fast @ http://127.0.0.1:{port}",
|
||||
):
|
||||
caplog.record_tuples.pop(0)
|
||||
|
||||
assert caplog.record_tuples[0] == (
|
||||
"sanic.root",
|
||||
logging.INFO,
|
||||
"Goin' Fast @ http://127.0.0.1:42101",
|
||||
f"http://127.0.0.1:{port}/",
|
||||
)
|
||||
assert caplog.record_tuples[1] == ("sanic.root", logging.INFO, rand_string)
|
||||
assert caplog.record_tuples[-1] == (
|
||||
"sanic.root",
|
||||
logging.INFO,
|
||||
"Server Stopped",
|
||||
)
|
||||
|
||||
|
||||
def test_logger_static_and_secure(caplog):
|
||||
# Same as test_logger, except for more coverage:
|
||||
# - test_client initialised separately for static port
|
||||
# - using ssl
|
||||
rand_string = str(uuid.uuid4())
|
||||
|
||||
app = Sanic(name=__name__)
|
||||
|
||||
@app.get("/")
|
||||
def log_info(request):
|
||||
logger.info(rand_string)
|
||||
return text("hello")
|
||||
|
||||
current_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
ssl_cert = os.path.join(current_dir, "certs/selfsigned.cert")
|
||||
ssl_key = os.path.join(current_dir, "certs/selfsigned.key")
|
||||
|
||||
ssl_dict = {"cert": ssl_cert, "key": ssl_key}
|
||||
|
||||
test_client = SanicTestClient(app, port=42101)
|
||||
with caplog.at_level(logging.INFO):
|
||||
request, response = test_client.get(
|
||||
f"https://127.0.0.1:{test_client.port}/",
|
||||
server_kwargs=dict(ssl=ssl_dict),
|
||||
)
|
||||
|
||||
port = test_client.port
|
||||
|
||||
assert caplog.record_tuples[0] == (
|
||||
"sanic.root",
|
||||
logging.INFO,
|
||||
f"Goin' Fast @ https://127.0.0.1:{port}",
|
||||
)
|
||||
assert caplog.record_tuples[1] == (
|
||||
"sanic.root",
|
||||
logging.INFO,
|
||||
"http://127.0.0.1:42101/",
|
||||
f"https://127.0.0.1:{port}/",
|
||||
)
|
||||
assert caplog.record_tuples[2] == ("sanic.root", logging.INFO, rand_string)
|
||||
assert caplog.record_tuples[-1] == (
|
||||
|
||||
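Note: the logging tests above stop hard-coding port 42101 and instead read the bound port back from the request, which suggests the test client now binds to an ephemeral port by default, with a fixed port still available on request. Sketched with a hypothetical route:

```python
from sanic import Sanic
from sanic.response import text
from sanic.testing import SanicTestClient

app = Sanic(name=__name__)


@app.get("/")
def handler(request):
    return text("hello")


# Default client: the port is chosen at bind time, so assertions use request.server_port.
request, response = app.test_client.get("/")
print(request.server_port)

# A fixed port can still be requested explicitly, as test_logger_static_and_secure does.
client = SanicTestClient(app, port=42101)
```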
@@ -49,10 +49,10 @@ def test_logo_false(app, caplog):
|
||||
loop.run_until_complete(_server.wait_closed())
|
||||
app.stop()
|
||||
|
||||
banner, port = caplog.record_tuples[ROW][2].rsplit(":", 1)
|
||||
assert caplog.record_tuples[ROW][1] == logging.INFO
|
||||
assert caplog.record_tuples[ROW][
|
||||
2
|
||||
] == "Goin' Fast @ http://127.0.0.1:{}".format(PORT)
|
||||
assert banner == "Goin' Fast @ http://127.0.0.1"
|
||||
assert int(port) > 0
|
||||
|
||||
|
||||
def test_logo_true(app, caplog):
|
||||
|
||||
@@ -2,7 +2,7 @@ import logging
|
||||
|
||||
from asyncio import CancelledError
|
||||
|
||||
from sanic.exceptions import NotFound
|
||||
from sanic.exceptions import NotFound, SanicException
|
||||
from sanic.request import Request
|
||||
from sanic.response import HTTPResponse, text
|
||||
|
||||
@@ -93,7 +93,7 @@ def test_middleware_response_raise_cancelled_error(app, caplog):
|
||||
"sanic.root",
|
||||
logging.ERROR,
|
||||
"Exception occurred while handling uri: 'http://127.0.0.1:42101/'",
|
||||
) in caplog.record_tuples
|
||||
) not in caplog.record_tuples
|
||||
|
||||
|
||||
def test_middleware_response_raise_exception(app, caplog):
|
||||
@@ -102,14 +102,16 @@ def test_middleware_response_raise_exception(app, caplog):
|
||||
raise Exception("Exception at response middleware")
|
||||
|
||||
with caplog.at_level(logging.ERROR):
|
||||
reqrequest, response = app.test_client.get("/")
|
||||
reqrequest, response = app.test_client.get("/fail")
|
||||
|
||||
assert response.status == 404
|
||||
# 404 errors are not logged
|
||||
assert (
|
||||
"sanic.root",
|
||||
logging.ERROR,
|
||||
"Exception occurred while handling uri: 'http://127.0.0.1:42101/'",
|
||||
) in caplog.record_tuples
|
||||
) not in caplog.record_tuples
|
||||
# Middleware exception ignored but logged
|
||||
assert (
|
||||
"sanic.error",
|
||||
logging.ERROR,
|
||||
|
||||
@@ -87,3 +87,15 @@ def test_pickle_app_with_bp(app, protocol):
|
||||
request, response = up_p_app.test_client.get("/")
|
||||
assert up_p_app.is_request_stream is False
|
||||
assert response.text == "Hello"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("protocol", [3, 4])
|
||||
def test_pickle_app_with_static(app, protocol):
|
||||
app.route("/")(handler)
|
||||
app.static("/static", "/tmp/static")
|
||||
p_app = pickle.dumps(app, protocol=protocol)
|
||||
del app
|
||||
up_p_app = pickle.loads(p_app)
|
||||
assert up_p_app
|
||||
request, response = up_p_app.test_client.get("/static/missing.txt")
|
||||
assert response.status == 404
|
||||
|
||||
@@ -21,13 +21,13 @@ def test_versioned_named_routes_get(app, method):
|
||||
bp = Blueprint("test_bp", url_prefix="/bp")
|
||||
|
||||
method = method.lower()
|
||||
route_name = "route_{}".format(method)
|
||||
route_name2 = "route2_{}".format(method)
|
||||
route_name = f"route_{method}"
|
||||
route_name2 = f"route2_{method}"
|
||||
|
||||
func = getattr(app, method)
|
||||
if callable(func):
|
||||
|
||||
@func("/{}".format(method), version=1, name=route_name)
|
||||
@func(f"/{method}", version=1, name=route_name)
|
||||
def handler(request):
|
||||
return text("OK")
|
||||
|
||||
@@ -38,7 +38,7 @@ def test_versioned_named_routes_get(app, method):
|
||||
func = getattr(bp, method)
|
||||
if callable(func):
|
||||
|
||||
@func("/{}".format(method), version=1, name=route_name2)
|
||||
@func(f"/{method}", version=1, name=route_name2)
|
||||
def handler2(request):
|
||||
return text("OK")
|
||||
|
||||
@@ -48,14 +48,14 @@ def test_versioned_named_routes_get(app, method):
|
||||
|
||||
app.blueprint(bp)
|
||||
|
||||
assert app.router.routes_all["/v1/{}".format(method)].name == route_name
|
||||
assert app.router.routes_all[f"/v1/{method}"].name == route_name
|
||||
|
||||
route = app.router.routes_all["/v1/bp/{}".format(method)]
|
||||
assert route.name == "test_bp.{}".format(route_name2)
|
||||
route = app.router.routes_all[f"/v1/bp/{method}"]
|
||||
assert route.name == f"test_bp.{route_name2}"
|
||||
|
||||
assert app.url_for(route_name) == "/v1/{}".format(method)
|
||||
url = app.url_for("test_bp.{}".format(route_name2))
|
||||
assert url == "/v1/bp/{}".format(method)
|
||||
assert app.url_for(route_name) == f"/v1/{method}"
|
||||
url = app.url_for(f"test_bp.{route_name2}")
|
||||
assert url == f"/v1/bp/{method}"
|
||||
with pytest.raises(URLBuildError):
|
||||
app.url_for("handler")
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ def test_payload_too_large_at_data_received_default(app):
|
||||
|
||||
response = app.test_client.get("/1", gather_request=False)
|
||||
assert response.status == 413
|
||||
assert response.text == "Error: Payload Too Large"
|
||||
assert "Payload Too Large" in response.text
|
||||
|
||||
|
||||
def test_payload_too_large_at_on_header_default(app):
|
||||
@@ -40,4 +40,4 @@ def test_payload_too_large_at_on_header_default(app):
|
||||
data = "a" * 1000
|
||||
response = app.test_client.post("/1", gather_request=False, data=data)
|
||||
assert response.status == 413
|
||||
assert response.text == "Error: Payload Too Large"
|
||||
assert "Payload Too Large" in response.text
|
||||
|
||||
@@ -115,14 +115,14 @@ def test_redirect_with_params(app, test_str):
|
||||
|
||||
@app.route("/api/v1/test/<test>/")
|
||||
async def init_handler(request, test):
|
||||
return redirect("/api/v2/test/{}/".format(use_in_uri))
|
||||
return redirect(f"/api/v2/test/{use_in_uri}/")
|
||||
|
||||
@app.route("/api/v2/test/<test>/")
|
||||
async def target_handler(request, test):
|
||||
assert test == test_str
|
||||
return text("OK")
|
||||
|
||||
_, response = app.test_client.get("/api/v1/test/{}/".format(use_in_uri))
|
||||
_, response = app.test_client.get(f"/api/v1/test/{use_in_uri}/")
|
||||
assert response.status == 200
|
||||
|
||||
assert response.content == b"OK"
|
||||
|
||||
108
tests/test_reloader.py
Normal file
@@ -0,0 +1,108 @@
|
||||
import os
|
||||
import secrets
|
||||
import sys
|
||||
|
||||
from contextlib import suppress
|
||||
from subprocess import PIPE, Popen, TimeoutExpired
|
||||
from tempfile import TemporaryDirectory
|
||||
from textwrap import dedent
|
||||
from threading import Timer
|
||||
from time import sleep
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
# We need to interrupt the autoreloader without killing it, so that the server gets terminated
|
||||
# https://stefan.sofa-rockers.org/2013/08/15/handling-sub-process-hierarchies-python-linux-os-x/
|
||||
|
||||
try:
|
||||
from signal import CTRL_BREAK_EVENT
|
||||
from subprocess import CREATE_NEW_PROCESS_GROUP
|
||||
|
||||
flags = CREATE_NEW_PROCESS_GROUP
|
||||
except ImportError:
|
||||
flags = 0
|
||||
|
||||
|
||||
def terminate(proc):
|
||||
if flags:
|
||||
proc.send_signal(CTRL_BREAK_EVENT)
|
||||
else:
|
||||
proc.terminate()
|
||||
|
||||
|
||||
def write_app(filename, **runargs):
|
||||
text = secrets.token_urlsafe()
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
dedent(
|
||||
f"""\
|
||||
import os
|
||||
from sanic import Sanic
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
@app.listener("after_server_start")
|
||||
def complete(*args):
|
||||
print("complete", os.getpid(), {text!r})
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(**{runargs!r})
|
||||
"""
|
||||
)
|
||||
)
|
||||
return text
|
||||
|
||||
|
||||
def scanner(proc):
|
||||
for line in proc.stdout:
|
||||
line = line.decode().strip()
|
||||
print(">", line)
|
||||
if line.startswith("complete"):
|
||||
yield line
|
||||
|
||||
|
||||
argv = dict(
|
||||
script=[sys.executable, "reloader.py"],
|
||||
module=[sys.executable, "-m", "reloader"],
|
||||
sanic=[
|
||||
sys.executable,
|
||||
"-m",
|
||||
"sanic",
|
||||
"--port",
|
||||
"42104",
|
||||
"--debug",
|
||||
"reloader.app",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"runargs, mode",
|
||||
[
|
||||
(dict(port=42102, auto_reload=True), "script"),
|
||||
(dict(port=42103, debug=True), "module"),
|
||||
(dict(), "sanic"),
|
||||
],
|
||||
)
|
||||
async def test_reloader_live(runargs, mode):
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
filename = os.path.join(tmpdir, "reloader.py")
|
||||
text = write_app(filename, **runargs)
|
||||
proc = Popen(argv[mode], cwd=tmpdir, stdout=PIPE, creationflags=flags)
|
||||
try:
|
||||
timeout = Timer(5, terminate, [proc])
|
||||
timeout.start()
|
||||
# Python apparently keeps using the old source sometimes if
|
||||
# we don't sleep before rewrite (pycache timestamp problem?)
|
||||
sleep(1)
|
||||
line = scanner(proc)
|
||||
assert text in next(line)
|
||||
# Edit source code and try again
|
||||
text = write_app(filename, **runargs)
|
||||
assert text in next(line)
|
||||
finally:
|
||||
timeout.cancel()
|
||||
terminate(proc)
|
||||
with suppress(TimeoutExpired):
|
||||
proc.wait(timeout=3)
|
||||
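Note: the new tests/test_reloader.py drives the auto-reloader end to end — it writes a throwaway app to disk, starts it in a subprocess (via a script, `-m`, or the sanic CLI), rewrites the source, and waits for the restarted process to print a fresh token. Outside the test harness, the feature under test boils down to something like this sketch (port and handler are illustrative):

```python
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


@app.get("/")
def handler(request):
    return text("hello")


if __name__ == "__main__":
    # auto_reload=True restarts the worker whenever the source file changes;
    # per the test parameters, debug=True enables the same behaviour.
    app.run(port=42102, auto_reload=True)
```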
@@ -44,44 +44,6 @@ def test_custom_context(app):
|
||||
}
|
||||
|
||||
|
||||
# Remove this once the deprecated API is abolished.
|
||||
def test_custom_context_old(app):
|
||||
@app.middleware("request")
|
||||
def store(request):
|
||||
try:
|
||||
request["foo"]
|
||||
except KeyError:
|
||||
pass
|
||||
request["user"] = "sanic"
|
||||
sidekick = request.get("sidekick", "tails") # Item missing -> default
|
||||
request["sidekick"] = sidekick
|
||||
request["bar"] = request["sidekick"]
|
||||
del request["sidekick"]
|
||||
|
||||
@app.route("/")
|
||||
def handler(request):
|
||||
return json(
|
||||
{
|
||||
"user": request.get("user"),
|
||||
"sidekick": request.get("sidekick"),
|
||||
"has_bar": "bar" in request,
|
||||
"has_sidekick": "sidekick" in request,
|
||||
}
|
||||
)
|
||||
|
||||
request, response = app.test_client.get("/")
|
||||
|
||||
assert response.json == {
|
||||
"user": "sanic",
|
||||
"sidekick": None,
|
||||
"has_bar": True,
|
||||
"has_sidekick": False,
|
||||
}
|
||||
response_json = loads(response.text)
|
||||
assert response_json["user"] == "sanic"
|
||||
assert response_json.get("sidekick") is None
|
||||
|
||||
|
||||
def test_app_injection(app):
|
||||
expected = random.choice(range(0, 100))
|
||||
|
||||
|
||||
@@ -3,12 +3,12 @@ import pytest
|
||||
from sanic.blueprints import Blueprint
|
||||
from sanic.exceptions import HeaderExpectationFailed
|
||||
from sanic.request import StreamBuffer
|
||||
from sanic.response import stream, text
|
||||
from sanic.response import json, stream, text
|
||||
from sanic.views import CompositionView, HTTPMethodView
|
||||
from sanic.views import stream as stream_decorator
|
||||
|
||||
|
||||
data = "abc" * 10000000
|
||||
data = "abc" * 1_000_000
|
||||
|
||||
|
||||
def test_request_stream_method_view(app):
|
||||
@@ -329,15 +329,12 @@ def test_request_stream_handle_exception(app):
|
||||
# 404
|
||||
request, response = app.test_client.post("/in_valid_post", data=data)
|
||||
assert response.status == 404
|
||||
assert response.text == "Error: Requested URL /in_valid_post not found"
|
||||
assert "Requested URL /in_valid_post not found" in response.text
|
||||
|
||||
# 405
|
||||
request, response = app.test_client.get("/post/random_id")
|
||||
assert response.status == 405
|
||||
assert (
|
||||
response.text == "Error: Method GET not allowed for URL"
|
||||
" /post/random_id"
|
||||
)
|
||||
assert "Method GET not allowed for URL /post/random_id" in response.text
|
||||
|
||||
|
||||
def test_request_stream_blueprint(app):
|
||||
@@ -616,3 +613,44 @@ def test_request_stream(app):
|
||||
request, response = app.test_client.post("/bp_stream", data=data)
|
||||
assert response.status == 200
|
||||
assert response.text == data
|
||||
|
||||
|
||||
def test_streaming_new_api(app):
|
||||
@app.post("/non-stream")
|
||||
async def handler(request):
|
||||
assert request.body == b"x"
|
||||
await request.receive_body() # This should do nothing
|
||||
assert request.body == b"x"
|
||||
return text("OK")
|
||||
|
||||
@app.post("/1", stream=True)
|
||||
async def handler(request):
|
||||
assert request.stream
|
||||
assert not request.body
|
||||
await request.receive_body()
|
||||
return text(request.body.decode().upper())
|
||||
|
||||
@app.post("/2", stream=True)
|
||||
async def handler(request):
|
||||
ret = []
|
||||
async for data in request.stream:
|
||||
# We should have no b"" or None, just proper chunks
|
||||
assert data
|
||||
assert isinstance(data, bytes)
|
||||
ret.append(data.decode("ASCII"))
|
||||
return json(ret)
|
||||
|
||||
request, response = app.test_client.post("/non-stream", data="x")
|
||||
assert response.status == 200
|
||||
|
||||
request, response = app.test_client.post("/1", data="TEST data")
|
||||
assert request.body == b"TEST data"
|
||||
assert response.status == 200
|
||||
assert response.text == "TEST DATA"
|
||||
|
||||
request, response = app.test_client.post("/2", data=data)
|
||||
assert response.status == 200
|
||||
res = response.json
|
||||
assert isinstance(res, list)
|
||||
assert len(res) > 1
|
||||
assert "".join(res) == data
|
||||
|
||||
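Note: the added test_streaming_new_api covers a newer request-body API — on a route declared with stream=True the body is not buffered up front, and the handler either iterates request.stream or calls await request.receive_body() to pull the full payload in. A condensed sketch of the two styles, using the names from the diff (route paths are illustrative):

```python
from sanic import Sanic
from sanic.response import json, text

app = Sanic(name=__name__)


@app.post("/upper", stream=True)
async def upper(request):
    # Nothing buffered yet; receive_body() reads the whole payload into request.body.
    await request.receive_body()
    return text(request.body.decode().upper())


@app.post("/chunks", stream=True)
async def chunks(request):
    parts = []
    async for chunk in request.stream:  # yields non-empty bytes chunks
        parts.append(chunk.decode())
    return json(parts)
```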
@@ -14,18 +14,15 @@ class DelayableHTTPConnection(httpx.dispatch.connection.HTTPConnection):
|
||||
self._request_delay = kwargs.pop("request_delay")
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
async def send(self, request, verify=None, cert=None, timeout=None):
|
||||
if self.h11_connection is None and self.h2_connection is None:
|
||||
await self.connect(verify=verify, cert=cert, timeout=timeout)
|
||||
async def send(self, request, timeout=None):
|
||||
|
||||
if self.connection is None:
|
||||
self.connection = await self.connect(timeout=timeout)
|
||||
|
||||
if self._request_delay:
|
||||
await asyncio.sleep(self._request_delay)
|
||||
|
||||
if self.h2_connection is not None:
|
||||
response = await self.h2_connection.send(request, timeout=timeout)
|
||||
else:
|
||||
assert self.h11_connection is not None
|
||||
response = await self.h11_connection.send(request, timeout=timeout)
|
||||
response = await self.connection.send(request, timeout=timeout)
|
||||
|
||||
return response
|
||||
|
||||
@@ -46,12 +43,9 @@ class DelayableSanicConnectionPool(
|
||||
await self.max_connections.acquire(timeout=pool_timeout)
|
||||
connection = DelayableHTTPConnection(
|
||||
origin,
|
||||
verify=self.verify,
|
||||
cert=self.cert,
|
||||
http2=self.http2,
|
||||
ssl=self.ssl,
|
||||
backend=self.backend,
|
||||
release_func=self.release_connection,
|
||||
trust_env=self.trust_env,
|
||||
uds=self.uds,
|
||||
request_delay=self._request_delay,
|
||||
)
|
||||
@@ -61,7 +55,7 @@ class DelayableSanicConnectionPool(
|
||||
return connection
|
||||
|
||||
|
||||
class DelayableSanicSession(httpx.Client):
|
||||
class DelayableSanicSession(httpx.AsyncClient):
|
||||
def __init__(self, request_delay=None, *args, **kwargs) -> None:
|
||||
dispatch = DelayableSanicConnectionPool(request_delay=request_delay)
|
||||
super().__init__(dispatch=dispatch, *args, **kwargs)
|
||||
@@ -102,7 +96,7 @@ def test_default_server_error_request_timeout():
|
||||
client = DelayableSanicTestClient(request_timeout_default_app, 2)
|
||||
request, response = client.get("/1")
|
||||
assert response.status == 408
|
||||
assert response.text == "Error: Request Timeout"
|
||||
assert "Request Timeout" in response.text
|
||||
|
||||
|
||||
def test_default_server_error_request_dont_timeout():
|
||||
@@ -125,4 +119,4 @@ def test_default_server_error_websocket_request_timeout():
|
||||
request, response = client.get("/ws1", headers=headers)
|
||||
|
||||
assert response.status == 408
|
||||
assert response.text == "Error: Request Timeout"
|
||||
assert "Request Timeout" in response.text
|
||||
|
||||
@@ -11,8 +11,8 @@ import pytest
|
||||
from sanic import Blueprint, Sanic
|
||||
from sanic.exceptions import ServerError
|
||||
from sanic.request import DEFAULT_HTTP_CONTENT_TYPE, Request, RequestParameters
|
||||
from sanic.response import json, text
|
||||
from sanic.testing import ASGI_HOST, HOST, PORT
|
||||
from sanic.response import html, json, text
|
||||
from sanic.testing import ASGI_HOST, HOST, PORT, SanicTestClient
|
||||
|
||||
|
||||
# ------------------------------------------------------------ #
|
||||
@@ -44,7 +44,7 @@ async def test_sync_asgi(app):
|
||||
def test_ip(app):
|
||||
@app.route("/")
|
||||
def handler(request):
|
||||
return text("{}".format(request.ip))
|
||||
return text(f"{request.ip}")
|
||||
|
||||
request, response = app.test_client.get("/")
|
||||
|
||||
@@ -55,7 +55,7 @@ def test_ip(app):
|
||||
async def test_ip_asgi(app):
|
||||
@app.route("/")
|
||||
def handler(request):
|
||||
return text("{}".format(request.url))
|
||||
return text(f"{request.url}")
|
||||
|
||||
request, response = await app.asgi_client.get("/")
|
||||
|
||||
@@ -72,6 +72,41 @@ def test_text(app):
|
||||
assert response.text == "Hello"
|
||||
|
||||
|
||||
def test_html(app):
|
||||
class Foo:
|
||||
def __html__(self):
|
||||
return "<h1>Foo</h1>"
|
||||
|
||||
def _repr_html_(self):
|
||||
return "<h1>Foo object repr</h1>"
|
||||
|
||||
class Bar:
|
||||
def _repr_html_(self):
|
||||
return "<h1>Bar object repr</h1>"
|
||||
|
||||
@app.route("/")
|
||||
async def handler(request):
|
||||
return html("<h1>Hello</h1>")
|
||||
|
||||
@app.route("/foo")
|
||||
async def handler(request):
|
||||
return html(Foo())
|
||||
|
||||
@app.route("/bar")
|
||||
async def handler(request):
|
||||
return html(Bar())
|
||||
|
||||
request, response = app.test_client.get("/")
|
||||
assert response.content_type == "text/html; charset=utf-8"
|
||||
assert response.text == "<h1>Hello</h1>"
|
||||
|
||||
request, response = app.test_client.get("/foo")
|
||||
assert response.text == "<h1>Foo</h1>"
|
||||
|
||||
request, response = app.test_client.get("/bar")
|
||||
assert response.text == "<h1>Bar object repr</h1>"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_text_asgi(app):
|
||||
@app.route("/")
|
||||
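Note: the added test_html indicates that html() now accepts not only strings but also objects exposing __html__ or the Jupyter-style _repr_html_ hook, with __html__ taking precedence when both are defined. A hedged sketch of what that enables (class and route names are illustrative):

```python
from sanic import Sanic
from sanic.response import html

app = Sanic(name=__name__)


class Widget:
    def __html__(self):
        return "<h1>Widget</h1>"


@app.get("/widget")
async def widget(request):
    # Per the test, html() renders the object via __html__ (falling back to _repr_html_).
    return html(Widget())
```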
@@ -290,7 +325,7 @@ def test_token(app):
|
||||
token = "a1d895e0-553a-421a-8e22-5ff8ecb48cbf"
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"Authorization": "{}".format(token),
|
||||
"Authorization": f"{token}",
|
||||
}
|
||||
|
||||
request, response = app.test_client.get("/", headers=headers)
|
||||
@@ -300,7 +335,7 @@ def test_token(app):
|
||||
token = "a1d895e0-553a-421a-8e22-5ff8ecb48cbf"
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"Authorization": "Token {}".format(token),
|
||||
"Authorization": f"Token {token}",
|
||||
}
|
||||
|
||||
request, response = app.test_client.get("/", headers=headers)
|
||||
@@ -310,7 +345,7 @@ def test_token(app):
|
||||
token = "a1d895e0-553a-421a-8e22-5ff8ecb48cbf"
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"Authorization": "Bearer {}".format(token),
|
||||
"Authorization": f"Bearer {token}",
|
||||
}
|
||||
|
||||
request, response = app.test_client.get("/", headers=headers)
|
||||
@@ -335,7 +370,7 @@ async def test_token_asgi(app):
|
||||
token = "a1d895e0-553a-421a-8e22-5ff8ecb48cbf"
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"Authorization": "{}".format(token),
|
||||
"Authorization": f"{token}",
|
||||
}
|
||||
|
||||
request, response = await app.asgi_client.get("/", headers=headers)
|
||||
@@ -345,7 +380,7 @@ async def test_token_asgi(app):
|
||||
token = "a1d895e0-553a-421a-8e22-5ff8ecb48cbf"
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"Authorization": "Token {}".format(token),
|
||||
"Authorization": f"Token {token}",
|
||||
}
|
||||
|
||||
request, response = await app.asgi_client.get("/", headers=headers)
|
||||
@@ -355,7 +390,7 @@ async def test_token_asgi(app):
|
||||
token = "a1d895e0-553a-421a-8e22-5ff8ecb48cbf"
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"Authorization": "Bearer {}".format(token),
|
||||
"Authorization": f"Bearer {token}",
|
||||
}
|
||||
|
||||
request, response = await app.asgi_client.get("/", headers=headers)
|
||||
@@ -419,11 +454,13 @@ def test_standard_forwarded(app):
|
||||
"X-Real-IP": "127.0.0.2",
|
||||
"X-Forwarded-For": "127.0.1.1",
|
||||
"X-Scheme": "ws",
|
||||
"Host": "local.site",
|
||||
}
|
||||
request, response = app.test_client.get("/", headers=headers)
|
||||
assert response.json == {"for": "127.0.0.2", "proto": "ws"}
|
||||
assert request.remote_addr == "127.0.0.2"
|
||||
assert request.scheme == "ws"
|
||||
assert request.server_name == "local.site"
|
||||
assert request.server_port == 80
|
||||
|
||||
app.config.FORWARDED_SECRET = "mySecret"
|
||||
@@ -993,8 +1030,8 @@ def test_url_attributes_no_ssl(app, path, query, expected_url):
|
||||
|
||||
app.add_route(handler, path)
|
||||
|
||||
request, response = app.test_client.get(path + "?{}".format(query))
|
||||
assert request.url == expected_url.format(HOST, PORT)
|
||||
request, response = app.test_client.get(path + f"?{query}")
|
||||
assert request.url == expected_url.format(HOST, request.server_port)
|
||||
|
||||
parsed = urlparse(request.url)
|
||||
|
||||
@@ -1019,7 +1056,7 @@ async def test_url_attributes_no_ssl_asgi(app, path, query, expected_url):
|
||||
|
||||
app.add_route(handler, path)
|
||||
|
||||
request, response = await app.asgi_client.get(path + "?{}".format(query))
|
||||
request, response = await app.asgi_client.get(path + f"?{query}")
|
||||
assert request.url == expected_url.format(ASGI_HOST)
|
||||
|
||||
parsed = urlparse(request.url)
|
||||
@@ -1051,11 +1088,12 @@ def test_url_attributes_with_ssl_context(app, path, query, expected_url):
|
||||
|
||||
app.add_route(handler, path)
|
||||
|
||||
port = app.test_client.port
|
||||
request, response = app.test_client.get(
|
||||
"https://{}:{}".format(HOST, PORT) + path + "?{}".format(query),
|
||||
f"https://{HOST}:{PORT}" + path + f"?{query}",
|
||||
server_kwargs={"ssl": context},
|
||||
)
|
||||
assert request.url == expected_url.format(HOST, PORT)
|
||||
assert request.url == expected_url.format(HOST, request.server_port)
|
||||
|
||||
parsed = urlparse(request.url)
|
||||
|
||||
@@ -1087,10 +1125,10 @@ def test_url_attributes_with_ssl_dict(app, path, query, expected_url):
|
||||
app.add_route(handler, path)
|
||||
|
||||
request, response = app.test_client.get(
|
||||
"https://{}:{}".format(HOST, PORT) + path + "?{}".format(query),
|
||||
f"https://{HOST}:{PORT}" + path + f"?{query}",
|
||||
server_kwargs={"ssl": ssl_dict},
|
||||
)
|
||||
assert request.url == expected_url.format(HOST, PORT)
|
||||
assert request.url == expected_url.format(HOST, request.server_port)
|
||||
|
||||
parsed = urlparse(request.url)
|
||||
|
||||
@@ -1571,33 +1609,6 @@ async def test_request_args_no_query_string_await(app):
|
||||
assert request.args == {}
|
||||
|
||||
|
||||
def test_request_raw_args(app):
|
||||
|
||||
params = {"test": "OK"}
|
||||
|
||||
@app.get("/")
|
||||
def handler(request):
|
||||
return text("pass")
|
||||
|
||||
request, response = app.test_client.get("/", params=params)
|
||||
|
||||
assert request.raw_args == params
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_request_raw_args_asgi(app):
|
||||
|
||||
params = {"test": "OK"}
|
||||
|
||||
@app.get("/")
|
||||
def handler(request):
|
||||
return text("pass")
|
||||
|
||||
request, response = await app.asgi_client.get("/", params=params)
|
||||
|
||||
assert request.raw_args == params
|
||||
|
||||
|
||||
def test_request_query_args(app):
|
||||
# test multiple params with the same key
|
||||
params = [("test", "value1"), ("test", "value2")]
|
||||
@@ -1798,13 +1809,17 @@ def test_request_port(app):
|
||||
port = request.port
|
||||
assert isinstance(port, int)
|
||||
|
||||
delattr(request, "_socket")
|
||||
delattr(request, "_port")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_request_port_asgi(app):
|
||||
@app.get("/")
|
||||
def handler(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = await app.asgi_client.get("/")
|
||||
|
||||
port = request.port
|
||||
assert isinstance(port, int)
|
||||
assert hasattr(request, "_socket")
|
||||
assert hasattr(request, "_port")
|
||||
|
||||
|
||||
def test_request_socket(app):
|
||||
@@ -1823,12 +1838,6 @@ def test_request_socket(app):
|
||||
assert ip == request.ip
|
||||
assert port == request.port
|
||||
|
||||
delattr(request, "_socket")
|
||||
|
||||
socket = request.socket
|
||||
assert isinstance(socket, tuple)
|
||||
assert hasattr(request, "_socket")
|
||||
|
||||
|
||||
def test_request_server_name(app):
|
||||
@app.get("/")
|
||||
@@ -1857,7 +1866,7 @@ def test_request_server_name_in_host_header(app):
|
||||
request, response = app.test_client.get(
|
||||
"/", headers={"Host": "mal_formed"}
|
||||
)
|
||||
assert request.server_name == None # For now (later maybe 127.0.0.1)
|
||||
assert request.server_name == ""
|
||||
|
||||
|
||||
def test_request_server_name_forwarded(app):
|
||||
@@ -1882,8 +1891,9 @@ def test_request_server_port(app):
|
||||
def handler(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = app.test_client.get("/", headers={"Host": "my-server"})
|
||||
assert request.server_port == app.test_client.port
|
||||
test_client = SanicTestClient(app)
|
||||
request, response = test_client.get("/", headers={"Host": "my-server"})
|
||||
assert request.server_port == 80
|
||||
|
||||
|
||||
def test_request_server_port_in_host_header(app):
|
||||
@@ -1904,7 +1914,10 @@ def test_request_server_port_in_host_header(app):
|
||||
request, response = app.test_client.get(
|
||||
"/", headers={"Host": "mal_formed:5555"}
|
||||
)
|
||||
assert request.server_port == app.test_client.port
|
||||
if PORT is None:
|
||||
assert request.server_port != 5555
|
||||
else:
|
||||
assert request.server_port == app.test_client.port
|
||||
|
||||
|
||||
def test_request_server_port_forwarded(app):
|
||||
@@ -1939,12 +1952,12 @@ def test_server_name_and_url_for(app):
|
||||
def handler(request):
|
||||
return text("ok")
|
||||
|
||||
app.config.SERVER_NAME = "my-server"
|
||||
app.config.SERVER_NAME = "my-server" # This means default port
|
||||
assert app.url_for("handler", _external=True) == "http://my-server/foo"
|
||||
request, response = app.test_client.get("/foo")
|
||||
assert (
|
||||
request.url_for("handler")
|
||||
== f"http://my-server:{app.test_client.port}/foo"
|
||||
== f"http://my-server/foo"
|
||||
)
|
||||
|
||||
app.config.SERVER_NAME = "https://my-server/path"
|
||||
@@ -2005,7 +2018,7 @@ async def test_request_form_invalid_content_type_asgi(app):
|
||||
|
||||
|
||||
def test_endpoint_basic():
|
||||
app = Sanic()
|
||||
app = Sanic(name=__name__)
|
||||
|
||||
@app.route("/")
|
||||
def my_unique_handler(request):
|
||||
@@ -2018,7 +2031,7 @@ def test_endpoint_basic():
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_endpoint_basic_asgi():
|
||||
app = Sanic()
|
||||
app = Sanic(name=__name__)
|
||||
|
||||
@app.route("/")
|
||||
def my_unique_handler(request):
|
||||
@@ -2097,5 +2110,5 @@ def test_url_for_without_server_name(app):
|
||||
request, response = app.test_client.get("/sample")
|
||||
assert (
|
||||
response.json["url"]
|
||||
== f"http://127.0.0.1:{app.test_client.port}/url-for"
|
||||
== f"http://127.0.0.1:{request.server_port}/url-for"
|
||||
)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
import os
|
||||
import warnings
|
||||
|
||||
from collections import namedtuple
|
||||
from mimetypes import guess_type
|
||||
@@ -15,13 +16,14 @@ from aiofiles import os as async_os
|
||||
from sanic.response import (
|
||||
HTTPResponse,
|
||||
StreamingHTTPResponse,
|
||||
empty,
|
||||
file,
|
||||
file_stream,
|
||||
json,
|
||||
raw,
|
||||
stream,
|
||||
text,
|
||||
)
|
||||
from sanic.response import empty
|
||||
from sanic.server import HttpProtocol
|
||||
from sanic.testing import HOST, PORT
|
||||
|
||||
@@ -29,13 +31,14 @@ from sanic.testing import HOST, PORT
|
||||
JSON_DATA = {"ok": True}
|
||||
|
||||
|
||||
@pytest.mark.filterwarnings("ignore:Types other than str will be")
|
||||
def test_response_body_not_a_string(app):
|
||||
"""Test when a response body sent from the application is not a string"""
|
||||
random_num = choice(range(1000))
|
||||
|
||||
@app.route("/hello")
|
||||
async def hello_route(request):
|
||||
return HTTPResponse(body=random_num)
|
||||
return text(random_num)
|
||||
|
||||
request, response = app.test_client.get("/hello")
|
||||
assert response.text == str(random_num)
|
||||
@@ -240,7 +243,7 @@ def test_non_chunked_streaming_adds_correct_headers(non_chunked_streaming_app):
|
||||
|
||||
|
||||
def test_non_chunked_streaming_returns_correct_content(
|
||||
non_chunked_streaming_app
|
||||
non_chunked_streaming_app,
|
||||
):
|
||||
request, response = non_chunked_streaming_app.test_client.get("/")
|
||||
assert response.text == "foo,bar"
|
||||
@@ -255,7 +258,7 @@ def test_stream_response_status_returns_correct_headers(status):
|
||||
|
||||
@pytest.mark.parametrize("keep_alive_timeout", [10, 20, 30])
|
||||
def test_stream_response_keep_alive_returns_correct_headers(
|
||||
keep_alive_timeout
|
||||
keep_alive_timeout,
|
||||
):
|
||||
response = StreamingHTTPResponse(sample_streaming_fn)
|
||||
headers = response.get_headers(
|
||||
@@ -284,7 +287,7 @@ def test_stream_response_does_not_include_chunked_header_if_disabled():
|
||||
|
||||
|
||||
def test_stream_response_writes_correct_content_to_transport_when_chunked(
|
||||
streaming_app
|
||||
streaming_app,
|
||||
):
|
||||
response = StreamingHTTPResponse(sample_streaming_fn)
|
||||
response.protocol = MagicMock(HttpProtocol)
|
||||
@@ -406,7 +409,7 @@ def test_file_response(app, file_name, static_file_directory, status):
|
||||
mime_type=guess_type(file_path)[0] or "text/plain",
|
||||
)
|
||||
|
||||
request, response = app.test_client.get("/files/{}".format(file_name))
|
||||
request, response = app.test_client.get(f"/files/{file_name}")
|
||||
assert response.status == status
|
||||
assert response.body == get_file_content(static_file_directory, file_name)
|
||||
assert "Content-Disposition" not in response.headers
|
||||
@@ -429,12 +432,13 @@ def test_file_response_custom_filename(
|
||||
file_path = os.path.abspath(unquote(file_path))
|
||||
return file(file_path, filename=dest)
|
||||
|
||||
request, response = app.test_client.get("/files/{}".format(source))
|
||||
request, response = app.test_client.get(f"/files/{source}")
|
||||
assert response.status == 200
|
||||
assert response.body == get_file_content(static_file_directory, source)
|
||||
assert response.headers[
|
||||
"Content-Disposition"
|
||||
] == 'attachment; filename="{}"'.format(dest)
|
||||
assert (
|
||||
response.headers["Content-Disposition"]
|
||||
== f'attachment; filename="{dest}"'
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("file_name", ["test.file", "decode me.txt"])
|
||||
@@ -459,7 +463,7 @@ def test_file_head_response(app, file_name, static_file_directory):
|
||||
mime_type=guess_type(file_path)[0] or "text/plain",
|
||||
)
|
||||
|
||||
request, response = app.test_client.head("/files/{}".format(file_name))
|
||||
request, response = app.test_client.head(f"/files/{file_name}")
|
||||
assert response.status == 200
|
||||
assert "Accept-Ranges" in response.headers
|
||||
assert "Content-Length" in response.headers
|
||||
@@ -482,7 +486,7 @@ def test_file_stream_response(app, file_name, static_file_directory):
|
||||
mime_type=guess_type(file_path)[0] or "text/plain",
|
||||
)
|
||||
|
||||
request, response = app.test_client.get("/files/{}".format(file_name))
|
||||
request, response = app.test_client.get(f"/files/{file_name}")
|
||||
assert response.status == 200
|
||||
assert response.body == get_file_content(static_file_directory, file_name)
|
||||
assert "Content-Disposition" not in response.headers
|
||||
@@ -505,12 +509,13 @@ def test_file_stream_response_custom_filename(
|
||||
file_path = os.path.abspath(unquote(file_path))
|
||||
return file_stream(file_path, chunk_size=32, filename=dest)
|
||||
|
||||
request, response = app.test_client.get("/files/{}".format(source))
|
||||
request, response = app.test_client.get(f"/files/{source}")
|
||||
assert response.status == 200
|
||||
assert response.body == get_file_content(static_file_directory, source)
|
||||
assert response.headers[
|
||||
"Content-Disposition"
|
||||
] == 'attachment; filename="{}"'.format(dest)
|
||||
assert (
|
||||
response.headers["Content-Disposition"]
|
||||
== f'attachment; filename="{dest}"'
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("file_name", ["test.file", "decode me.txt"])
|
||||
@@ -538,7 +543,7 @@ def test_file_stream_head_response(app, file_name, static_file_directory):
|
||||
mime_type=guess_type(file_path)[0] or "text/plain",
|
||||
)
|
||||
|
||||
request, response = app.test_client.head("/files/{}".format(file_name))
|
||||
request, response = app.test_client.head(f"/files/{file_name}")
|
||||
assert response.status == 200
|
||||
# A HEAD request should never be streamed/chunked.
|
||||
if "Transfer-Encoding" in response.headers:
|
||||
@@ -576,11 +581,12 @@ def test_file_stream_response_range(
|
||||
_range=range,
|
||||
)
|
||||
|
||||
request, response = app.test_client.get("/files/{}".format(file_name))
|
||||
request, response = app.test_client.get(f"/files/{file_name}")
|
||||
assert response.status == 206
|
||||
assert "Content-Range" in response.headers
|
||||
assert response.headers["Content-Range"] == "bytes {}-{}/{}".format(
|
||||
range.start, range.end, range.total
|
||||
assert (
|
||||
response.headers["Content-Range"]
|
||||
== f"bytes {range.start}-{range.end}/{range.total}"
|
||||
)
|
||||
|
||||
|
||||
@@ -602,3 +608,17 @@ def test_empty_response(app):
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.content_type is None
|
||||
assert response.body == b""
|
||||
|
||||
|
||||
def test_response_body_bytes_deprecated(app):
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.simplefilter("always")
|
||||
|
||||
HTTPResponse(body_bytes=b"bytes")
|
||||
|
||||
assert len(w) == 1
|
||||
assert issubclass(w[0].category, DeprecationWarning)
|
||||
assert (
|
||||
"Parameter `body_bytes` is deprecated, use `body` instead"
|
||||
in str(w[0].message)
|
||||
)
|
||||
|
||||
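Note: the new test_response_body_bytes_deprecated documents that HTTPResponse's body_bytes parameter is deprecated in favour of body. Migration is a one-line change; the sketch below also shows how the test surfaces the warning:

```python
import warnings

from sanic.response import HTTPResponse

# Old spelling: per the test, this now emits a DeprecationWarning.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    HTTPResponse(body_bytes=b"payload")
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# Preferred spelling going forward:
response = HTTPResponse(body=b"payload")
```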
@@ -40,7 +40,7 @@ async def handler_2(request):
|
||||
def test_default_server_error_response_timeout():
|
||||
request, response = response_timeout_default_app.test_client.get("/1")
|
||||
assert response.status == 503
|
||||
assert response.text == "Error: Response Timeout"
|
||||
assert "Response Timeout" in response.text
|
||||
|
||||
|
||||
response_handler_cancelled_app.flag = False
|
||||
@@ -65,5 +65,5 @@ async def handler_3(request):
|
||||
def test_response_handler_cancelled():
|
||||
request, response = response_handler_cancelled_app.test_client.get("/1")
|
||||
assert response.status == 503
|
||||
assert response.text == "Error: Response Timeout"
|
||||
assert "Response Timeout" in response.text
|
||||
assert response_handler_cancelled_app.flag is False
|
||||
|
||||
@@ -6,6 +6,7 @@ from sanic import Sanic
|
||||
from sanic.constants import HTTP_METHODS
|
||||
from sanic.response import json, text
|
||||
from sanic.router import ParameterNameConflicts, RouteDoesNotExist, RouteExists
|
||||
from sanic.testing import SanicTestClient
|
||||
|
||||
|
||||
# ------------------------------------------------------------ #
|
||||
@@ -20,17 +21,17 @@ def test_versioned_routes_get(app, method):
|
||||
func = getattr(app, method)
|
||||
if callable(func):
|
||||
|
||||
@func("/{}".format(method), version=1)
|
||||
@func(f"/{method}", version=1)
|
||||
def handler(request):
|
||||
return text("OK")
|
||||
|
||||
else:
|
||||
print(func)
|
||||
raise Exception("Method: {} is not callable".format(method))
|
||||
raise Exception(f"Method: {method} is not callable")
|
||||
|
||||
client_method = getattr(app.test_client, method)
|
||||
|
||||
request, response = client_method("/v1/{}".format(method))
|
||||
request, response = client_method(f"/v1/{method}")
|
||||
assert response.status == 200
|
||||
|
||||
|
||||
@@ -167,35 +168,36 @@ def test_route_optional_slash(app):
|
||||
def test_route_strict_slashes_set_to_false_and_host_is_a_list(app):
|
||||
# Part of regression test for issue #1120
|
||||
|
||||
site1 = "127.0.0.1:{}".format(app.test_client.port)
|
||||
test_client = SanicTestClient(app, port=42101)
|
||||
site1 = f"127.0.0.1:{test_client.port}"
|
||||
|
||||
# before fix, this raises a RouteExists error
|
||||
@app.get("/get", host=[site1, "site2.com"], strict_slashes=False)
|
||||
def get_handler(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = app.test_client.get("http://" + site1 + "/get")
|
||||
request, response = test_client.get("http://" + site1 + "/get")
|
||||
assert response.text == "OK"
|
||||
|
||||
@app.post("/post", host=[site1, "site2.com"], strict_slashes=False)
|
||||
def post_handler(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = app.test_client.post("http://" + site1 + "/post")
|
||||
request, response = test_client.post("http://" + site1 + "/post")
|
||||
assert response.text == "OK"
|
||||
|
||||
@app.put("/put", host=[site1, "site2.com"], strict_slashes=False)
|
||||
def put_handler(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = app.test_client.put("http://" + site1 + "/put")
|
||||
request, response = test_client.put("http://" + site1 + "/put")
|
||||
assert response.text == "OK"
|
||||
|
||||
@app.delete("/delete", host=[site1, "site2.com"], strict_slashes=False)
|
||||
def delete_handler(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = app.test_client.delete("http://" + site1 + "/delete")
|
||||
request, response = test_client.delete("http://" + site1 + "/delete")
|
||||
assert response.text == "OK"
|
||||
|
||||
|
||||
@@ -412,7 +414,8 @@ def test_dynamic_route_uuid(app):
|
||||
assert response.text == "OK"
|
||||
assert type(results[0]) is uuid.UUID
|
||||
|
||||
request, response = app.test_client.get("/quirky/{}".format(uuid.uuid4()))
|
||||
generated_uuid = uuid.uuid4()
|
||||
request, response = app.test_client.get(f"/quirky/{generated_uuid}")
|
||||
assert response.status == 200
|
||||
|
||||
request, response = app.test_client.get("/quirky/non-existing")
|
||||
@@ -528,6 +531,19 @@ def test_add_webscoket_route(app, strict_slashes):
|
||||
assert ev.is_set()
|
||||
|
||||
|
||||
def test_add_webscoket_route_with_version(app):
|
||||
ev = asyncio.Event()
|
||||
|
||||
async def handler(request, ws):
|
||||
assert ws.subprotocol is None
|
||||
ev.set()
|
||||
|
||||
app.add_websocket_route(handler, "/ws", version=1)
|
||||
request, response = app.test_client.websocket("/v1/ws")
|
||||
assert response.opened is True
|
||||
assert ev.is_set()
|
||||
|
||||
|
||||
def test_route_duplicate(app):
|
||||
|
||||
with pytest.raises(RouteExists):
|
||||
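Note: test_add_webscoket_route_with_version above shows that add_websocket_route accepts the same version keyword as HTTP routes, mounting the handler under a /v1 prefix. Roughly (handler and path are illustrative):

```python
from sanic import Sanic

app = Sanic(name=__name__)


async def feed(request, ws):
    await ws.send("hello")


# version=1 prefixes the path, so the socket is served at /v1/feed.
app.add_websocket_route(feed, "/feed", version=1)
```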
@@ -551,6 +567,35 @@ def test_route_duplicate(app):
|
||||
pass
|
||||
|
||||
|
||||
def test_double_stack_route(app):
|
||||
@app.route("/test/1")
|
||||
@app.route("/test/2")
|
||||
async def handler1(request):
|
||||
return text("OK")
|
||||
|
||||
request, response = app.test_client.get("/test/1")
|
||||
assert response.status == 200
|
||||
request, response = app.test_client.get("/test/2")
|
||||
assert response.status == 200
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_websocket_route_asgi(app):
|
||||
ev = asyncio.Event()
|
||||
|
||||
@app.websocket("/test/1")
|
||||
@app.websocket("/test/2")
|
||||
async def handler(request, ws):
|
||||
ev.set()
|
||||
|
||||
request, response = await app.asgi_client.websocket("/test/1")
|
||||
first_set = ev.is_set()
|
||||
ev.clear()
|
||||
request, response = await app.asgi_client.websocket("/test/1")
|
||||
second_set = ev.is_set()
|
||||
assert first_set and second_set
|
||||
|
||||
|
||||
def test_method_not_allowed(app):
|
||||
@app.route("/test", methods=["GET"])
|
||||
async def handler(request):
|
||||
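Note: test_double_stack_route above confirms that route decorators can be stacked, registering one handler at several paths (the websocket variant does the same). A minimal sketch:

```python
from sanic import Sanic
from sanic.response import text

app = Sanic(name=__name__)


@app.route("/test/1")
@app.route("/test/2")
async def handler(request):
    # One handler, two registered paths.
    return text("OK")
```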
@@ -738,55 +783,6 @@ def test_add_route_method_not_allowed(app):
|
||||
assert response.status == 405
|
||||
|
||||
|
||||
def test_remove_static_route(app):
|
||||
async def handler1(request):
|
||||
return text("OK1")
|
||||
|
||||
async def handler2(request):
|
||||
return text("OK2")
|
||||
|
||||
app.add_route(handler1, "/test")
|
||||
app.add_route(handler2, "/test2")
|
||||
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.status == 200
|
||||
|
||||
request, response = app.test_client.get("/test2")
|
||||
assert response.status == 200
|
||||
|
||||
app.remove_route("/test")
|
||||
app.remove_route("/test2")
|
||||
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.status == 404
|
||||
|
||||
request, response = app.test_client.get("/test2")
|
||||
assert response.status == 404
|
||||
|
||||
|
||||
def test_remove_dynamic_route(app):
|
||||
async def handler(request, name):
|
||||
return text("OK")
|
||||
|
||||
app.add_route(handler, "/folder/<name>")
|
||||
|
||||
request, response = app.test_client.get("/folder/test123")
|
||||
assert response.status == 200
|
||||
|
||||
app.remove_route("/folder/<name>")
|
||||
request, response = app.test_client.get("/folder/test123")
|
||||
assert response.status == 404
|
||||
|
||||
|
||||
def test_remove_inexistent_route(app):
|
||||
|
||||
uri = "/test"
|
||||
with pytest.raises(RouteDoesNotExist) as excinfo:
|
||||
app.remove_route(uri)
|
||||
|
||||
assert str(excinfo.value) == "Route was not registered: {}".format(uri)
|
||||
|
||||
|
||||
def test_removing_slash(app):
|
||||
@app.get("/rest/<resource>")
|
||||
def get(_):
|
||||
@@ -799,59 +795,6 @@ def test_removing_slash(app):
|
||||
assert len(app.router.routes_all.keys()) == 2
|
||||
|
||||
|
||||
def test_remove_unhashable_route(app):
|
||||
async def handler(request, unhashable):
|
||||
return text("OK")
|
||||
|
||||
app.add_route(handler, "/folder/<unhashable:[A-Za-z0-9/]+>/end/")
|
||||
|
||||
request, response = app.test_client.get("/folder/test/asdf/end/")
|
||||
assert response.status == 200
|
||||
|
||||
request, response = app.test_client.get("/folder/test///////end/")
|
||||
assert response.status == 200
|
||||
|
||||
request, response = app.test_client.get("/folder/test/end/")
|
||||
assert response.status == 200
|
||||
|
||||
app.remove_route("/folder/<unhashable:[A-Za-z0-9/]+>/end/")
|
||||
|
||||
request, response = app.test_client.get("/folder/test/asdf/end/")
|
||||
assert response.status == 404
|
||||
|
||||
request, response = app.test_client.get("/folder/test///////end/")
|
||||
assert response.status == 404
|
||||
|
||||
request, response = app.test_client.get("/folder/test/end/")
|
||||
assert response.status == 404
|
||||
|
||||
|
||||
def test_remove_route_without_clean_cache(app):
|
||||
async def handler(request):
|
||||
return text("OK")
|
||||
|
||||
app.add_route(handler, "/test")
|
||||
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.status == 200
|
||||
|
||||
app.remove_route("/test", clean_cache=True)
|
||||
app.remove_route("/test/", clean_cache=True)
|
||||
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.status == 404
|
||||
|
||||
app.add_route(handler, "/test")
|
||||
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.status == 200
|
||||
|
||||
app.remove_route("/test", clean_cache=False)
|
||||
|
||||
request, response = app.test_client.get("/test")
|
||||
assert response.status == 200
|
||||
|
||||
|
||||
def test_overload_routes(app):
|
||||
@app.route("/overload", methods=["GET"])
|
||||
async def handler1(request):
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import asyncio
|
||||
import signal
|
||||
|
||||
from contextlib import closing
|
||||
from socket import socket
|
||||
|
||||
import pytest
|
||||
|
||||
from sanic.testing import HOST, PORT
|
||||
@@ -22,7 +25,7 @@ skipif_no_alarm = pytest.mark.skipif(
|
||||
|
||||
def create_listener(listener_name, in_list):
|
||||
async def _listener(app, loop):
|
||||
print("DEBUG MESSAGE FOR PYTEST for {}".format(listener_name))
|
||||
print(f"DEBUG MESSAGE FOR PYTEST for {listener_name}")
|
||||
in_list.insert(0, app.name + listener_name)
|
||||
|
||||
return _listener
|
||||
@@ -118,25 +121,30 @@ def test_create_server_trigger_events(app):
|
||||
app.listener("after_server_stop")(after_stop)
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
serv_coro = app.create_server(return_asyncio_server=True)
|
||||
serv_task = asyncio.ensure_future(serv_coro, loop=loop)
|
||||
server = loop.run_until_complete(serv_task)
|
||||
server.after_start()
|
||||
try:
|
||||
loop.run_forever()
|
||||
except KeyboardInterrupt as e:
|
||||
loop.stop()
|
||||
finally:
|
||||
# Run the on_stop function if provided
|
||||
server.before_stop()
|
||||
|
||||
# Wait for server to close
|
||||
close_task = server.close()
|
||||
loop.run_until_complete(close_task)
|
||||
# Use random port for tests
|
||||
with closing(socket()) as sock:
|
||||
sock.bind(("127.0.0.1", 0))
|
||||
|
||||
# Complete all tasks on the loop
|
||||
signal.stopped = True
|
||||
for connection in server.connections:
|
||||
connection.close_if_idle()
|
||||
server.after_stop()
|
||||
assert flag1 and flag2 and flag3
|
||||
serv_coro = app.create_server(return_asyncio_server=True, sock=sock)
|
||||
serv_task = asyncio.ensure_future(serv_coro, loop=loop)
|
||||
server = loop.run_until_complete(serv_task)
|
||||
server.after_start()
|
||||
try:
|
||||
loop.run_forever()
|
||||
except KeyboardInterrupt as e:
|
||||
loop.stop()
|
||||
finally:
|
||||
# Run the on_stop function if provided
|
||||
server.before_stop()
|
||||
|
||||
# Wait for server to close
|
||||
close_task = server.close()
|
||||
loop.run_until_complete(close_task)
|
||||
|
||||
# Complete all tasks on the loop
|
||||
signal.stopped = True
|
||||
for connection in server.connections:
|
||||
connection.close_if_idle()
|
||||
server.after_stop()
|
||||
assert flag1 and flag2 and flag3
|
||||
|
||||
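Note: the reworked test_create_server_trigger_events avoids a fixed port by binding its own socket to port 0 and handing it to create_server — a useful pattern whenever a test needs a guaranteed-free port. A trimmed sketch of that setup, assuming the create_server keywords shown in the diff:

```python
import asyncio
from contextlib import closing
from socket import socket

from sanic import Sanic

app = Sanic(name=__name__)

loop = asyncio.get_event_loop()

with closing(socket()) as sock:
    sock.bind(("127.0.0.1", 0))  # port 0: the OS picks a free ephemeral port

    serv_coro = app.create_server(return_asyncio_server=True, sock=sock)
    server = loop.run_until_complete(asyncio.ensure_future(serv_coro, loop=loop))
    server.after_start()
    # ... exercise the server, then shut it down as the test does ...
    loop.run_until_complete(server.close())
    server.after_stop()
```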
@@ -1,8 +1,13 @@
import asyncio
import os
import signal

from queue import Queue
from unittest.mock import MagicMock

import pytest

from sanic.compat import ctrlc_workaround_for_windows
from sanic.response import HTTPResponse
from sanic.testing import HOST, PORT

@@ -16,13 +21,19 @@ calledq = Queue()


def set_loop(app, loop):
loop.add_signal_handler = MagicMock()
global mock
mock = MagicMock()
if os.name == "nt":
signal.signal = mock
else:
loop.add_signal_handler = mock


def after(app, loop):
calledq.put(loop.add_signal_handler.called)
calledq.put(mock.called)


@pytest.mark.skipif(os.name == "nt", reason="May hang CI on py38/windows")
def test_register_system_signals(app):
"""Test if sanic register system signals"""

@@ -38,6 +49,7 @@ def test_register_system_signals(app):
assert calledq.get() is True


@pytest.mark.skipif(os.name == "nt", reason="May hang CI on py38/windows")
def test_dont_register_system_signals(app):
"""Test if sanic don't register system signals"""

@@ -51,3 +63,47 @@ def test_dont_register_system_signals(app):

app.run(HOST, PORT, register_sys_signals=False)
assert calledq.get() is False


@pytest.mark.skipif(os.name == "nt", reason="windows cannot SIGINT processes")
def test_windows_workaround():
"""Test Windows workaround (on any other OS)"""
# At least some code coverage, even though this test doesn't work on
# Windows...
class MockApp:
def __init__(self):
self.is_stopping = False

def stop(self):
assert not self.is_stopping
self.is_stopping = True

def add_task(self, func):
loop = asyncio.get_event_loop()
self.stay_active_task = loop.create_task(func(self))

async def atest(stop_first):
app = MockApp()
ctrlc_workaround_for_windows(app)
await asyncio.sleep(0.05)
if stop_first:
app.stop()
await asyncio.sleep(0.2)
assert app.is_stopping == stop_first
# First Ctrl+C: should call app.stop() within 0.1 seconds
os.kill(os.getpid(), signal.SIGINT)
await asyncio.sleep(0.2)
assert app.is_stopping
assert app.stay_active_task.result() == None
# Second Ctrl+C should raise
with pytest.raises(KeyboardInterrupt):
os.kill(os.getpid(), signal.SIGINT)
return "OK"

# Run in our private loop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
res = loop.run_until_complete(atest(False))
assert res == "OK"
res = loop.run_until_complete(atest(True))
assert res == "OK"

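Note: the set_loop/after changes above exist because loop.add_signal_handler is unavailable on Windows, so the test now patches signal.signal there and the loop handler elsewhere. A hedged sketch of the same platform split outside the test suite (install_sigint_handler is an illustrative name, not a Sanic API):

import os
import signal

def install_sigint_handler(loop, handler):
    # POSIX event loops can hook the signal directly into the loop;
    # Windows falls back to the plain signal module.
    if os.name == "nt":
        signal.signal(signal.SIGINT, lambda signum, frame: handler())
    else:
        loop.add_signal_handler(signal.SIGINT, handler)
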
@@ -97,9 +97,7 @@ def test_static_file_content_type(app, static_file_directory, file_name):
def test_static_directory(app, file_name, base_uri, static_file_directory):
app.static(base_uri, static_file_directory)

request, response = app.test_client.get(
uri="{}/{}".format(base_uri, file_name)
)
request, response = app.test_client.get(uri=f"{base_uri}/{file_name}")
assert response.status == 200
assert response.body == get_file_content(static_file_directory, file_name)

@@ -234,11 +232,11 @@ def test_static_content_range_invalid_unit(
)

unit = "bit"
headers = {"Range": "{}=1-0".format(unit)}
headers = {"Range": f"{unit}=1-0"}
request, response = app.test_client.get("/testing.file", headers=headers)

assert response.status == 416
assert response.text == "Error: {} is not a valid Range Type".format(unit)
assert f"{unit} is not a valid Range Type" in response.text


@pytest.mark.parametrize("file_name", ["test.file", "decode me.txt"])
@@ -252,13 +250,11 @@ def test_static_content_range_invalid_start(
)

start = "start"
headers = {"Range": "bytes={}-0".format(start)}
headers = {"Range": f"bytes={start}-0"}
request, response = app.test_client.get("/testing.file", headers=headers)

assert response.status == 416
assert response.text == "Error: '{}' is invalid for Content Range".format(
start
)
assert f"'{start}' is invalid for Content Range" in response.text


@pytest.mark.parametrize("file_name", ["test.file", "decode me.txt"])
@@ -272,13 +268,11 @@ def test_static_content_range_invalid_end(
)

end = "end"
headers = {"Range": "bytes=1-{}".format(end)}
headers = {"Range": f"bytes=1-{end}"}
request, response = app.test_client.get("/testing.file", headers=headers)

assert response.status == 416
assert response.text == "Error: '{}' is invalid for Content Range".format(
end
)
assert f"'{end}' is invalid for Content Range" in response.text


@pytest.mark.parametrize("file_name", ["test.file", "decode me.txt"])
@@ -295,7 +289,7 @@ def test_static_content_range_invalid_parameters(
request, response = app.test_client.get("/testing.file", headers=headers)

assert response.status == 416
assert response.text == "Error: Invalid for Content Range parameters"
assert "Invalid for Content Range parameters" in response.text


@pytest.mark.parametrize(
@@ -369,7 +363,7 @@ def test_file_not_found(app, static_file_directory):
request, response = app.test_client.get("/static/not_found")

assert response.status == 404
assert response.text == "Error: File not found"
assert "File not found" in response.text


@pytest.mark.parametrize("static_name", ["_static_name", "static"])
@@ -377,20 +371,6 @@ def test_file_not_found(app, static_file_directory):
def test_static_name(app, static_file_directory, static_name, file_name):
app.static("/static", static_file_directory, name=static_name)

request, response = app.test_client.get("/static/{}".format(file_name))
request, response = app.test_client.get(f"/static/{file_name}")

assert response.status == 200


@pytest.mark.parametrize("file_name", ["test.file"])
def test_static_remove_route(app, static_file_directory, file_name):
app.static(
"/testing.file", get_file_path(static_file_directory, file_name)
)

request, response = app.test_client.get("/testing.file")
assert response.status == 200

app.remove_route("/testing.file")
request, response = app.test_client.get("/testing.file")
assert response.status == 404

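Note: these static-file hunks only relax exact error-text assertions to substring checks and move to f-strings; the API under test is unchanged. For context, a minimal sketch of the handler being exercised (paths are placeholders):

from sanic import Sanic

app = Sanic("static_example")
app.static("/static", "./static")              # serve a directory
app.static("/logo.png", "./assets/logo.png")   # serve a single file

# An invalid Range unit is rejected with 416, as the tests assert:
# request, response = app.test_client.get(
#     "/logo.png", headers={"Range": "bit=1-0"}
# )
# assert response.status == 416
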
@@ -1,5 +1,3 @@
import socket

from sanic.response import json, text
from sanic.testing import PORT, SanicTestClient

@@ -29,7 +27,8 @@ def test_test_client_port_default(app):
return json(request.transport.get_extra_info("sockname")[1])

test_client = SanicTestClient(app)
assert test_client.port == PORT
assert test_client.port == PORT # Can be None before request

request, response = test_client.get("/get")
assert response.json == PORT
assert test_client.port > 0
assert response.json == test_client.port

tests/test_unix_socket.py (new file, 235 lines)
@@ -0,0 +1,235 @@
import asyncio
import logging
import os
import subprocess
import sys

import httpx
import pytest

from sanic import Sanic
from sanic.response import text


pytestmark = pytest.mark.skipif(os.name != "posix", reason="UNIX only")
SOCKPATH = "/tmp/sanictest.sock"
SOCKPATH2 = "/tmp/sanictest2.sock"


@pytest.fixture(autouse=True)
def socket_cleanup():
    try:
        os.unlink(SOCKPATH)
    except FileNotFoundError:
        pass
    try:
        os.unlink(SOCKPATH2)
    except FileNotFoundError:
        pass
    # Run test function
    yield
    try:
        os.unlink(SOCKPATH2)
    except FileNotFoundError:
        pass
    try:
        os.unlink(SOCKPATH)
    except FileNotFoundError:
        pass


def test_unix_socket_creation(caplog):
    from socket import AF_UNIX, socket

    with socket(AF_UNIX) as sock:
        sock.bind(SOCKPATH)
    assert os.path.exists(SOCKPATH)
    ino = os.stat(SOCKPATH).st_ino

    app = Sanic(name=__name__)

    @app.listener("after_server_start")
    def running(app, loop):
        assert os.path.exists(SOCKPATH)
        assert ino != os.stat(SOCKPATH).st_ino
        app.stop()

    with caplog.at_level(logging.INFO):
        app.run(unix=SOCKPATH)

    assert (
        "sanic.root",
        logging.INFO,
        f"Goin' Fast @ {SOCKPATH} http://...",
    ) in caplog.record_tuples
    assert not os.path.exists(SOCKPATH)


def test_invalid_paths():
    app = Sanic(name=__name__)

    with pytest.raises(FileExistsError):
        app.run(unix=".")

    with pytest.raises(FileNotFoundError):
        app.run(unix="no-such-directory/sanictest.sock")


def test_dont_replace_file():
    with open(SOCKPATH, "w") as f:
        f.write("File, not socket")

    app = Sanic(name=__name__)

    @app.listener("after_server_start")
    def stop(app, loop):
        app.stop()

    with pytest.raises(FileExistsError):
        app.run(unix=SOCKPATH)


def test_dont_follow_symlink():
    from socket import AF_UNIX, socket

    with socket(AF_UNIX) as sock:
        sock.bind(SOCKPATH2)
    os.symlink(SOCKPATH2, SOCKPATH)

    app = Sanic(name=__name__)

    @app.listener("after_server_start")
    def stop(app, loop):
        app.stop()

    with pytest.raises(FileExistsError):
        app.run(unix=SOCKPATH)


def test_socket_deleted_while_running():
    app = Sanic(name=__name__)

    @app.listener("after_server_start")
    async def hack(app, loop):
        os.unlink(SOCKPATH)
        app.stop()

    app.run(host="myhost.invalid", unix=SOCKPATH)


def test_socket_replaced_with_file():
    app = Sanic(name=__name__)

    @app.listener("after_server_start")
    async def hack(app, loop):
        os.unlink(SOCKPATH)
        with open(SOCKPATH, "w") as f:
            f.write("Not a socket")
        app.stop()

    app.run(host="myhost.invalid", unix=SOCKPATH)


def test_unix_connection():
    app = Sanic(name=__name__)

    @app.get("/")
    def handler(request):
        return text(f"{request.conn_info.server}")

    @app.listener("after_server_start")
    async def client(app, loop):
        try:
            async with httpx.AsyncClient(uds=SOCKPATH) as client:
                r = await client.get("http://myhost.invalid/")
                assert r.status_code == 200
                assert r.text == os.path.abspath(SOCKPATH)
        finally:
            app.stop()

    app.run(host="myhost.invalid", unix=SOCKPATH)


app_multi = Sanic(name=__name__)


def handler(request):
    return text(f"{request.conn_info.server}")


async def client(app, loop):
    try:
        async with httpx.AsyncClient(uds=SOCKPATH) as client:
            r = await client.get("http://myhost.invalid/")
            assert r.status_code == 200
            assert r.text == os.path.abspath(SOCKPATH)
    finally:
        app.stop()


def test_unix_connection_multiple_workers():
    app_multi.get("/")(handler)
    app_multi.listener("after_server_start")(client)
    app_multi.run(host="myhost.invalid", unix=SOCKPATH, workers=2)


async def test_zero_downtime():
    """Graceful server termination and socket replacement on restarts"""
    from signal import SIGINT
    from time import monotonic as current_time

    async def client():
        for _ in range(40):
            async with httpx.AsyncClient(uds=SOCKPATH) as client:
                r = await client.get("http://localhost/sleep/0.1")
                assert r.status_code == 200
                assert r.text == f"Slept 0.1 seconds.\n"

    def spawn():
        command = [
            sys.executable,
            "-m",
            "sanic",
            "--unix",
            SOCKPATH,
            "examples.delayed_response.app",
        ]
        DN = subprocess.DEVNULL
        return subprocess.Popen(
            command, stdin=DN, stdout=DN, stderr=subprocess.PIPE
        )

    try:
        processes = [spawn()]
        while not os.path.exists(SOCKPATH):
            if processes[0].poll() is not None:
                raise Exception("Worker did not start properly")
            await asyncio.sleep(0.0001)
        ino = os.stat(SOCKPATH).st_ino
        task = asyncio.get_event_loop().create_task(client())
        start_time = current_time()
        while current_time() < start_time + 4:
            # Start a new one and wait until the socket is replaced
            processes.append(spawn())
            while ino == os.stat(SOCKPATH).st_ino:
                await asyncio.sleep(0.001)
            ino = os.stat(SOCKPATH).st_ino
            # Graceful termination of the previous one
            processes[-2].send_signal(SIGINT)
        # Wait until client has completed all requests
        await task
        processes[-1].send_signal(SIGINT)
        for worker in processes:
            try:
                worker.wait(1.0)
            except subprocess.TimeoutExpired:
                raise Exception(
                    f"Worker would not terminate:\n{worker.stderr}"
                )
    finally:
        for worker in processes:
            worker.kill()
    # Test for clean run and termination
    assert len(processes) > 5
    assert [worker.poll() for worker in processes] == len(processes) * [0]
    assert not os.path.exists(SOCKPATH)
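Note: tests/test_unix_socket.py is new and covers the UNIX domain socket support exercised in this range. A minimal end-to-end sketch of the feature, with a placeholder socket path and route (not taken verbatim from the changeset):

import httpx
from sanic import Sanic
from sanic.response import text

SOCKPATH = "/tmp/example.sock"  # placeholder path

app = Sanic("unix_example")

@app.get("/")
async def index(request):
    return text("hello over a unix socket")

@app.listener("after_server_start")
async def probe(app, loop):
    # httpx speaks HTTP over a UNIX socket via `uds=`; the hostname in the
    # URL only affects the Host header.
    async with httpx.AsyncClient(uds=SOCKPATH) as client:
        r = await client.get("http://example.invalid/")
        assert r.status_code == 200
    app.stop()

if __name__ == "__main__":
    app.run(unix=SOCKPATH)

The zero-downtime test drives the same feature through the CLI (python -m sanic --unix <path> <module.app>) and checks that restarted workers replace the socket without dropping in-flight requests.
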
@@ -20,30 +20,24 @@ URL_FOR_ARGS3 = dict(
arg1="v1",
_anchor="anchor",
_scheme="http",
_server="{}:{}".format(test_host, test_port),
_server=f"{test_host}:{test_port}",
_external=True,
)
URL_FOR_VALUE3 = "http://{}:{}/myurl?arg1=v1#anchor".format(
test_host, test_port
)
URL_FOR_VALUE3 = f"http://{test_host}:{test_port}/myurl?arg1=v1#anchor"
URL_FOR_ARGS4 = dict(
arg1="v1",
_anchor="anchor",
_external=True,
_server="http://{}:{}".format(test_host, test_port),
)
URL_FOR_VALUE4 = "http://{}:{}/myurl?arg1=v1#anchor".format(
test_host, test_port
_server=f"http://{test_host}:{test_port}",
)
URL_FOR_VALUE4 = f"http://{test_host}:{test_port}/myurl?arg1=v1#anchor"


def _generate_handlers_from_names(app, l):
for name in l:
# this is the easiest way to generate functions with dynamic names
exec(
'@app.route(name)\ndef {}(request):\n\treturn text("{}")'.format(
name, name
)
f'@app.route(name)\ndef {name}(request):\n\treturn text("{name}")'
)


@@ -60,7 +54,7 @@ def test_simple_url_for_getting(simple_app):
for letter in string.ascii_letters:
url = simple_app.url_for(letter)

assert url == "/{}".format(letter)
assert url == f"/{letter}"
request, response = simple_app.test_client.get(url)
assert response.status == 200
assert response.text == letter
@@ -88,7 +82,7 @@ def test_simple_url_for_getting_with_more_params(app, args, url):

def test_url_for_with_server_name(app):

server_name = "{}:{}".format(test_host, test_port)
server_name = f"{test_host}:{test_port}"
app.config.update({"SERVER_NAME": server_name})
path = "/myurl"

@@ -96,7 +90,7 @@ def test_url_for_with_server_name(app):
def passes(request):
return text("this should pass")

url = "http://{}{}".format(server_name, path)
url = f"http://{server_name}{path}"
assert url == app.url_for("passes", _server=None, _external=True)
request, response = app.test_client.get(url)
assert response.status == 200
@@ -118,7 +112,7 @@ def test_fails_url_build_if_param_not_passed(app):
url = "/"

for letter in string.ascii_letters:
url += "<{}>/".format(letter)
url += f"<{letter}>/"

@app.route(url)
def fail(request):
@@ -182,7 +176,7 @@ def test_passes_with_negative_int_message(app):
@app.route("path/<possibly_neg:int>/another-word")
def good(request, possibly_neg):
assert isinstance(possibly_neg, int)
return text("this should pass with `{}`".format(possibly_neg))
return text(f"this should pass with `{possibly_neg}`")

u_plus_3 = app.url_for("good", possibly_neg=3)
assert u_plus_3 == "/path/3/another-word", u_plus_3
@@ -237,13 +231,13 @@ def test_passes_with_negative_number_message(app, number):
@app.route("path/<possibly_neg:number>/another-word")
def good(request, possibly_neg):
assert isinstance(possibly_neg, (int, float))
return text("this should pass with `{}`".format(possibly_neg))
return text(f"this should pass with `{possibly_neg}`")

u = app.url_for("good", possibly_neg=number)
assert u == "/path/{}/another-word".format(number), u
assert u == f"/path/{number}/another-word", u
request, response = app.test_client.get(u)
# For ``number``, it has been cast to a float - so a ``3`` becomes a ``3.0``
assert response.text == "this should pass with `{}`".format(float(number))
assert response.text == f"this should pass with `{float(number)}`"


def test_adds_other_supplied_values_as_query_string(app):
@@ -275,7 +269,7 @@ def blueprint_app(app):

@first_print.route("/foo/<param>")
def foo_with_param(request, param):
return text("foo from first : {}".format(param))
return text(f"foo from first : {param}")

@second_print.route("/foo") # noqa
def foo(request):
@@ -283,7 +277,7 @@ def blueprint_app(app):

@second_print.route("/foo/<param>") # noqa
def foo_with_param(request, param):
return text("foo from second : {}".format(param))
return text(f"foo from second : {param}")

app.blueprint(first_print)
app.blueprint(second_print)

tests/test_url_for.py (new file, 63 lines)
@@ -0,0 +1,63 @@
import asyncio

from sanic.blueprints import Blueprint


def test_routes_with_host(app):
    @app.route("/")
    @app.route("/", name="hostindex", host="example.com")
    @app.route("/path", name="hostpath", host="path.example.com")
    def index(request):
        pass

    assert app.url_for("index") == "/"
    assert app.url_for("hostindex") == "/"
    assert app.url_for("hostpath") == "/path"
    assert app.url_for("hostindex", _external=True) == "http://example.com/"
    assert (
        app.url_for("hostpath", _external=True)
        == "http://path.example.com/path"
    )


def test_websocket_bp_route_name(app):
    """Tests that blueprint websocket route is named."""
    event = asyncio.Event()
    bp = Blueprint("test_bp", url_prefix="/bp")

    @bp.get("/main")
    async def main(request):
        ...

    @bp.websocket("/route")
    async def test_route(request, ws):
        event.set()

    @bp.websocket("/route2")
    async def test_route2(request, ws):
        event.set()

    @bp.websocket("/route3", name="foobar_3")
    async def test_route3(request, ws):
        event.set()

    app.blueprint(bp)

    uri = app.url_for("test_bp.main")
    assert uri == "/bp/main"

    uri = app.url_for("test_bp.test_route")
    assert uri == "/bp/route"
    request, response = app.test_client.websocket(uri)
    assert response.opened is True
    assert event.is_set()

    event.clear()
    uri = app.url_for("test_bp.test_route2")
    assert uri == "/bp/route2"
    request, response = app.test_client.websocket(uri)
    assert response.opened is True
    assert event.is_set()

    uri = app.url_for("test_bp.foobar_3")
    assert uri == "/bp/route3"
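Note: tests/test_url_for.py is also a new file; it pins down url_for behaviour for host-bound routes and for named blueprint websocket routes. A compact usage sketch of the same calls (application and blueprint names are placeholders):

from sanic import Sanic
from sanic.blueprints import Blueprint

app = Sanic("url_for_example")
bp = Blueprint("api", url_prefix="/api")

@app.route("/")
@app.route("/", name="hostindex", host="example.com")
def index(request):
    ...

@bp.websocket("/feed", name="feed_ws")
async def feed(request, ws):
    ...

app.blueprint(bp)

assert app.url_for("hostindex") == "/"
assert app.url_for("hostindex", _external=True) == "http://example.com/"
assert app.url_for("api.feed_ws") == "/api/feed"
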
@@ -118,7 +118,7 @@ def test_static_directory(app, file_name, base_uri, static_file_directory):
app.static(base_uri2, static_file_directory, name="uploads")

uri = app.url_for("static", name="static", filename=file_name)
assert uri == "{}/{}".format(base_uri, file_name)
assert uri == f"{base_uri}/{file_name}"

request, response = app.test_client.get(uri)
assert response.status == 200
@@ -134,7 +134,7 @@ def test_static_directory(app, file_name, base_uri, static_file_directory):
assert uri2 == uri3
assert uri3 == uri4

assert uri5 == "{}/{}".format(base_uri2, file_name)
assert uri5 == f"{base_uri2}/{file_name}"
assert uri5 == uri6

bp = Blueprint("test_bp_static", url_prefix="/bp")
@@ -157,10 +157,10 @@ def test_static_directory(app, file_name, base_uri, static_file_directory):
"static", name="test_bp_static.uploads", filename="/" + file_name
)

assert uri == "/bp{}/{}".format(base_uri, file_name)
assert uri == f"/bp{base_uri}/{file_name}"
assert uri == uri2

assert uri4 == "/bp{}/{}".format(base_uri2, file_name)
assert uri4 == f"/bp{base_uri2}/{file_name}"
assert uri4 == uri5

request, response = app.test_client.get(uri)

@@ -48,17 +48,3 @@ def test_vhosts_with_defaults(app):

request, response = app.test_client.get("/")
assert response.text == "default"


def test_remove_vhost_route(app):
@app.route("/", host="example.com")
async def handler1(request):
return text("You're at example.com!")

headers = {"Host": "example.com"}
request, response = app.test_client.get("/", headers=headers)
assert response.status == 200

app.remove_route("/", host="example.com")
request, response = app.test_client.get("/", headers=headers)
assert response.status == 404

@@ -49,7 +49,7 @@ def test_unexisting_methods(app):
request, response = app.test_client.get("/")
assert response.text == "I am get method"
request, response = app.test_client.post("/")
assert response.text == "Error: Method POST not allowed for URL /"
assert "Method POST not allowed for URL /" in response.text


def test_argument_methods(app):
@@ -151,8 +151,7 @@ def test_with_custom_class_methods(app):
def get(self, request):
self._iternal_method()
return text(
"I am get method and global var "
"is {}".format(self.global_var)
f"I am get method and global var " f"is {self.global_var}"
)

app.add_route(DummyView.as_view(), "/")

@@ -129,6 +129,10 @@ def test_handle_quit(worker):
assert worker.exit_code == 0


async def _a_noop(*a, **kw):
pass


def test_run_max_requests_exceeded(worker):
loop = asyncio.new_event_loop()
worker.ppid = 1
@@ -145,7 +149,7 @@ def test_run_max_requests_exceeded(worker):
"server2": {"requests_count": 15},
}
worker.max_requests = 10
worker._run = mock.Mock(wraps=asyncio.coroutine(lambda *a, **kw: None))
worker._run = mock.Mock(wraps=_a_noop)

# exceeding request count
_runner = asyncio.ensure_future(worker._check_alive(), loop=loop)
@@ -160,7 +164,7 @@ def test_run_max_requests_exceeded(worker):

def test_worker_close(worker):
loop = asyncio.new_event_loop()
asyncio.sleep = mock.Mock(wraps=asyncio.coroutine(lambda *a, **kw: None))
asyncio.sleep = mock.Mock(wraps=_a_noop)
worker.ppid = 1
worker.pid = 2
worker.cfg.graceful_timeout = 1.0
@@ -169,17 +173,13 @@ def test_worker_close(worker):
worker.wsgi = mock.Mock()
conn = mock.Mock()
conn.websocket = mock.Mock()
conn.websocket.close_connection = mock.Mock(
wraps=asyncio.coroutine(lambda *a, **kw: None)
)
conn.websocket.close_connection = mock.Mock(wraps=_a_noop)
worker.connections = set([conn])
worker.log = mock.Mock()
worker.loop = loop
server = mock.Mock()
server.close = mock.Mock(wraps=lambda *a, **kw: None)
server.wait_closed = mock.Mock(
wraps=asyncio.coroutine(lambda *a, **kw: None)
)
server.wait_closed = mock.Mock(wraps=_a_noop)
worker.servers = {server: {"requests_count": 14}}
worker.max_requests = 10

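Note: the worker-test hunks above replace generator-based asyncio.coroutine(lambda *a, **kw: None) stubs with a single native async def no-op, since asyncio.coroutine is deprecated as of Python 3.8, the version added to CI in this range. The same idea in isolation (names are illustrative):

from unittest import mock

async def _async_noop(*args, **kwargs):
    # Native-coroutine stand-in for asyncio.coroutine(lambda *a, **kw: None)
    return None

awaited_stub = mock.Mock(wraps=_async_noop)

On Python 3.8+, unittest.mock.AsyncMock is another option for attributes that the code under test awaits.
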
tox.ini (10 changed lines)
@@ -1,11 +1,11 @@
[tox]
envlist = py36, py37, pyNightly, {py36,py37,pyNightly}-no-ext, lint, check, security, docs
envlist = py36, py37, py38, pyNightly, {py36,py37,py38,pyNightly}-no-ext, lint, check, security, docs

[testenv]
usedevelop = True
setenv =
{py36,py37,pyNightly}-no-ext: SANIC_NO_UJSON=1
{py36,py37,pyNightly}-no-ext: SANIC_NO_UVLOOP=1
{py36,py37,py38,pyNightly}-no-ext: SANIC_NO_UJSON=1
{py36,py37,py38,pyNightly}-no-ext: SANIC_NO_UVLOOP=1
deps =
coverage
pytest==5.2.1
@@ -13,13 +13,13 @@ deps =
pytest-sanic
pytest-sugar
httpcore==0.3.0
httpx==0.9.3
httpx==0.11.1
chardet<=2.3.0
beautifulsoup4
gunicorn
pytest-benchmark
uvicorn
websockets>=7.0,<8.0
websockets>=8.1,<9.0
commands =
pytest {posargs:tests --cov sanic}
- coverage combine --append