Compare commits
453 Commits
.appveyor.yml (new file, 27 lines)

@@ -0,0 +1,27 @@
version: "{branch}.{build}"

environment:
  matrix:
    - TOXENV: py36-no-ext
      PYTHON: "C:\\Python36-x64"
      PYTHON_VERSION: "3.6.x"
      PYTHON_ARCH: "64"

    - TOXENV: py37-no-ext
      PYTHON: "C:\\Python37-x64"
      PYTHON_VERSION: "3.7.x"
      PYTHON_ARCH: "64"

init: SET "PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"

install:
  - pip install tox

build: off

test_script: tox

notifications:
  - provider: Email
    on_build_success: false
    on_build_status_changed: false
.black.toml (new file, 2 lines)

@@ -0,0 +1,2 @@
[tool.black]
line-length = 79
@@ -5,3 +5,11 @@ omit = site-packages, sanic/utils.py, sanic/__main__.py

[html]
directory = coverage

[report]
exclude_lines =
    no cov
    no qa
    noqa
    NOQA
    pragma: no cover
.github/ISSUE_TEMPLATE/bug_report.md (new file, 25 lines)

@@ -0,0 +1,25 @@
---
name: Bug report
about: Create a report to help us improve

---

**Describe the bug**
A clear and concise description of what the bug is; make sure to paste any exceptions and tracebacks.


**Code snippet**
Relevant source code; make sure to remove what is not necessary.


**Expected behavior**
A clear and concise description of what you expected to happen.


**Environment (please complete the following information):**
- OS: [e.g. iOS]
- Version [e.g. 0.8.3]


**Additional context**
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/feature_request.md (new file, 16 lines)

@@ -0,0 +1,16 @@
---
name: Feature request
about: Suggest an idea for Sanic

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]


**Describe the solution you'd like**
A clear and concise description of what you want to happen.


**Additional context**
Add any other context or sample code about the feature request here.
.github/ISSUE_TEMPLATE/help-wanted.md (new file, 13 lines)

@@ -0,0 +1,13 @@
---
name: Help wanted
about: Do you need help? Try community.sanicframework.org

---

*DELETE ALL BEFORE POSTING*
*Post your HELP WANTED questions on [the community forum](https://community.sanicframework.org/)*.

Check out the community forum before posting any question here.
We prefer that you put these kinds of questions here:

https://community.sanicframework.org/c/questions-and-help
.github/stale.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 90
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 30
# Issues with these labels will never be considered stale
exemptLabels:
  - bug
  - urgent
  - necessary
  - help wanted
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. If this
  is incorrect, please respond with an update. Thank you for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
.gitignore (3 changed lines)

@@ -10,8 +10,11 @@ coverage
settings.py
.idea/*
.cache/*
.mypy_cache/
.python-version
docs/_build/
docs/_api/
build/*
.DS_Store
dist/*
pip-wheel-metadata/
.travis.yml (38 changed lines)

@@ -5,33 +5,55 @@ cache:
  - $HOME/.cache/pip
matrix:
  include:
    - env: TOX_ENV=py35
      python: 3.5
    - env: TOX_ENV=py35-no-ext
      python: 3.5
    - env: TOX_ENV=py36
      python: 3.6
      name: "Python 3.6 with Extensions"
    - env: TOX_ENV=py36-no-ext
      python: 3.6
      name: "Python 3.6 without Extensions"
    - env: TOX_ENV=py37
      python: 3.7
      dist: xenial
      sudo: true
      name: "Python 3.7 with Extensions"
    - env: TOX_ENV=py37-no-ext
      python: 3.7
      dist: xenial
      sudo: true
    - env: TOX_ENV=flake8
      name: "Python 3.7 without Extensions"
    - env: TOX_ENV=type-checking
      python: 3.6
      name: "Python 3.6 Type checks"
    - env: TOX_ENV=type-checking
      python: 3.7
      name: "Python 3.7 Type checks"
    - env: TOX_ENV=lint
      python: 3.6
      name: "Python 3.6 Linter checks"
    - env: TOX_ENV=check
      python: 3.6
      install: pip install -U tox
      name: "Python 3.6 Package checks"
    - env: TOX_ENV=security
      python: 3.7
      dist: xenial
      sudo: true
      name: "Python 3.7 Bandit security scan"
    - env: TOX_ENV=docs
      python: 3.7
      dist: xenial
      sudo: true
      name: "Python 3.7 Documentation tests"
install:
  - pip install -U tox
  - pip install codecov
script: travis_retry tox -e $TOX_ENV
after_success:
  - codecov
deploy:
  provider: pypi
  user: channelcat
  user: brewmaster
  password:
    secure: OgADRQH3+dTL5swGzXkeRJDNbLpFzwqYnXB4iLD0Npvzj9QnKyQVvkbaeq6VmV9dpEFb5ULaAKYQq19CrXYDm28yanUSn6jdJ4SukaHusi7xt07U6H7pmoX/uZ2WZYqCSLM8cSp8TXY/3oV3rY5Jfj/AibE5XTbim5/lrhsvW6NR+ALzxc0URRPAHDZEPpojTCjSTjpY0aDsaKWg4mXVRMFfY3O68j6KaIoukIZLuoHfePLKrbZxaPG5VxNhMHEaICdxVxE/dO+7pQmQxXuIsEOHK1QiVJ9YrSGcNqgEqhN36kYP8dqMeVB07sv8Xa6o/Uax2/wXS2HEJvuwP1YD6WkoZuo9ZB85bcMdg7BV9jJDbVFVPJwc75BnTLHrMa3Q1KrRlKRDBUXBUsQivPuWhFNwUgvEayq2qSI3aRQR4Z0O+DfboEhXYojSoD64/EWBTZ7vhgbvOTGEdukUQSYrKj9P8jc1s8exomTsAiqdFxTUpzfiammUSL+M93lP4urtahl1jjXFX7gd3DzdEEb0NsGkx5lm/qdsty8/TeAvKUmC+RVU6T856W6MqN0P+yGbpWUARcSE7fwztC3SPxwAuxvIN3BHmRhOUHoORPNG2VpfbnscIzBKJR4v0JKzbpi0IDa66K+tCGsCEvQuL4cxVOtoUySPWNSUAyUWWUrGM2k=
    secure: "GoawLwmbtJOgKB6AJ0ZSYUUnNwIoonseHBxaAUH3zu79TS/Afrq+yB3lsVaMSG0CbyDgN4FrfD1phT1NzbvZ1VcLIOTDtCrmpQ1kLDw+zwgF40ab8sp8fPkKVHHHfCCs1mjltHIpxQa5lZTJcAs6Bpi/lbUWWwYxFzSV8pHw4W4hY09EHUd2o+evLTSVxaploetSt725DJUYKICUr2eAtCC11IDnIW4CzBJEx6krVV3uhzfTJW0Ls17x0c6sdZ9icMnV/G9xO/eQH6RIHe4xcrWJ6cmLDNKoGAkJp+BKr1CeVVg7Jw/MzPjvZKL2/ki6Beue1y6GUIy7lOS7jPVaOEhJ23b0zQwFcLMZw+Tt+E3v6QfHk+B/WBBBnM3zUZed9UI+QyW8+lqLLt39sQX0FO0P3eaDh8qTXtUuon2jTyFMMAMTFRTNpJmpAzuBH9yeMmDeALPTh0HphI+BkoUl5q1QbWFYjjnZMH2CatApxpLybt9A7rwm//PbOG0TSI93GEKNQ4w5DYryKTfwHzRBptNSephJSuxZYEfJsmUtas5es1D7Fe0PkyjxNNSU+eO+8wsTlitLUsJO4k0jAgy+cEKdU7YJ3J0GZVXocSkrNnUfd2hQPcJ3UtEJx3hLqqr8EM7EZBAasc1yGHh36NFetclzFY24YPih0G1+XurhTys="
  on:
    tags: true
  distributions: "sdist bdist_wheel"
CHANGELOG.md (deleted, 22 lines)

@@ -1,22 +0,0 @@
Version 0.1
-----------
- 0.1.7
  - Reversed static url and directory arguments to meet spec
- 0.1.6
  - Static files
  - Lazy Cookie Loading
- 0.1.5
  - Cookies
  - Blueprint listeners and ordering
  - Faster Router
  - Fix: Incomplete file reads on medium+ sized post requests
  - Breaking: after_start and before_stop now pass sanic as their first argument
- 0.1.4
  - Multiprocessing
- 0.1.3
  - Blueprint support
  - Faster Response processing
- 0.1.1 - 0.1.2
  - Struggling to update pypi via CI
- 0.1.0
  - Released to public
CHANGELOG.rst (new file, 398 lines)

@@ -0,0 +1,398 @@
Version 19.6.3
==============

Features
********

- Enable Towncrier Support

  As part of this feature, `towncrier` is being introduced as a mechanism to partially automate the process
  of generating and managing change logs as part of each pull request. (`#1631 <https://github.com/huge-success/sanic/issues/1631>`__)


Improved Documentation
**********************

- Documentation infrastructure changes

  - Enable having a single common `CHANGELOG` file for both GitHub page and documentation
  - Fix Sphinx deprecation warnings
  - Fix documentation warnings due to invalid `rst` indentation
  - Enable common contribution guidelines file across GitHub and documentation via `CONTRIBUTING.rst` (`#1631 <https://github.com/huge-success/sanic/issues/1631>`__)


Version 19.6.2
==============

Features
********

*
  `#1562 <https://github.com/huge-success/sanic/pull/1562>`_
  Remove ``aiohttp`` dependency and create new ``SanicTestClient`` based upon
  `requests-async <https://github.com/encode/requests-async>`_

*
  `#1475 <https://github.com/huge-success/sanic/pull/1475>`_
  Added ASGI support (Beta)

*
  `#1436 <https://github.com/huge-success/sanic/pull/1436>`_
  Add Configure support from object string


Bugfixes
********

*
  `#1587 <https://github.com/huge-success/sanic/pull/1587>`_
  Add missing handle for Expect header.

*
  `#1560 <https://github.com/huge-success/sanic/pull/1560>`_
  Allow to disable Transfer-Encoding: chunked.

*
  `#1558 <https://github.com/huge-success/sanic/pull/1558>`_
  Fix graceful shutdown.

*
  `#1594 <https://github.com/huge-success/sanic/pull/1594>`_
  Strict Slashes behavior fix

Deprecations and Removals
*************************

*
  `#1544 <https://github.com/huge-success/sanic/pull/1544>`_
  Drop dependency on distutil

*
  `#1562 <https://github.com/huge-success/sanic/pull/1562>`_
  Drop support for Python 3.5

*
  `#1568 <https://github.com/huge-success/sanic/pull/1568>`_
  Deprecate route removal.

.. warning::
   Sanic will not support Python 3.5 from version 19.6 and forward. However,
   version 18.12LTS will have its support period extended through December 2020, and
   will therefore outlast Python's official support for version 3.5, which is set to expire
   in September 2020.


Version 19.3
============

Features
********

*
  `#1497 <https://github.com/huge-success/sanic/pull/1497>`_
  Add support for zero-length and RFC 5987 encoded filename for
  multipart/form-data requests.

*
  `#1484 <https://github.com/huge-success/sanic/pull/1484>`_
  The type of ``expires`` attribute of ``sanic.cookies.Cookie`` is now
  enforced to be of type ``datetime``.

*
  `#1482 <https://github.com/huge-success/sanic/pull/1482>`_
  Add support for the ``stream`` parameter of ``sanic.Sanic.add_route()``
  available to ``sanic.Blueprint.add_route()``.

*
  `#1481 <https://github.com/huge-success/sanic/pull/1481>`_
  Accept negative values for route parameters with type ``int`` or ``number``.

*
  `#1476 <https://github.com/huge-success/sanic/pull/1476>`_
  Deprecated the use of ``sanic.request.Request.raw_args`` - it has a
  fundamental flaw in which it drops repeated query string parameters.
  Added ``sanic.request.Request.query_args`` as a replacement for the
  original use-case.

*
  `#1472 <https://github.com/huge-success/sanic/pull/1472>`_
  Remove an unwanted ``None`` check in Request class ``repr`` implementation.
  This changes the default ``repr`` of a Request from ``<Request>`` to
  ``<Request: None />``

*
  `#1470 <https://github.com/huge-success/sanic/pull/1470>`_
  Added 2 new parameters to ``sanic.app.Sanic.create_server``\ :

  * ``return_asyncio_server`` - whether to return an asyncio.Server.
  * ``asyncio_server_kwargs`` - kwargs to pass to ``loop.create_server`` for
    the event loop that sanic is using.

  This is a breaking change.

*
  `#1499 <https://github.com/huge-success/sanic/pull/1499>`_
  Added a set of test cases that test and benchmark route resolution.

*
  `#1457 <https://github.com/huge-success/sanic/pull/1457>`_
  The type of the ``"max-age"`` value in a ``sanic.cookies.Cookie`` is now
  enforced to be an integer. Non-integer values are replaced with ``0``.

*
  `#1445 <https://github.com/huge-success/sanic/pull/1445>`_
  Added the ``endpoint`` attribute to an incoming ``request``\ , containing the
  name of the handler function.

*
  `#1423 <https://github.com/huge-success/sanic/pull/1423>`_
  Improved request streaming. ``request.stream`` is now a bounded-size buffer
  instead of an unbounded queue. Callers must now call
  ``await request.stream.read()`` instead of ``await request.stream.get()``
  to read each portion of the body.

  This is a breaking change.
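The `#1423 <https://github.com/huge-success/sanic/pull/1423>`_, `#1476 <https://github.com/huge-success/sanic/pull/1476>`_ and `#1470 <https://github.com/huge-success/sanic/pull/1470>`_ entries above all change how application code interacts with Sanic. A minimal sketch of what those patterns might look like is shown below; it is illustrative only, and the route paths, host, port, and ``backlog`` value are assumptions rather than code taken from this changeset.

.. code-block:: python

   import asyncio

   from sanic import Sanic
   from sanic.response import text

   app = Sanic(__name__)


   # Per #1423: request.stream is now a bounded buffer; read body chunks with
   # `await request.stream.read()`, which returns None once the body is complete.
   @app.post("/upload", stream=True)
   async def upload(request):
       received = 0
       while True:
           chunk = await request.stream.read()
           if chunk is None:  # end of the request body
               break
           received += len(chunk)
       return text(f"received {received} bytes")


   # Per #1476: request.query_args preserves repeated query parameters as
   # (key, value) pairs, unlike the deprecated request.raw_args.
   @app.get("/search")
   async def search(request):
       # e.g. /search?tag=a&tag=b -> [("tag", "a"), ("tag", "b")]
       return text(str(request.query_args))


   # Per #1470: create_server() can hand back an asyncio server and forward
   # extra kwargs to loop.create_server (the `backlog` value here is only an
   # illustrative assumption) for the event loop Sanic is using.
   if __name__ == "__main__":
       server_coroutine = app.create_server(
           host="0.0.0.0",
           port=8000,
           return_asyncio_server=True,
           asyncio_server_kwargs={"backlog": 100},
       )
       loop = asyncio.get_event_loop()
       asyncio.ensure_future(server_coroutine)
       loop.run_forever()

Running the module directly would serve on port 8000 inside a plain asyncio loop, which is the embedding scenario the ``return_asyncio_server`` flag is meant to support.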

Bugfixes
********

*
  `#1502 <https://github.com/huge-success/sanic/pull/1502>`_
  Sanic was prefetching ``time.time()`` and updating it once per second to
  avoid excessive ``time.time()`` calls. The implementation was observed to
  cause memory leaks in some cases. The benefit of the prefetch appeared
  to be negligible, so this has been removed. Fixes
  `#1500 <https://github.com/huge-success/sanic/pull/1500>`_

*
  `#1501 <https://github.com/huge-success/sanic/pull/1501>`_
  Fix a bug in the auto-reloader when the process was launched as a module
  i.e. ``python -m init0.mod1`` where the sanic server is started
  in ``init0/mod1.py`` with ``debug`` enabled and imports another module in
  ``init0``.

*
  `#1376 <https://github.com/huge-success/sanic/pull/1376>`_
  Allow sanic test client to bind to a random port by specifying
  ``port=None`` when constructing a ``SanicTestClient``

*
  `#1399 <https://github.com/huge-success/sanic/pull/1399>`_
  Added the ability to specify middleware on a blueprint group, so that all
  routes produced from the blueprints in the group have the middleware
  applied.

*
  `#1442 <https://github.com/huge-success/sanic/pull/1442>`_
  Allow the use of the ``SANIC_ACCESS_LOG`` environment variable to
  enable/disable the access log when not explicitly passed to ``app.run()``.
  This allows the access log to be disabled for example when running via
  gunicorn.

Developer infrastructure
************************

* `#1529 <https://github.com/huge-success/sanic/pull/1529>`_ Update project PyPI credentials
* `#1515 <https://github.com/huge-success/sanic/pull/1515>`_ fix linter issue causing travis build failures (fix #1514)
* `#1490 <https://github.com/huge-success/sanic/pull/1490>`_ Fix python version in doc build
* `#1478 <https://github.com/huge-success/sanic/pull/1478>`_ Upgrade setuptools version and use native docutils in doc build
* `#1464 <https://github.com/huge-success/sanic/pull/1464>`_ Upgrade pytest, and fix caplog unit tests

Improved Documentation
**********************

* `#1516 <https://github.com/huge-success/sanic/pull/1516>`_ Fix typo at the exception documentation
* `#1510 <https://github.com/huge-success/sanic/pull/1510>`_ fix typo in Asyncio example
* `#1486 <https://github.com/huge-success/sanic/pull/1486>`_ Documentation typo
* `#1477 <https://github.com/huge-success/sanic/pull/1477>`_ Fix grammar in README.md
* `#1489 <https://github.com/huge-success/sanic/pull/1489>`_ Added "databases" to the extensions list
* `#1483 <https://github.com/huge-success/sanic/pull/1483>`_ Add sanic-zipkin to extensions list
* `#1487 <https://github.com/huge-success/sanic/pull/1487>`_ Removed link to deleted repo, Sanic-OAuth, from the extensions list
* `#1460 <https://github.com/huge-success/sanic/pull/1460>`_ 18.12 changelog
* `#1449 <https://github.com/huge-success/sanic/pull/1449>`_ Add example of amending request object
* `#1446 <https://github.com/huge-success/sanic/pull/1446>`_ Update README
* `#1444 <https://github.com/huge-success/sanic/pull/1444>`_ Update README
* `#1443 <https://github.com/huge-success/sanic/pull/1443>`_ Update README, including new logo
* `#1440 <https://github.com/huge-success/sanic/pull/1440>`_ fix minor typo and pip install instruction mismatch
* `#1424 <https://github.com/huge-success/sanic/pull/1424>`_ Documentation Enhancements

Note: 19.3.0 was skipped for packaging purposes and not released on PyPI


Version 18.12
=============

18.12.0
*******

*
  Changes:

  * Improved codebase test coverage from 81% to 91%.
  * Added stream_large_files and host examples in static_file document
  * Added methods to append and finish body content on Request (#1379)
  * Integrated with .appveyor.yml for windows ci support
  * Added documentation for AF_INET6 and AF_UNIX socket usage
  * Adopt black/isort for codestyle
  * Cancel task when connection_lost
  * Simplify request ip and port retrieval logic
  * Handle config error in load config file.
  * Integrate with codecov for CI
  * Add missed documentation for config section.
  * Deprecate Handler.log
  * Pinned httptools requirement to version 0.0.10+

*
  Fixes:

  * Fix ``remove_entity_headers`` helper function (#1415)
  * Fix TypeError when using Blueprint.group() to group blueprints with default url_prefix; use os.path.normpath to avoid invalid url_prefix like api//v1
  * f8a6af1 Rename the ``http`` module to ``helpers`` to prevent conflicts with the built-in Python http library (fixes #1323)
  * Fix unittests on windows
  * Fix Namespacing of sanic logger
  * Fix missing quotes in decorator example
  * Fix redirect with quoted param
  * Fix doc for latest blueprint code
  * Fix build of latex documentation relating to markdown lists
  * Fix loop exception handling in app.py
  * Fix content length mismatch in windows and other platform
  * Fix Range header handling for static files (#1402)
  * Fix the logger and make it work (#1397)
  * Fix typo pikcle->pickle in multiprocessing test
  * Fix pickling blueprints: change the string passed in the "name" section of the namedtuples in Blueprint to match the name of the Blueprint module attribute name. This allows blueprints to be pickled and unpickled without errors, which is a requirement for running Sanic in multiprocessing mode on Windows. Added a test for pickling and unpickling blueprints, a test for pickling and unpickling sanic itself, and a test for enabling multiprocessing on an app with a blueprint (only useful to catch this bug if the tests are run on Windows).
  * Fix document for logging

Version 0.8
===========

0.8.3
*****

* Changes:

  * Ownership changed to org 'huge-success'

0.8.0
*****

* Changes:

  * Add Server-Sent Events extension (Innokenty Lebedev)
  * Graceful handling of request_handler_task cancellation (Ashley Sommer)
  * Sanitize URL before redirection (aveao)
  * Add url_bytes to request (johndoe46)
  * py37 support for travisci (yunstanford)
  * Auto reloader support for OSX (garyo)
  * Add UUID route support (Volodymyr Maksymiv)
  * Add pausable response streams (Ashley Sommer)
  * Add weakref to request slots (vopankov)
  * remove ubuntu 12.04 from test fixture due to deprecation (yunstanford)
  * Allow streaming handlers in add_route (kinware)
  * use travis_retry for tox (Raphael Deem)
  * update aiohttp version for test client (yunstanford)
  * add redirect import for clarity (yingshaoxo)
  * Update HTTP Entity headers (Arnulfo Solís)
  * Add register_listener method (Stephan Fitzpatrick)
  * Remove uvloop/ujson dependencies for Windows (abuckenheimer)
  * Content-length header on 204/304 responses (Arnulfo Solís)
  * Extend WebSocketProtocol arguments and add docs (Bob Olde Hampsink, yunstanford)
  * Update development status from pre-alpha to beta (Maksim Anisenkov)
  * KeepAlive Timeout log level changed to debug (Arnulfo Solís)
  * Pin pytest to 3.3.2 because of pytest-dev/pytest#3170 (Maksim Aniskenov)
  * Install Python 3.5 and 3.6 on docker container for tests (Shahin Azad)
  * Add support for blueprint groups and nesting (Elias Tarhini)
  * Remove uvloop for windows setup (Aleksandr Kurlov)
  * Auto Reload (Yaser Amari)
  * Documentation updates/fixups (multiple contributors)

* Fixes:

  * Fix: auto_reload in Linux (Ashley Sommer)
  * Fix: broken tests for aiohttp >= 3.3.0 (Ashley Sommer)
  * Fix: disable auto_reload by default on windows (abuckenheimer)
  * Fix (1143): Turn off access log with gunicorn (hqy)
  * Fix (1268): Support status code for file response (Cosmo Borsky)
  * Fix (1266): Add content_type flag to Sanic.static (Cosmo Borsky)
  * Fix: subprotocols parameter missing from add_websocket_route (ciscorn)
  * Fix (1242): Responses for CI header (yunstanford)
  * Fix (1237): add version constraint for websockets (yunstanford)
  * Fix (1231): memory leak - always release resource (Phillip Xu)
  * Fix (1221): make request truthy if transport exists (Raphael Deem)
  * Fix failing tests for aiohttp>=3.1.0 (Ashley Sommer)
  * Fix try_everything examples (PyManiacGR, kot83)
  * Fix (1158): default to auto_reload in debug mode (Raphael Deem)
  * Fix (1136): ErrorHandler.response handler call too restrictive (Julien Castiaux)
  * Fix: raw requires bytes-like object (cloudship)
  * Fix (1120): passing a list in to a route decorator's host arg (Timothy Ebiuwhe)
  * Fix: Bug in multipart/form-data parser (DirkGuijt)
  * Fix: Exception for missing parameter when value is null (NyanKiyoshi)
  * Fix: Parameter check (Howie Hu)
  * Fix (1089): Routing issue with named parameters and different methods (yunstanford)
  * Fix (1085): Signal handling in multi-worker mode (yunstanford)
  * Fix: single quote in readme.rst (Cosven)
  * Fix: method typos (Dmitry Dygalo)
  * Fix: log_response correct output for ip and port (Wibowo Arindrarto)
  * Fix (1042): Exception Handling (Raphael Deem)
  * Fix: Chinese URIs (Howie Hu)
  * Fix (1079): timeout bug when self.transport is None (Raphael Deem)
  * Fix (1074): fix strict_slashes when route has slash (Raphael Deem)
  * Fix (1050): add samesite cookie to cookie keys (Raphael Deem)
  * Fix (1065): allow add_task after server starts (Raphael Deem)
  * Fix (1061): double quotes in unauthorized exception (Raphael Deem)
  * Fix (1062): inject the app in add_task method (Raphael Deem)
  * Fix: update environment.yml for readthedocs (Eli Uriegas)
  * Fix: Cancel request task when response timeout is triggered (Jeong YunWon)
  * Fix (1052): Method not allowed response for RFC7231 compliance (Raphael Deem)
  * Fix: IPv6 Address and Socket Data Format (Dan Palmer)

Note: Changelog was unmaintained between 0.1 and 0.7

Version 0.1
===========

0.1.7
*****

* Reversed static url and directory arguments to meet spec

0.1.6
*****

* Static files
* Lazy Cookie Loading

0.1.5
*****

* Cookies
* Blueprint listeners and ordering
* Faster Router
* Fix: Incomplete file reads on medium+ sized post requests
* Breaking: after_start and before_stop now pass sanic as their first argument

0.1.4
*****

* Multiprocessing

0.1.3
*****

* Blueprint support
* Faster Response processing

0.1.1 - 0.1.2
*************

* Struggling to update pypi via CI

0.1.0
*****

* Released to public
@@ -1,72 +0,0 @@
# Contributing

Thank you for your interest! Sanic is always looking for contributors. If you
don't feel comfortable contributing code, adding docstrings to the source files
is very appreciated.

We are committed to providing a friendly, safe and welcoming environment for all,
regardless of gender, sexual orientation, disability, ethnicity, religion,
or similar personal characteristic.
Our [code of conduct](./CONDUCT.md) sets the standards for behavior.

## Installation

To develop on sanic (and mainly to just run the tests) it is highly recommend to
install from sources.

So assume you have already cloned the repo and are in the working directory with
a virtual environment already set up, then run:

```bash
python setup.py develop && pip install -r requirements-dev.txt
```

## Running tests

To run the tests for sanic it is recommended to use tox like so:

```bash
tox
```

See it's that simple!

## Pull requests!

So the pull request approval rules are pretty simple:
1. All pull requests must pass unit tests.
2. All pull requests must be reviewed and approved by at least
   one current collaborator on the project.
3. All pull requests must pass flake8 checks.
4. All pull requests must be consistent with the existing code.
5. If you decide to remove/change anything from any common interface
   a deprecation message should accompany it.
6. If you implement a new feature you should have at least one unit
   test to accompany it.
7. An example must be one of the following:
   * Example of how to use Sanic
   * Example of how to use Sanic extensions
   * Example of how to use Sanic and asynchronous library

## Documentation

Sanic's documentation is built
using [sphinx](http://www.sphinx-doc.org/en/1.5.1/). Guides are written in
Markdown and can be found in the `docs` folder, while the module reference is
automatically generated using `sphinx-apidoc`.

To generate the documentation from scratch:

```bash
sphinx-apidoc -fo docs/_api/ sanic
sphinx-build -b html docs docs/_build
```

The HTML documentation will be created in the `docs/_build` folder.

## Warning

One of the main goals of Sanic is speed. Code that lowers the performance of
Sanic without significant gains in usability, security, or features may not be
merged. Please don't let this intimidate you! If you have any concerns about an
idea, open an issue for discussion and help.
CONTRIBUTING.rst (new file, 252 lines)

@@ -0,0 +1,252 @@
Contributing
============

Thank you for your interest! Sanic is always looking for contributors. If you
don't feel comfortable contributing code, adding docstrings to the source files
is very appreciated.

We are committed to providing a friendly, safe and welcoming environment for all,
regardless of gender, sexual orientation, disability, ethnicity, religion,
or similar personal characteristic.
Our `code of conduct <./CONDUCT.md>`_ sets the standards for behavior.

Installation
------------

To develop on sanic (and mainly to just run the tests) it is highly recommended to
install from sources.

So assume you have already cloned the repo and are in the working directory with
a virtual environment already set up, then run:

.. code-block:: bash

   pip3 install -e . ".[dev]"

Dependency Changes
------------------

``Sanic`` doesn't use ``requirements*.txt`` files to manage any kind of dependencies related to it, in order to simplify the
effort required in managing the dependencies. Please make sure you have read and understood the following section of
the document that explains the way ``sanic`` manages dependencies inside the ``setup.py`` file.

.. list-table::
   :header-rows: 1

   * - Dependency Type
     - Usage
     - Installation
   * - requirements
     - Bare minimum dependencies required for sanic to function
     - ``pip3 install -e .``
   * - tests_require / extras_require['test']
     - Dependencies required to run the Unit Tests for ``sanic``
     - ``pip3 install -e '.[test]'``
   * - extras_require['dev']
     - Additional development requirements for contributing
     - ``pip3 install -e '.[dev]'``
   * - extras_require['docs']
     - Dependencies required to enable building and enhancing sanic documentation
     - ``pip3 install -e '.[docs]'``


Running all tests
-----------------

To run the tests for Sanic it is recommended to use tox like so:

.. code-block:: bash

   tox

See, it's that simple!

``tox.ini`` contains different environments. Running ``tox`` without any arguments will
run all unittests, and perform lint and other checks.

Run unittests
-------------

``tox`` environment -> ``[testenv]``

To execute only unittests, run ``tox`` with an environment like so:

.. code-block:: bash

   tox -e py36 -v -- tests/test_config.py
   # or
   tox -e py37 -v -- tests/test_config.py

Run lint checks
---------------

``tox`` environment -> ``[testenv:lint]``

Perform ``flake8``\ , ``black`` and ``isort`` checks.

.. code-block:: bash

   tox -e lint

Run other checks
----------------

``tox`` environment -> ``[testenv:check]``

Perform other checks.

.. code-block:: bash

   tox -e check

Run Static Analysis
-------------------

``tox`` environment -> ``[testenv:security]``

Perform static analysis security scan.

.. code-block:: bash

   tox -e security

Run Documentation sanity check
------------------------------

``tox`` environment -> ``[testenv:docs]``

Perform sanity check on documentation.

.. code-block:: bash

   tox -e docs


Code Style
----------

To maintain code consistency, Sanic uses the following tools.

#. `isort <https://github.com/timothycrosley/isort>`_
#. `black <https://github.com/python/black>`_
#. `flake8 <https://github.com/PyCQA/flake8>`_

isort
*****

``isort`` sorts Python imports. It divides imports into three
categories, each sorted in alphabetical order.

#. built-in
#. third-party
#. project-specific

black
*****

``black`` is a Python code formatter.

flake8
******

``flake8`` is a Python style guide that wraps the following tools into one.

#. PyFlakes
#. pycodestyle
#. Ned Batchelder's McCabe script

``isort``\ , ``black`` and ``flake8`` checks are performed during ``tox`` lint checks.

Refer to the `tox <https://tox.readthedocs.io/en/latest/index.html>`_ documentation for more details.

Pull requests
-------------

So the pull request approval rules are pretty simple:

#. All pull requests must have changelog details associated with them.
#. All pull requests must pass unit tests.
#. All pull requests must be reviewed and approved by at least one current collaborator on the project.
#. All pull requests must pass flake8 checks.
#. All pull requests must be consistent with the existing code.
#. If you decide to remove/change anything from any common interface a deprecation message should accompany it.
#. If you implement a new feature you should have at least one unit test to accompany it.
#. An example must be one of the following:

   * Example of how to use Sanic
   * Example of how to use Sanic extensions
   * Example of how to use Sanic and an asynchronous library


Changelog
---------

It is mandatory to add changelog documentation as part of your pull request when you fix or contribute something
to the ``sanic`` community. This will enable us to generate better and well defined change logs during the
release, which can aid community users in a great way.

.. note::

   Single line explaining the details of the PR in brief

   Detailed description of what the PR is about and what changes or enhancements are being done.
   No need to include examples or any other details here. But it is important that you provide
   enough context here to let the user understand what this change is all about and why it is being
   introduced into the ``sanic`` codebase.

   Make sure you leave a blank line after the first line so that the document rendering is clean.


.. list-table::
   :header-rows: 1

   * - Contribution Type
     - Changelog file name format
     - Changelog file location
   * - Features
     - <git_issue>.feature.rst
     - ``changelogs``
   * - Bugfixes
     - <git_issue>.bugfix.rst
     - ``changelogs``
   * - Improved Documentation
     - <git_issue>.doc.rst
     - ``changelogs``
   * - Deprecations and Removals
     - <git_issue>.removal.rst
     - ``changelogs``
   * - Miscellaneous internal changes
     - <git_issue>.misc.rst
     - ``changelogs``


Documentation
-------------

Sanic's documentation is built
using `sphinx <http://www.sphinx-doc.org/en/1.5.1/>`_. Guides are written in
Markdown and can be found in the ``docs`` folder, while the module reference is
automatically generated using ``sphinx-apidoc``.

To generate the documentation from scratch:

.. code-block:: bash

   sphinx-apidoc -fo docs/_api/ sanic
   sphinx-build -b html docs docs/_build

   # There is a simple make command provided to ease the work required in generating
   # the documentation
   make docs

The HTML documentation will be created in the ``docs/_build`` folder.

.. warning::
   One of the main goals of Sanic is speed. Code that lowers the performance of
   Sanic without significant gains in usability, security, or features may not be
   merged. Please don't let this intimidate you! If you have any concerns about an
   idea, open an issue for discussion and help.
LICENSE (2 changed lines)

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2016-present Channel Cat
Copyright (c) 2016-present Sanic Community

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
MANIFEST.in (18 changed lines)

@@ -1,7 +1,15 @@
include README.rst
include MANIFEST.in
# Non Code related contents
include LICENSE
include setup.py
include README.rst
include pyproject.toml

recursive-exclude * __pycache__
recursive-exclude * *.py[co]
# Setup
include setup.py
include Makefile

# Tests
include .coveragerc
graft tests

global-exclude __pycache__
global-exclude *.py[co]
Makefile (95 changed lines)

@@ -1,4 +1,95 @@
test:
    find . -name "*.pyc" -delete
.PHONY: help test test-coverage install docker-test black fix-import beautify

.DEFAULT: help

help:
    @echo "Please use \`make <target>' where <target> is one of"
    @echo "test"
    @echo " Run Sanic Unit Tests"
    @echo "test-coverage"
    @echo " Run Sanic Unit Tests with Coverage"
    @echo "install"
    @echo " Install Sanic"
    @echo "docker-test"
    @echo " Run Sanic Unit Tests using Docker"
    @echo "black"
    @echo " Analyze and fix linting issues using Black"
    @echo "fix-import"
    @echo " Analyze and fix import order using isort"
    @echo "beautify [sort_imports=1] [include_tests=1]"
    @echo " Analyze and fix linting issue using black and optionally fix import sort using isort"
    @echo ""
    @echo "docs"
    @echo " Generate Sanic documentation"
    @echo ""
    @echo "clean-docs"
    @echo " Clean Sanic documentation"
    @echo ""
    @echo "docs-test"
    @echo " Test Sanic Documentation for errors"
    @echo ""
    @echo "changelog"
    @echo " Generate changelog for Sanic to prepare for new release"
    @echo ""
    @echo "release"
    @echo " Prepare Sanic for a new changes by version bump and changelog"
    @echo ""


clean:
    find . ! -path "./.eggs/*" -name "*.pyc" -exec rm {} \;
    find . ! -path "./.eggs/*" -name "*.pyo" -exec rm {} \;
    find . ! -path "./.eggs/*" -name ".coverage" -exec rm {} \;
    rm -rf build/* > /dev/null 2>&1
    rm -rf dist/* > /dev/null 2>&1

test: clean
    python setup.py test

test-coverage: clean
    python setup.py test --pytest-args="--cov sanic --cov-report term --cov-append "

install:
    python setup.py install

docker-test: clean
    docker build -t sanic/test-image -f docker/Dockerfile .
    docker run -t sanic/test-image tox

beautify: black
ifdef sort_imports
ifdef include_tests
    $(warning It is suggested that you do not run sort import on tests)
    isort -rc sanic tests
else
    $(info Sorting Imports)
    isort -rc sanic tests
endif
endif

black:
    black --config ./.black.toml sanic tests

fix-import: black
    isort -rc sanic tests


docs-clean:
    cd docs && make clean

docs: docs-clean
    cd docs && make html

docs-test: docs-clean
    cd docs && make dummy

changelog:
    python scripts/changelog.py

release:
ifdef version
    python scripts/release.py --release-version ${version} --generate-changelog
else
    python scripts/release.py --generate-changelog
endif
README.rst (171 changed lines)

@@ -1,15 +1,88 @@
Sanic
=====
.. image:: https://raw.githubusercontent.com/huge-success/sanic-assets/master/png/sanic-framework-logo-400x97.png
    :alt: Sanic | Build fast. Run fast.

|Join the chat at https://gitter.im/sanic-python/Lobby| |Build Status| |PyPI| |PyPI version|
Sanic | Build fast. Run fast.
=============================

Sanic is a Flask-like Python 3.5+ web server that's written to go fast. It's based on the work done by the amazing folks at magicstack, and was inspired by `this article <https://magic.io/blog/uvloop-blazing-fast-python-networking/>`_.
.. start-badges

On top of being Flask-like, Sanic supports async request handlers. This means you can use the new shiny async/await syntax from Python 3.5, making your code non-blocking and speedy.
.. list-table::
    :stub-columns: 1

Sanic is developed `on GitHub <https://github.com/channelcat/sanic/>`_. Contributions are welcome!
    * - Build
      - | |Build Status| |AppVeyor Build Status| |Codecov|
    * - Docs
      - |Documentation|
    * - Package
      - | |PyPI| |PyPI version| |Wheel| |Supported implementations| |Code style black|
    * - Support
      - | |Forums| |Join the chat at https://gitter.im/sanic-python/Lobby| |Awesome|
    * - Stats
      - | |Downloads| |Conda downloads|

.. |Forums| image:: https://img.shields.io/badge/forums-community-ff0068.svg
   :target: https://community.sanicframework.org/
.. |Join the chat at https://gitter.im/sanic-python/Lobby| image:: https://badges.gitter.im/sanic-python/Lobby.svg
   :target: https://gitter.im/sanic-python/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
.. |Codecov| image:: https://codecov.io/gh/huge-success/sanic/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/huge-success/sanic
.. |Build Status| image:: https://travis-ci.org/huge-success/sanic.svg?branch=master
   :target: https://travis-ci.org/huge-success/sanic
.. |AppVeyor Build Status| image:: https://ci.appveyor.com/api/projects/status/d8pt3ids0ynexi8c/branch/master?svg=true
   :target: https://ci.appveyor.com/project/huge-success/sanic
.. |Documentation| image:: https://readthedocs.org/projects/sanic/badge/?version=latest
   :target: http://sanic.readthedocs.io/en/latest/?badge=latest
.. |PyPI| image:: https://img.shields.io/pypi/v/sanic.svg
   :target: https://pypi.python.org/pypi/sanic/
.. |PyPI version| image:: https://img.shields.io/pypi/pyversions/sanic.svg
   :target: https://pypi.python.org/pypi/sanic/
.. |Code style black| image:: https://img.shields.io/badge/code%20style-black-000000.svg
   :target: https://github.com/ambv/black
.. |Wheel| image:: https://img.shields.io/pypi/wheel/sanic.svg
   :alt: PyPI Wheel
   :target: https://pypi.python.org/pypi/sanic
.. |Supported implementations| image:: https://img.shields.io/pypi/implementation/sanic.svg
   :alt: Supported implementations
   :target: https://pypi.python.org/pypi/sanic
.. |Awesome| image:: https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg
   :alt: Awesome Sanic List
   :target: https://github.com/mekicha/awesome-sanic
.. |Downloads| image:: https://pepy.tech/badge/sanic/month
   :alt: Downloads
   :target: https://pepy.tech/project/sanic
.. |Conda downloads| image:: https://img.shields.io/conda/dn/conda-forge/sanic.svg
   :alt: Downloads
   :target: https://anaconda.org/conda-forge/sanic

.. end-badges

Sanic is a **Python 3.6+** web server and web framework that's written to go fast. It allows the usage of the ``async/await`` syntax added in Python 3.5, which makes your code non-blocking and speedy.

`Source code on GitHub <https://github.com/huge-success/sanic/>`_ | `Help and discussion board <https://community.sanicframework.org/>`_.

The project is maintained by the community, for the community. **Contributions are welcome!**

The goal of the project is to provide a simple way to get a highly performant HTTP server up and running that is easy to build, to expand, and ultimately to scale.

Installation
------------

``pip3 install sanic``

Sanic makes use of ``uvloop`` and ``ujson`` to help with performance. If you do not want to use those packages, simply add an environmental variable ``SANIC_NO_UVLOOP=true`` or ``SANIC_NO_UJSON=true`` at install time.

.. code:: shell

   $ export SANIC_NO_UVLOOP=true
   $ export SANIC_NO_UJSON=true
   $ pip3 install --no-binary :all: sanic


.. note::

   If you are running on a clean install of Fedora 28 or above, please make sure you have the ``redhat-rpm-config`` package installed if you want to
   use ``sanic`` with the ``ujson`` dependency.

If you have a project that utilizes Sanic make sure to comment on the `issue <https://github.com/channelcat/sanic/issues/396>`_ that we use to track those projects!

Hello World Example
-------------------

@@ -27,17 +100,27 @@ Hello World Example

   if __name__ == '__main__':
       app.run(host='0.0.0.0', port=8000)

Sanic can now be easily run using ``python3 hello.py``.

Installation
------------
.. code::

- ``pip install sanic``
   [2018-12-30 11:37:41 +0200] [13564] [INFO] Goin' Fast @ http://0.0.0.0:8000
   [2018-12-30 11:37:41 +0200] [13564] [INFO] Starting worker [13564]

To install sanic without uvloop or ujson using bash, you can provide either or both of these environmental variables
using any truthy string like `'y', 'yes', 't', 'true', 'on', '1'` and setting the NO_X to true will stop that feature's
installation.
And, we can verify it is working: ``curl localhost:8000 -i``

- ``SANIC_NO_UVLOOP=true SANIC_NO_UJSON=true pip install sanic``
.. code::

   HTTP/1.1 200 OK
   Connection: keep-alive
   Keep-Alive: 5
   Content-Length: 17
   Content-Type: application/json

   {"hello":"world"}

**Now, let's go build something fast!**
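The hunk above starts at line 27 of the old README, so only the closing lines of the Hello World example (the ``if __name__ == '__main__'`` block) are visible in this diff. A self-contained sketch consistent with that fragment and with the ``{"hello":"world"}`` curl output shown above might look like the following; the handler name is an illustrative assumption.

.. code-block:: python

   from sanic import Sanic
   from sanic.response import json

   app = Sanic()

   @app.route("/")
   async def hello(request):
       # Produces the JSON body shown in the curl output above
       return json({"hello": "world"})

   if __name__ == "__main__":
       # Matches the closing lines visible in the hunk
       app.run(host="0.0.0.0", port=8000)

Saved as ``hello.py``, this runs with ``python3 hello.py``, as the surrounding text describes.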
|
||||
Documentation
|
||||
@@ -45,56 +128,18 @@ Documentation
|
||||
|
||||
`Documentation on Readthedocs <http://sanic.readthedocs.io/>`_.
.. |Join the chat at https://gitter.im/sanic-python/Lobby| image:: https://badges.gitter.im/sanic-python/Lobby.svg
   :target: https://gitter.im/sanic-python/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
.. |Build Status| image:: https://travis-ci.org/channelcat/sanic.svg?branch=master
   :target: https://travis-ci.org/channelcat/sanic
.. |Documentation| image:: https://readthedocs.org/projects/sanic/badge/?version=latest
   :target: http://sanic.readthedocs.io/en/latest/?badge=latest
.. |PyPI| image:: https://img.shields.io/pypi/v/sanic.svg
   :target: https://pypi.python.org/pypi/sanic/
.. |PyPI version| image:: https://img.shields.io/pypi/pyversions/sanic.svg
   :target: https://pypi.python.org/pypi/sanic/

Changelog
---------

`Release Changelogs <https://github.com/huge-success/sanic/blob/master/CHANGELOG.rst>`_.
Questions and Discussion
------------------------

Examples
--------
`Non-Core examples <https://github.com/channelcat/sanic/wiki/Examples/>`_. Examples of plugins and Sanic usage that are outside the scope of the Sanic core.

`Ask a question or join the conversation <https://community.sanicframework.org/>`_.

`Extensions <https://github.com/channelcat/sanic/wiki/Extensions/>`_. Sanic extensions created by the community.

Contribution
------------

`Projects <https://github.com/channelcat/sanic/wiki/Projects/>`_. Sanic in production use.

TODO
----
* http2

Limitations
-----------
* No wheels for uvloop and httptools on Windows :(

Final Thoughts
--------------

::
                     ▄▄▄▄▄
            ▀▀▀██████▄▄▄       _______________
          ▄▄▄▄▄  █████████▄  /                 \
         ▀▀▀▀█████▌ ▀▐▄ ▀▐█ |   Gotta go fast!  |
       ▀▀█████▄▄ ▀██████▄██ | _________________/
       ▀▄▄▄▄▄  ▀▀█▄▀█════█▀ |/
            ▀▀▀▄  ▀▀███ ▀       ▄▄
         ▄███▀▀██▄████████▄ ▄▀▀▀▀▀▀█▌
       ██▀▄▄▄██▀▄███▀ ▀▀████      ▄██
    ▄▀▀▀▄██▄▀▀▌████▒▒▒▒▒▒███     ▌▄▄▀
    ▌    ▐▀████▐███▒▒▒▒▒▐██▌
    ▀▄▄▄▄▀   ▀▀████▒▒▒▒▄██▀
              ▀▀█████████▀
            ▄▄██▀██████▀█
          ▄██▀     ▀▀▀  █
         ▄█             ▐▌
     ▄▄▄▄█▌              ▀█▄▄▄▄▀▀▄
    ▌     ▐                ▀▀▄▄▄▀
     ▀▀▄▄▀

We are always happy to have new contributions. We have `marked issues good for anyone looking to get started <https://github.com/huge-success/sanic/issues?q=is%3Aopen+is%3Aissue+label%3Abeginner>`_, and welcome `questions on the forums <https://community.sanicframework.org/>`_. Please take a look at our `Contribution guidelines <https://sanic.readthedocs.io/en/latest/sanic/contributing.html>`_.
25 SECURITY.md Normal file
@@ -0,0 +1,25 @@
# Security Policy

## Supported Versions

Sanic releases a long term support (LTS) version once a year in December. LTS releases receive bug and security updates for **24 months**. Interim releases throughout the year occur every three months, and are supported until the subsequent interim release.

| Version | LTS | Supported |
| ------- | ------------------ | ------------------ |
| 19.6.0  |                    | :white_check_mark: |
| 19.3.1  |                    | :heavy_check_mark: |
| 18.12.0 | :heavy_check_mark: | :heavy_check_mark: |
| 0.8.3   |                    | :x: |
| 0.7.0   |                    | :x: |
| 0.6.0   |                    | :x: |
| 0.5.4   |                    | :x: |
| 0.4.1   |                    | :x: |
| 0.3.1   |                    | :x: |
| 0.2.0   |                    | :x: |
| 0.1.9   |                    | :x: |

## Reporting a Vulnerability

If you discover a security vulnerability, we ask that you **do not** create an issue on GitHub. Instead, please [send a message to the core-devs](https://community.sanicframework.org/g/core-devs) on the community forums. Once logged in, you can send a message to the core-devs by clicking the message button.

This helps keep the issue from being publicized until the team can address and resolve it.
2 changelogs/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
# Except this file
!.gitignore
0 docs/_static/.gitkeep vendored Normal file

11 docs/conf.py
@@ -10,10 +10,8 @@
import os
import sys

# Add support for Markdown documentation using Recommonmark
from recommonmark.parser import CommonMarkParser

# Add support for auto-doc
import recommonmark
from recommonmark.transform import AutoStructify

# Ensure that sanic is present in the path, to allow sphinx-apidoc to
@@ -25,12 +23,11 @@ import sanic

# -- General configuration ------------------------------------------------

extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.asyncio']
extensions = ['sphinx.ext.autodoc', "recommonmark"]

templates_path = ['_templates']

# Enable support for both Restructured Text and Markdown
source_parsers = {'.md': CommonMarkParser}
source_suffix = ['.rst', '.md']

# The master toctree document.
@@ -38,7 +35,7 @@ master_doc = 'index'

# General information about the project.
project = 'Sanic'
copyright = '2016, Sanic contributors'
copyright = '2018, Sanic contributors'
author = 'Sanic contributors'

# The version info for the project you're documenting, acts as replacement for
@@ -149,6 +146,6 @@ suppress_warnings = ['image.nonlocal_uri']
def setup(app):
    app.add_config_value('recommonmark_config', {
        'enable_eval_rst': True,
        'enable_auto_doc_ref': True,
        'enable_auto_doc_ref': False,
    }, True)
    app.add_transform(AutoStructify)
@@ -7,27 +7,33 @@ Guides
   :maxdepth: 2

   sanic/getting_started
   sanic/routing
   sanic/config
   sanic/logging
   sanic/request_data
   sanic/response
   sanic/cookies
   sanic/routing
   sanic/blueprints
   sanic/static_files
   sanic/versioning
   sanic/exceptions
   sanic/middleware
   sanic/blueprints
   sanic/websocket
   sanic/config
   sanic/cookies
   sanic/decorators
   sanic/streaming
   sanic/class_based_views
   sanic/custom_protocol
   sanic/sockets
   sanic/ssl
   sanic/logging
   sanic/debug_mode
   sanic/testing
   sanic/deploying
   sanic/extensions
   sanic/examples
   sanic/changelog
   sanic/contributing
   sanic/api_reference
   sanic/asyncio_python37


Module Documentation
@@ -20,6 +20,15 @@ sanic.blueprints module
    :undoc-members:
    :show-inheritance:

sanic.blueprint_group module
----------------------------

.. automodule:: sanic.blueprint_group
    :members:
    :undoc-members:
    :show-inheritance:


sanic.config module
-------------------
58 docs/sanic/asyncio_python37.rst Normal file
@@ -0,0 +1,58 @@
Python 3.7 AsyncIO examples
###########################

With Python 3.7, AsyncIO got a major update for the following types:

- asyncio.AbstractEventLoop
- asyncio.AbstractServer

This example shows how to use Sanic with Python 3.7, and more precisely how to retrieve an asyncio server instance:

.. code:: python

    import asyncio
    import socket
    import os

    from sanic import Sanic
    from sanic.response import json

    app = Sanic(__name__)


    @app.route("/")
    async def test(request):
        return json({"hello": "world"})


    server_socket = '/tmp/sanic.sock'

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)

    try:
        os.remove(server_socket)
    finally:
        sock.bind(server_socket)

    if __name__ == "__main__":
        loop = asyncio.get_event_loop()
        srv_coro = app.create_server(
            sock=sock,
            return_asyncio_server=True,
            asyncio_server_kwargs=dict(
                start_serving=False
            )
        )
        srv = loop.run_until_complete(srv_coro)
        try:
            assert srv.is_serving() is False
            loop.run_until_complete(srv.start_serving())
            assert srv.is_serving() is True
            loop.run_until_complete(srv.serve_forever())
        except KeyboardInterrupt:
            srv.close()
            loop.close()

Please note that uvloop does not support these features yet.
@@ -48,7 +48,7 @@ by that blueprint. In this example, the registered routes in the `app.router`
will look like:

```python
[Route(handler=<function bp_root at 0x7f908382f9d8>, methods=None, pattern=re.compile('^/$'), parameters=[])]
[Route(handler=<function bp_root at 0x7f908382f9d8>, methods=frozenset({'GET'}), pattern=re.compile('^/$'), parameters=[], name='my_blueprint.bp_root', uri='/')]
```

## Blueprint groups and nesting

@@ -87,7 +87,7 @@ from sanic import Blueprint
from .static import static
from .authors import authors

content = Blueprint.group(assets, authors, url_prefix='/content')
content = Blueprint.group(static, authors, url_prefix='/content')
```

```python
# api/info.py
@@ -118,16 +118,16 @@ app = Sanic(__name__)
app.blueprint(api)
```

## Using blueprints
## Using Blueprints

Blueprints have much the same functionality as an application instance.
Blueprints have almost the same functionality as an application instance.

### WebSocket routes

WebSocket handlers can be registered on a blueprint using the `@bp.websocket`
decorator or `bp.add_websocket_route` method.
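
As a rough sketch (the blueprint name, routes, and handlers here are made up), registering WebSocket handlers on a blueprint can look like this:

```python
from sanic import Blueprint

bp = Blueprint('my_blueprint')

# registration via the decorator
@bp.websocket('/feed')
async def feed(request, ws):
    while True:
        data = await ws.recv()
        await ws.send(data)

# equivalent registration without the decorator
async def second_feed(request, ws):
    await ws.send('hello')

bp.add_websocket_route(second_feed, '/second_feed')
```
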
### Middleware
### Blueprint Middleware

Using blueprints allows you to also register middleware globally.

@@ -145,6 +145,36 @@ async def halt_response(request, response):
    return text('I halted the response')
```

### Blueprint Group Middleware

This middleware allows you to apply a common middleware to all the blueprints that make up the
current blueprint group.
```python
bp1 = Blueprint('bp1', url_prefix='/bp1')
bp2 = Blueprint('bp2', url_prefix='/bp2')

@bp1.middleware('request')
async def bp1_only_middleware(request):
    print('applied on Blueprint : bp1 Only')

@bp1.route('/')
async def bp1_route(request):
    return text('bp1')

@bp2.route('/<param>')
async def bp2_route(request, param):
    return text(param)

group = Blueprint.group(bp1, bp2)

@group.middleware('request')
async def group_middleware(request):
    print('common middleware applied for both bp1 and bp2')

# Register Blueprint group under the app
app.blueprint(group)
```

### Exceptions

Exceptions can be applied exclusively to blueprints globally.

@@ -201,7 +231,7 @@ async def close_connection(app, loop):
Blueprints can be very useful for API versioning, where one blueprint may point
at `/v1/<routes>`, and another pointing at `/v2/<routes>`.

When a blueprint is initialised, it can take an optional `url_prefix` argument,
When a blueprint is initialised, it can take an optional `version` argument,
which will be prepended to all routes defined on the blueprint. This feature
can be used to implement our API versioning scheme.

@@ -210,8 +240,8 @@ can be used to implement our API versioning scheme.
from sanic.response import text
from sanic import Blueprint

blueprint_v1 = Blueprint('v1', url_prefix='/v1')
blueprint_v2 = Blueprint('v2', url_prefix='/v2')
blueprint_v1 = Blueprint('v1', url_prefix='/api', version="v1")
blueprint_v2 = Blueprint('v2', url_prefix='/api', version="v2")

@blueprint_v1.route('/')
async def api_v1_root(request):
@@ -222,7 +252,7 @@ async def api_v2_root(request):
    return text('Welcome to version 2 of our documentation')
```

When we register our blueprints on the app, the routes `/v1` and `/v2` will now
When we register our blueprints on the app, the routes `/v1/api` and `/v2/api` will now
point to the individual blueprints, which allows the creation of *sub-sites*
for each API version.

@@ -232,8 +262,8 @@ from sanic import Sanic
from blueprints import blueprint_v1, blueprint_v2

app = Sanic(__name__)
app.blueprint(blueprint_v1, url_prefix='/v1')
app.blueprint(blueprint_v2, url_prefix='/v2')
app.blueprint(blueprint_v1)
app.blueprint(blueprint_v2)

app.run(host='0.0.0.0', port=8000, debug=True)
```

@@ -246,7 +276,7 @@ takes the format `<blueprint_name>.<handler_name>`. For example:
```python
@blueprint_v1.route('/')
async def root(request):
    url = request.app.url_for('v1.post_handler', post_id=5) # --> '/v1/post/5'
    url = request.app.url_for('v1.post_handler', post_id=5) # --> '/v1/api/post/5'
    return redirect(url)

@@ -254,5 +284,3 @@ async def root(request):
async def post_handler(request, post_id):
    return text('Post {} in Blueprint V1'.format(post_id))
```
4 docs/sanic/changelog.rst Normal file
@@ -0,0 +1,4 @@

Changelog
---------

.. include:: ../../CHANGELOG.rst
@@ -53,6 +53,13 @@ import myapp.default_settings
app = Sanic('myapp')
app.config.from_object(myapp.default_settings)
```
or also by passing the path to the config object:

```
app = Sanic('myapp')
app.config.from_object('config.path.config.Class')
```

You could use a class or any other object as well.
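
For instance, a minimal sketch of a config class (the module, class, and attribute names are made up; only UPPERCASE attributes are copied into the config):

```python
from sanic import Sanic

class ProdConfig:
    # only UPPERCASE attributes are loaded into app.config
    DB_HOST = 'db.example.com'
    DB_NAME = 'appdb'
    REQUEST_MAX_SIZE = 50000000

app = Sanic('myapp')
app.config.from_object(ProdConfig)
```
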
@@ -85,27 +92,56 @@ DB_USER = 'appuser'

Out of the box there are just a few predefined values which can be overwritten when creating the application.

| Variable           | Default   | Description                                   |
| ------------------ | --------- | --------------------------------------------- |
| REQUEST_MAX_SIZE   | 100000000 | How big a request may be (bytes)              |
| REQUEST_TIMEOUT    | 60        | How long a request can take to arrive (sec)   |
| RESPONSE_TIMEOUT   | 60        | How long a response can take to process (sec) |
| KEEP_ALIVE         | True      | Disables keep-alive when False                |
| KEEP_ALIVE_TIMEOUT | 5         | How long to hold a TCP connection open (sec)  |
| Variable                  | Default           | Description                                                                  |
| ------------------------- | ----------------- | ---------------------------------------------------------------------------- |
| REQUEST_MAX_SIZE          | 100000000         | How big a request may be (bytes)                                             |
| REQUEST_BUFFER_QUEUE_SIZE | 100               | Request streaming buffer queue size                                          |
| REQUEST_TIMEOUT           | 60                | How long a request can take to arrive (sec)                                  |
| RESPONSE_TIMEOUT          | 60                | How long a response can take to process (sec)                                |
| KEEP_ALIVE                | True              | Disables keep-alive when False                                               |
| KEEP_ALIVE_TIMEOUT        | 5                 | How long to hold a TCP connection open (sec)                                 |
| GRACEFUL_SHUTDOWN_TIMEOUT | 15.0              | How long to wait to force close non-idle connection (sec)                    |
| ACCESS_LOG                | True              | Disable or enable access log                                                 |
| PROXIES_COUNT             | -1                | The number of proxy servers in front of the app (e.g. nginx; see below)      |
| FORWARDED_FOR_HEADER      | "X-Forwarded-For" | The name of "X-Forwarded-For" HTTP header that contains client and proxy ip  |
| REAL_IP_HEADER            | "X-Real-IP"       | The name of "X-Real-IP" HTTP header that contains real client ip             |
### The different Timeout variables:

A request timeout measures the duration of time between the instant when a new open TCP connection is passed to the Sanic backend server, and the instant when the whole HTTP request is received. If the time taken exceeds the `REQUEST_TIMEOUT` value (in seconds), this is considered a Client Error so Sanic generates a HTTP 408 response and sends that to the client. Adjust this value higher if your clients routinely pass very large request payloads or upload requests very slowly.
#### `REQUEST_TIMEOUT`

A response timeout measures the duration of time between the instant the Sanic server passes the HTTP request to the Sanic App, and the instant a HTTP response is sent to the client. If the time taken exceeds the `RESPONSE_TIMEOUT` value (in seconds), this is considered a Server Error so Sanic generates a HTTP 503 response and sets that to the client. Adjust this value higher if your application is likely to have long-running process that delay the generation of a response.
A request timeout measures the duration of time between the instant when a new open TCP connection is passed to the
Sanic backend server, and the instant when the whole HTTP request is received. If the time taken exceeds the
`REQUEST_TIMEOUT` value (in seconds), this is considered a Client Error so Sanic generates an `HTTP 408` response
and sends that to the client. Set this parameter's value higher if your clients routinely pass very large request payloads
or upload requests very slowly.

### What is Keep Alive? And what does the Keep Alive Timeout value do?
#### `RESPONSE_TIMEOUT`

Keep-Alive is a HTTP feature indroduced in HTTP 1.1. When sending a HTTP request, the client (usually a web browser application) can set a Keep-Alive header to indicate for the http server (Sanic) to not close the TCP connection after it has send the response. This allows the client to reuse the existing TCP connection to send subsequent HTTP requests, and ensures more efficient network traffic for both the client and the server.
A response timeout measures the duration of time between the instant the Sanic server passes the HTTP request to the
Sanic App, and the instant an HTTP response is sent to the client. If the time taken exceeds the `RESPONSE_TIMEOUT`
value (in seconds), this is considered a Server Error so Sanic generates an `HTTP 503` response and sends that to the
client. Set this parameter's value higher if your application is likely to have long-running processes that delay the
generation of a response.

The `KEEP_ALIVE` config variable is set to `True` in Sanic by default. If you don't need this feature in your application, set it to `False` to cause all client connections to close immediately after a response is sent, regardless of the Keep-Alive header on the request.
#### `KEEP_ALIVE_TIMEOUT`

The amount of time the server holds the TCP connection open is decided by the server itself. In Sanic, that value is configured using the `KEEP_ALIVE_TIMEOUT` value. By default, it is set to 5 seconds, this is the same default setting as the Apache HTTP server and is a good balance between allowing enough time for the client to send a new request, and not holding open too many connections at once. Do not exceed 75 seconds unless you know your clients are using a browser which supports TCP connections held open for that long.
##### What is Keep Alive? And what does the Keep Alive Timeout value do?

`Keep-Alive` is a HTTP feature introduced in `HTTP 1.1`. When sending a HTTP request, the client (usually a web browser application)
can set a `Keep-Alive` header to indicate to the HTTP server (Sanic) that it should not close the TCP connection after it has sent the response.
This allows the client to reuse the existing TCP connection to send subsequent HTTP requests, and ensures more efficient
network traffic for both the client and the server.

The `KEEP_ALIVE` config variable is set to `True` in Sanic by default. If you don't need this feature in your application,
set it to `False` to cause all client connections to close immediately after a response is sent, regardless of
the `Keep-Alive` header on the request.

The amount of time the server holds the TCP connection open is decided by the server itself.
In Sanic, that value is configured using the `KEEP_ALIVE_TIMEOUT` value. By default, it is set to 5 seconds.
This is the same default setting as the Apache HTTP server and is a good balance between allowing enough time for
the client to send a new request, and not holding open too many connections at once. Do not exceed 75 seconds unless
you know your clients are using a browser which supports TCP connections held open for that long.

For reference:
```
@@ -117,3 +153,59 @@ Firefox client hard keepalive limit = 115 seconds
Opera 11 client hard keepalive limit = 120 seconds
Chrome 13+ client keepalive limit > 300+ seconds
```
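
As a rough illustration (the values below are arbitrary), these timeouts can be overridden on the application config before the server starts:

```python
from sanic import Sanic

app = Sanic(__name__)

# arbitrary example values -- tune them for your own workload
app.config.REQUEST_TIMEOUT = 120     # allow slower uploads
app.config.RESPONSE_TIMEOUT = 120    # allow longer-running handlers
app.config.KEEP_ALIVE_TIMEOUT = 75   # hold idle connections open longer
```
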
### Proxy configuration

When you use a reverse proxy server (e.g. nginx), the value of `request.ip` will contain the IP of a proxy, typically `127.0.0.1`. Sanic may be configured to use proxy headers for determining the true client IP, available as `request.remote_addr`. The full external URL is also constructed from header fields if available.

Without proper precautions, a malicious client may use proxy headers to spoof its own IP. To avoid such issues, Sanic does not use any proxy headers unless explicitly enabled.

Services behind reverse proxies must configure `FORWARDED_SECRET`, `REAL_IP_HEADER` and/or `PROXIES_COUNT`.

#### Forwarded header

```
Forwarded: for="1.2.3.4"; proto="https"; host="yoursite.com"; secret="Pr0xy",
    for="10.0.0.1"; proto="http"; host="proxy.internal"; by="_1234proxy"
```

* Set `FORWARDED_SECRET` to an identifier used by the proxy of interest.

The secret is used to securely identify a specific proxy server. Given the above header, secret `Pr0xy` would use the information on the first line and secret `_1234proxy` would use the second line. The secret must exactly match the value of `secret` or `by`. A secret in `by` must begin with an underscore and use only characters specified in [RFC 7239 section 6.3](https://tools.ietf.org/html/rfc7239#section-6.3), while `secret` has no such restrictions.

Sanic ignores any elements without the secret key, and will not even parse the header if no secret is set.

All other proxy headers are ignored once a trusted forwarded element is found, as it already carries complete information about the client.

#### Traditional proxy headers

```
X-Real-IP: 1.2.3.4
X-Forwarded-For: 1.2.3.4, 10.0.0.1
X-Forwarded-Proto: https
X-Forwarded-Host: yoursite.com
```

* Set `REAL_IP_HEADER` to `x-real-ip`, `true-client-ip`, `cf-connecting-ip` or other name of such header.
* Set `PROXIES_COUNT` to the number of entries expected in `x-forwarded-for` (name configurable via `FORWARDED_FOR_HEADER`).

If client IP is found by one of these methods, Sanic uses the following headers for URL parts:

* `x-forwarded-proto`, `x-forwarded-host`, `x-forwarded-port`, `x-forwarded-path` and if necessary, `x-scheme`.

#### Proxy config if using ...

* a proxy that supports `forwarded`: set `FORWARDED_SECRET` to the value that the proxy inserts in the header
  * Apache Traffic Server: `CONFIG proxy.config.http.insert_forwarded STRING for|proto|host|by=_secret`
  * NGHTTPX: `nghttpx --add-forwarded=for,proto,host,by --forwarded-for=ip --forwarded-by=_secret`
  * NGINX: after [the official instructions](https://www.nginx.com/resources/wiki/start/topics/examples/forwarded/), add anywhere in your config:

        proxy_set_header Forwarded "$proxy_add_forwarded;by=\"_$server_name\";proto=$scheme;host=\"$http_host\";path=\"$request_uri\";secret=_secret";

* a custom header with client IP: set `REAL_IP_HEADER` to the name of that header
* `x-forwarded-for`: set `PROXIES_COUNT` to `1` for a single proxy, or a greater number to allow Sanic to select the correct IP
* no proxies: no configuration required! (A short configuration sketch follows this list.)
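
For example, here is a minimal sketch of the Sanic-side settings for a single nginx-style proxy (the header name and proxy count are assumptions about your particular deployment):

```python
from sanic import Sanic
from sanic.response import json

app = Sanic(__name__)

# assumes one reverse proxy that sets X-Real-IP / X-Forwarded-For
app.config.REAL_IP_HEADER = "x-real-ip"
app.config.PROXIES_COUNT = 1

@app.route("/ip")
async def ip_handler(request):
    # request.ip is the proxy; request.remote_addr is the real client
    return json({"proxy": request.ip, "client": request.remote_addr})
```
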
#### Changes in Sanic 19.9

Earlier Sanic versions had unsafe default settings. From 19.9 onwards proxy settings must be set manually, and support for negative PROXIES_COUNT has been removed.
@@ -1,62 +0,0 @@
# Contributing

Thank you for your interest! Sanic is always looking for contributors. If you
don't feel comfortable contributing code, adding docstrings to the source files
is very appreciated.

## Installation

To develop on sanic (and mainly to just run the tests) it is highly recommend to
install from sources.

So assume you have already cloned the repo and are in the working directory with
a virtual environment already set up, then run:

```bash
python setup.py develop && pip install -r requirements-dev.txt
```

## Running tests

To run the tests for sanic it is recommended to use tox like so:

```bash
tox
```

See it's that simple!

## Pull requests!

So the pull request approval rules are pretty simple:
1. All pull requests must pass unit tests
* All pull requests must be reviewed and approved by at least
  one current collaborator on the project
* All pull requests must pass flake8 checks
* If you decide to remove/change anything from any common interface
  a deprecation message should accompany it.
* If you implement a new feature you should have at least one unit
  test to accompany it.

## Documentation

Sanic's documentation is built
using [sphinx](http://www.sphinx-doc.org/en/1.5.1/). Guides are written in
Markdown and can be found in the `docs` folder, while the module reference is
automatically generated using `sphinx-apidoc`.

To generate the documentation from scratch:

```bash
sphinx-apidoc -fo docs/_api/ sanic
sphinx-build -b html docs docs/_build
```

The HTML documentation will be created in the `docs/_build` folder.

## Warning

One of the main goals of Sanic is speed. Code that lowers the performance of
Sanic without significant gains in usability, security, or features may not be
merged. Please don't let this intimidate you! If you have any concerns about an
idea, open an issue for discussion and help.
1 docs/sanic/contributing.rst Normal file
@@ -0,0 +1 @@
.. include:: ../../CONTRIBUTING.rst

@@ -1,72 +0,0 @@
# Custom Protocols

*Note: this is advanced usage, and most readers will not need such functionality.*

You can change the behavior of Sanic's protocol by specifying a custom
protocol, which should be a subclass
of
[asyncio.protocol](https://docs.python.org/3/library/asyncio-protocol.html#protocol-classes).
This protocol can then be passed as the keyword argument `protocol` to the `sanic.run` method.

The constructor of the custom protocol class receives the following keyword
arguments from Sanic.

- `loop`: an `asyncio`-compatible event loop.
- `connections`: a `set` to store protocol objects. When Sanic receives
  `SIGINT` or `SIGTERM`, it executes `protocol.close_if_idle` for all protocol
  objects stored in this set.
- `signal`: a `sanic.server.Signal` object with the `stopped` attribute. When
  Sanic receives `SIGINT` or `SIGTERM`, `signal.stopped` is assigned `True`.
- `request_handler`: a coroutine that takes a `sanic.request.Request` object
  and a `response` callback as arguments.
- `error_handler`: a `sanic.exceptions.Handler` which is called when exceptions
  are raised.
- `request_timeout`: the number of seconds before a request times out.
- `request_max_size`: an integer specifying the maximum size of a request, in bytes.

## Example

An error occurs in the default protocol if a handler function does not return
an `HTTPResponse` object.

By overriding the `write_response` protocol method, if a handler returns a
string it will be converted to an `HTTPResponse object`.

```python
from sanic import Sanic
from sanic.server import HttpProtocol
from sanic.response import text

app = Sanic(__name__)


class CustomHttpProtocol(HttpProtocol):

    def __init__(self, *, loop, request_handler, error_handler,
                 signal, connections, request_timeout, request_max_size):
        super().__init__(
            loop=loop, request_handler=request_handler,
            error_handler=error_handler, signal=signal,
            connections=connections, request_timeout=request_timeout,
            request_max_size=request_max_size)

    def write_response(self, response):
        if isinstance(response, str):
            response = text(response)
        self.transport.write(
            response.output(self.request.version)
        )
        self.transport.close()


@app.route('/')
async def string(request):
    return 'string'


@app.route('/1')
async def response(request):
    return text('response')

app.run(host='0.0.0.0', port=8000, protocol=CustomHttpProtocol)
```
76 docs/sanic/custom_protocol.rst Normal file
@@ -0,0 +1,76 @@
Custom Protocols
================

.. note::

    This is advanced usage, and most readers will not need such functionality.

You can change the behavior of Sanic's protocol by specifying a custom
protocol, which should be a subclass
of `asyncio.protocol <https://docs.python.org/3/library/asyncio-protocol.html#protocol-classes>`_.
This protocol can then be passed as the keyword argument ``protocol`` to the ``sanic.run`` method.

The constructor of the custom protocol class receives the following keyword
arguments from Sanic.

- ``loop``: an ``asyncio``-compatible event loop.
- ``connections``: a ``set`` to store protocol objects. When Sanic receives
  ``SIGINT`` or ``SIGTERM``, it executes ``protocol.close_if_idle`` for all protocol
  objects stored in this set.
- ``signal``: a ``sanic.server.Signal`` object with the ``stopped`` attribute. When
  Sanic receives ``SIGINT`` or ``SIGTERM``, ``signal.stopped`` is assigned ``True``.
- ``request_handler``: a coroutine that takes a ``sanic.request.Request`` object
  and a ``response`` callback as arguments.
- ``error_handler``: a ``sanic.exceptions.Handler`` which is called when exceptions
  are raised.
- ``request_timeout``: the number of seconds before a request times out.
- ``request_max_size``: an integer specifying the maximum size of a request, in bytes.

Example
-------

An error occurs in the default protocol if a handler function does not return
an ``HTTPResponse`` object.

By overriding the ``write_response`` protocol method, if a handler returns a
string it will be converted to an ``HTTPResponse object``.

.. code:: python

    from sanic import Sanic
    from sanic.server import HttpProtocol
    from sanic.response import text

    app = Sanic(__name__)


    class CustomHttpProtocol(HttpProtocol):

        def __init__(self, *, loop, request_handler, error_handler,
                     signal, connections, request_timeout, request_max_size):
            super().__init__(
                loop=loop, request_handler=request_handler,
                error_handler=error_handler, signal=signal,
                connections=connections, request_timeout=request_timeout,
                request_max_size=request_max_size)

        def write_response(self, response):
            if isinstance(response, str):
                response = text(response)
            self.transport.write(
                response.output(self.request.version)
            )
            self.transport.close()


    @app.route('/')
    async def string(request):
        return 'string'


    @app.route('/1')
    async def response(request):
        return text('response')

    app.run(host='0.0.0.0', port=8000, protocol=CustomHttpProtocol)
@@ -34,6 +34,6 @@ def authorized():
@app.route("/")
@authorized()
async def test(request):
    return json({status: 'authorized'})
    return json({'status': 'authorized'})
```
@@ -1,7 +1,12 @@
# Deploying

Deploying Sanic is made simple by the inbuilt webserver. After defining an
instance of `sanic.Sanic`, we can call the `run` method with the following
Deploying Sanic is very simple using one of three options: the inbuilt webserver,
an [ASGI webserver](https://asgi.readthedocs.io/en/latest/implementations.html), or `gunicorn`.
It is also very common to place Sanic behind a reverse proxy, like `nginx`.

## Running via Sanic webserver

After defining an instance of `sanic.Sanic`, we can call the `run` method with the following
keyword arguments:

- `host` *(default `"127.0.0.1"`)*: Address to host the server on.
@@ -15,8 +20,15 @@ keyword arguments:
- `protocol` *(default `HttpProtocol`)*: Subclass
  of
  [asyncio.protocol](https://docs.python.org/3/library/asyncio-protocol.html#protocol-classes).
- `access_log` *(default `True`)*: Enables log on handling requests (significantly slows server).

## Workers
```python
app.run(host='0.0.0.0', port=1337, access_log=False)
```

In the above example, we decided to turn off the access log in order to increase performance.

### Workers

By default, Sanic listens in the main process using only one CPU core. To crank
up the juice, just specify the number of workers in the `run` arguments.

@@ -28,9 +40,9 @@ app.run(host='0.0.0.0', port=1337, workers=4)
Sanic will automatically spin up multiple processes and route traffic between
them. We recommend as many workers as you have available cores.

## Running via command
### Running via command

If you like using command line arguments, you can launch a Sanic server by
If you like using command line arguments, you can launch a Sanic webserver by
executing the module. For example, if you initialized Sanic as `app` in a file
named `server.py`, you could run the server like so:

@@ -45,6 +57,33 @@ if __name__ == '__main__':
    app.run(host='0.0.0.0', port=1337, workers=4)
```

## Running via ASGI

Sanic is also ASGI-compliant. This means you can use your preferred ASGI webserver
to run Sanic. The three main implementations of ASGI are
[Daphne](http://github.com/django/daphne), [Uvicorn](https://www.uvicorn.org/),
and [Hypercorn](https://pgjones.gitlab.io/hypercorn/index.html).

Follow their documentation for the proper way to run them, but it should look
something like:

```
daphne myapp:app
uvicorn myapp:app
hypercorn myapp:app
```

A couple of things to note when using ASGI:

1. When using the Sanic webserver, websockets will run using the [`websockets`](https://websockets.readthedocs.io/) package. In ASGI mode, there is no need for this package since websockets are managed in the ASGI server.
1. The ASGI [lifespan protocol](https://asgi.readthedocs.io/en/latest/specs/lifespan.html) supports
   only two server events: startup and shutdown. Sanic has four: before startup, after startup,
   before shutdown, and after shutdown. Therefore, in ASGI mode, the startup and shutdown events will
   run consecutively and not actually around the server process beginning and ending (since that
   is now controlled by the ASGI server). Therefore, it is best to use `after_server_start` and
   `before_server_stop` (see the sketch after this list).
1. ASGI mode is still in "beta" as of Sanic v19.6.
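
As a rough sketch of that advice (the listener bodies and route are made up), the two events that behave consistently in ASGI mode can be used like this:

```python
from sanic import Sanic
from sanic.response import json

app = Sanic(__name__)

@app.listener("after_server_start")
async def setup(app, loop):
    # stand-in for creating a real resource such as a connection pool
    app.db = {"connected": True}

@app.listener("before_server_stop")
async def teardown(app, loop):
    app.db = None

@app.route("/")
async def handler(request):
    return json({"db": request.app.db})
```
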
## Running via Gunicorn

[Gunicorn](http://gunicorn.org/) ‘Green Unicorn’ is a WSGI HTTP Server for UNIX.
@@ -63,16 +102,90 @@ of the memory leak.

See the [Gunicorn Docs](http://docs.gunicorn.org/en/latest/settings.html#max-requests) for more information.
## Asynchronous support
This is suitable if you *need* to share the sanic process with other applications, in particular the `loop`.
However be advised that this method does not support using multiple processes, and is not the preferred way
## Other deployment considerations

### Running behind a reverse proxy

Sanic can be used with a reverse proxy (e.g. nginx). There's a simple example of nginx configuration:

```
server {
    listen 80;
    server_name example.org;

    location / {
        proxy_pass http://127.0.0.1:8000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
```

If you want to get the real client IP, you should configure the `X-Real-IP` and `X-Forwarded-For` HTTP headers and set `app.config.PROXIES_COUNT` to `1`; see the configuration page for more information.

### Disable debug logging for performance

To improve the performance add `debug=False` and `access_log=False` in the `run` arguments.

```python
app.run(host='0.0.0.0', port=1337, workers=4, debug=False, access_log=False)
```

When running via Gunicorn, you can set the environment variable `SANIC_ACCESS_LOG="False"`:

```
env SANIC_ACCESS_LOG="False" gunicorn myapp:app --bind 0.0.0.0:1337 --worker-class sanic.worker.GunicornWorker --log-level warning
```

Or you can override the app config directly:

```python
app.config.ACCESS_LOG = False
```

### Asynchronous support and sharing the loop

This is suitable if you *need* to share the Sanic process with other applications, in particular the `loop`.
However, be advised that this method does not support using multiple processes, and is not the preferred way
to run the app in general.

Here is an incomplete example (please see `run_async.py` in examples for something more practical):

```python
server = app.create_server(host="0.0.0.0", port=8000)
server = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
loop = asyncio.get_event_loop()
task = asyncio.ensure_future(server)
loop.run_forever()
```

Caveat: using this method, calling `app.create_server()` will trigger "before_server_start" server events, but not
"after_server_start", "before_server_stop", or "after_server_stop" server events.

For more advanced use-cases, you can trigger these events using the AsyncioServer object, returned by awaiting
the server task.

Here is an incomplete example (please see `run_async_advanced.py` in examples for something more complete):

```python
serv_coro = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
loop = asyncio.get_event_loop()
serv_task = asyncio.ensure_future(serv_coro, loop=loop)
server = loop.run_until_complete(serv_task)
server.after_start()
try:
    loop.run_forever()
except KeyboardInterrupt as e:
    loop.stop()
finally:
    server.before_stop()

    # Wait for server to close
    close_task = server.close()
    loop.run_until_complete(close_task)

    # Complete all tasks on the loop
    for connection in server.connections:
        connection.close_if_idle()
    server.after_stop()
```
167 docs/sanic/examples.rst Normal file
@@ -0,0 +1,167 @@
Examples
========

This section of the documentation is a simple collection of example code that can help you get a quick start
on your application development. Most of these examples are categorized and provide you with a link to the
working code example in the `Sanic Repository <https://github.com/huge-success/sanic/tree/master/examples>`_.


Basic Examples
--------------

This section is a collection of code snippets that provide simple use case examples of a Sanic application.

Simple Apps
~~~~~~~~~~~~

A simple Sanic application with a single ``async`` handler returning ``text`` and ``json`` type responses.


.. literalinclude:: ../../examples/teapot.py

.. literalinclude:: ../../examples/simple_server.py


Simple App with ``Sanic Views``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Showcasing the simple mechanism of using :class:`sanic.views.HTTPMethodView` as well as a way to extend it
to provide custom ``async`` behavior for a ``view``.

.. literalinclude:: ../../examples/simple_async_view.py


URL Redirect
~~~~~~~~~~~~

.. literalinclude:: ../../examples/redirect_example.py


Named URL redirection
~~~~~~~~~~~~~~~~~~~~~

``Sanic`` provides an easy-to-use way of redirecting requests via a helper method called ``url_for`` that takes a
unique URL name as an argument and returns the actual route assigned to it. This helps simplify the
effort required to redirect users between different sections of the application.

.. literalinclude:: ../../examples/url_for_example.py

Blueprints
~~~~~~~~~~
``Sanic`` provides a feature called ``blueprints`` to group your APIs and routes under a logical collection that can easily be
imported and plugged into any of your Sanic applications.

.. literalinclude:: ../../examples/blueprints.py

Logging Enhancements
~~~~~~~~~~~~~~~~~~~~

Even though ``Sanic`` comes with batteries-included logging support, it allows end users to customize the way logging
is handled at application runtime.

.. literalinclude:: ../../examples/override_logging.py

The following sample provides example code that demonstrates the usage of :func:`sanic.app.Sanic.middleware` in order
to assign a unique request ID to each of the incoming requests and log them via
`aiotask-context <https://github.com/Skyscanner/aiotask-context>`_.


.. literalinclude:: ../../examples/log_request_id.py

Sanic Streaming Support
~~~~~~~~~~~~~~~~~~~~~~~

The ``Sanic`` framework comes with in-built support for streaming large files, and the following code explains the process
to set up a ``Sanic`` application with streaming support.

.. literalinclude:: ../../examples/request_stream/server.py

A sample client app to show the usage of a streaming application by client code.

.. literalinclude:: ../../examples/request_stream/client.py

Sanic Concurrency Support
~~~~~~~~~~~~~~~~~~~~~~~~~
``Sanic`` supports starting an app with multiple workers. However, it's important to be able to limit
the concurrency per process/loop in order to ensure efficient execution. The following section of the code provides a
brief example of how to limit the concurrency with the help of :class:`asyncio.Semaphore`.

.. literalinclude:: ../../examples/limit_concurrency.py
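
As a rough sketch of the same idea (the semaphore size, route, and sleep are made up), concurrency inside a handler can be bounded like this:

.. code:: python

    import asyncio

    from sanic import Sanic
    from sanic.response import json

    app = Sanic(__name__)

    @app.listener("before_server_start")
    async def setup_semaphore(app, loop):
        # allow at most 4 concurrent slow handlers per worker (arbitrary choice)
        app.config.SEMAPHORE = asyncio.Semaphore(4)

    @app.route("/slow")
    async def slow(request):
        async with request.app.config.SEMAPHORE:
            await asyncio.sleep(1)  # stand-in for a slow downstream call
        return json({"ok": True})

    if __name__ == "__main__":
        app.run(host="0.0.0.0", port=8000)
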
Sanic Deployment via Docker
~~~~~~~~~~~~~~~~~~~~~~~~~~~

Deploying a ``sanic`` app via ``docker`` and ``docker-compose`` is an easy task to achieve, and the following example
provides a deployment of the sample ``simple_server.py``.

.. literalinclude:: ../../examples/Dockerfile

.. literalinclude:: ../../examples/docker-compose.yml


Monitoring and Error Handling
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
``Sanic`` provides an extendable bare minimum implementation of a global exception handler via
:class:`sanic.handlers.ErrorHandler`. This example shows how to extend it to enable some custom behaviors.

.. literalinclude:: ../../examples/exception_monitoring.py

Monitoring using external Service Providers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

* `LogDNA <https://logdna.com/>`_

.. literalinclude:: ../../examples/logdna_example.py

* `RayGun <https://raygun.com/>`_

.. literalinclude:: ../../examples/raygun_example.py

* `Rollbar <https://rollbar.com>`_

.. literalinclude:: ../../examples/rollbar_example.py

* `Sentry <http://sentry.io>`_

.. literalinclude:: ../../examples/sentry_example.py


Security
~~~~~~~~

The following sample code shows a simple decorator-based authentication and authorization mechanism that can be set up
to secure your ``sanic`` API endpoints.

.. literalinclude:: ../../examples/authorized_sanic.py
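
The shape of such a decorator is sketched below (the token check, route, and names are made up; substitute your own validation logic):

.. code:: python

    from functools import wraps

    from sanic import Sanic
    from sanic.response import json

    app = Sanic(__name__)

    def authorized():
        def decorator(f):
            @wraps(f)
            async def decorated_function(request, *args, **kwargs):
                # hypothetical check -- replace with real token validation
                is_authorized = request.token == "secret-token"
                if is_authorized:
                    return await f(request, *args, **kwargs)
                return json({"status": "not_authorized"}, 403)
            return decorated_function
        return decorator

    @app.route("/protected")
    @authorized()
    async def protected(request):
        return json({"status": "authorized"})
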
Sanic Websocket
~~~~~~~~~~~~~~~

``Sanic`` provides the ability to easily add a route and map it to a ``websocket`` handler.

.. literalinclude:: ../../examples/websocket.html
.. literalinclude:: ../../examples/websocket.py

vhost Support
~~~~~~~~~~~~~~

.. literalinclude:: ../../examples/vhosts.py

Unit Testing With Parallel Test Run Support
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The following example shows you how to get up and running with unit testing a ``sanic`` application with parallel test
execution support provided by the ``pytest-xdist`` plugin.

.. literalinclude:: ../../examples/pytest_xdist.py


Amending Request Object
~~~~~~~~~~~~~~~~~~~~~~~

The ``request`` object in ``Sanic`` is a kind of ``dict`` object. This means that the ``request`` object can be manipulated as a regular ``dict`` object.

.. literalinclude:: ../../examples/amending_request_object.py
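
A minimal sketch of the idea (the key name and route are made up), storing a value on the request in middleware and reading it back in a handler:

.. code:: python

    import random

    from sanic import Sanic
    from sanic.response import text

    app = Sanic(__name__)

    @app.middleware("request")
    async def attach_number(request):
        # stash an arbitrary value on the request, dict-style
        request["num"] = random.randrange(0, 100)

    @app.route("/pop")
    async def pop_handler(request):
        return text("Your number is {}".format(request["num"]))

    if __name__ == "__main__":
        app.run(host="0.0.0.0", port=8000)
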
For more examples and useful samples, please visit the `Huge-Sanic GitHub Page <https://github.com/huge-success/sanic/tree/master/examples>`_.
@@ -47,6 +47,36 @@ async def ignore_404s(request, exception):
    return text("Yep, I totally found the page: {}".format(request.url))
```

You can also add an exception handler as such:

```python
from sanic import Sanic
from sanic.response import text

async def server_error_handler(request, exception):
    return text("Oops, server error", status=500)

app = Sanic()
app.error_handler.add(Exception, server_error_handler)
```

In some cases, you might want to add some more error handling
functionality to what is provided by default. In that case, you
can subclass Sanic's default error handler as such:

```python
from sanic import Sanic
from sanic.handlers import ErrorHandler

class CustomErrorHandler(ErrorHandler):
    def default(self, request, exception):
        ''' handles errors that have no error handlers assigned '''
        # Your custom error handling logic...
        return super().default(request, exception)

app = Sanic()
app.error_handler = CustomErrorHandler()
```

## Useful exceptions

Some of the most useful exceptions are presented below:
@@ -1,34 +1,3 @@
# Extensions

A list of Sanic extensions created by the community.
- [Sanic-Plugins-Framework](https://github.com/ashleysommer/sanicpluginsframework): Library for easily creating and using Sanic plugins.
- [Sessions](https://github.com/subyraman/sanic_session): Support for sessions.
  Allows using redis, memcache or an in memory store.
- [CORS](https://github.com/ashleysommer/sanic-cors): A port of flask-cors.
- [Compress](https://github.com/subyraman/sanic_compress): Allows you to easily gzip Sanic responses. A port of Flask-Compress.
- [Jinja2](https://github.com/lixxu/sanic-jinja2): Support for Jinja2 template.
- [Sanic JWT](https://github.com/ahopkins/sanic-jwt): Authentication, JWT, and permission scoping for Sanic.
- [OpenAPI/Swagger](https://github.com/channelcat/sanic-openapi): OpenAPI support, plus a Swagger UI.
- [Pagination](https://github.com/lixxu/python-paginate): Simple pagination support.
- [Motor](https://github.com/lixxu/sanic-motor): Simple motor wrapper.
- [Sanic CRUD](https://github.com/Typhon66/sanic_crud): CRUD REST API generation with peewee models.
- [UserAgent](https://github.com/lixxu/sanic-useragent): Add `user_agent` to request
- [Limiter](https://github.com/bohea/sanic-limiter): Rate limiting for sanic.
- [Sanic EnvConfig](https://github.com/jamesstidard/sanic-envconfig): Pull environment variables into your sanic config.
- [Babel](https://github.com/lixxu/sanic-babel): Adds i18n/l10n support to Sanic applications with the help of the
  `Babel` library
- [Dispatch](https://github.com/ashleysommer/sanic-dispatcher): A dispatcher inspired by `DispatcherMiddleware` in werkzeug. Can act as a Sanic-to-WSGI adapter.
- [Sanic-OAuth](https://github.com/Sniedes722/Sanic-OAuth): OAuth Library for connecting to & creating your own token providers.
- [sanic-oauth](https://gitlab.com/SirEdvin/sanic-oauth): OAuth Library with many provider and OAuth1/OAuth2 support.
- [Sanic-nginx-docker-example](https://github.com/itielshwartz/sanic-nginx-docker-example): Simple and easy to use example of Sanic behined nginx using docker-compose.
- [sanic-graphql](https://github.com/graphql-python/sanic-graphql): GraphQL integration with Sanic
- [sanic-prometheus](https://github.com/dkruchinin/sanic-prometheus): Prometheus metrics for Sanic
- [Sanic-RestPlus](https://github.com/ashleysommer/sanic-restplus): A port of Flask-RestPlus for Sanic. Full-featured REST API with SwaggerUI generation.
- [sanic-transmute](https://github.com/yunstanford/sanic-transmute): A Sanic extension that generates APIs from python function and classes, and also generates Swagger UI/documentation automatically.
- [pytest-sanic](https://github.com/yunstanford/pytest-sanic): A pytest plugin for Sanic. It helps you to test your code asynchronously.
- [jinja2-sanic](https://github.com/yunstanford/jinja2-sanic): a jinja2 template renderer for Sanic.([Documentation](http://jinja2-sanic.readthedocs.io/en/latest/))
- [GINO](https://github.com/fantix/gino): An asyncio ORM on top of SQLAlchemy core, delivered with a Sanic extension. ([Documentation](https://python-gino.readthedocs.io/))
- [Sanic-Auth](https://github.com/pyx/sanic-auth): A minimal backend agnostic session-based user authentication mechanism for Sanic.
- [Sanic-CookieSession](https://github.com/pyx/sanic-cookiesession): A client-side only, cookie-based session, similar to the built-in session in Flask.
- [Sanic-WTF](https://github.com/pyx/sanic-wtf): Sanic-WTF makes using WTForms with Sanic and CSRF (Cross-Site Request Forgery) protection a little bit easier.
- [sanic-sse](https://github.com/inn0kenty/sanic_sse): [Server-Sent Events](https://en.wikipedia.org/wiki/Server-sent_events) implementation for Sanic.

Moved to the [awesome-sanic](https://github.com/mekicha/awesome-sanic) list.
@@ -1,11 +1,33 @@
# Getting Started

Make sure you have both [pip](https://pip.pypa.io/en/stable/installing/) and at
least version 3.5 of Python before starting. Sanic uses the new `async`/`await`
least version 3.6 of Python before starting. Sanic uses the new `async`/`await`
syntax, so earlier versions of python won't work.

1. Install Sanic: `python3 -m pip install sanic`
2. Create a file called `main.py` with the following code:
## 1. Install Sanic

> If you are running on a clean install of Fedora 28 or above, please make sure you have the ``redhat-rpm-config`` package installed in case you want to use ``sanic`` with the ``ujson`` dependency.

```bash
pip3 install sanic
```

To install sanic without `uvloop` or `ujson` using bash, you can provide either or both of these environment variables
using any truthy string like `'y', 'yes', 't', 'true', 'on', '1'`; setting `SANIC_NO_X` (`X` = `UVLOOP`/`UJSON`)
to true will skip that feature's installation.

```bash
SANIC_NO_UVLOOP=true SANIC_NO_UJSON=true pip3 install sanic
```

You can also install Sanic from [`conda-forge`](https://anaconda.org/conda-forge/sanic)

```bash
conda config --add channels conda-forge
conda install sanic
```

## 2. Create a file called `main.py`

```python
from sanic import Sanic
@@ -20,9 +42,16 @@ syntax, so earlier versions of python won't work.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
```

3. Run the server: `python3 main.py`
4. Open the address `http://0.0.0.0:8000` in your web browser. You should see
   the message *Hello world!*.

## 3. Run the server

```
python3 main.py
```

## 4. Check your browser

Open the address `http://0.0.0.0:8000` in your web browser. You should see
the message *Hello world!*.

You now have a working Sanic server!
@@ -1,9 +1,9 @@
Sanic
=================================

Sanic is a Flask-like Python 3.5+ web server that's written to go fast. It's based on the work done by the amazing folks at magicstack, and was inspired by `this article <https://magic.io/blog/uvloop-blazing-fast-python-networking/>`_.
Sanic is a Python 3.6+ web server and web framework that's written to go fast. It allows the usage of the async/await syntax added in Python 3.5, which makes your code non-blocking and speedy.

On top of being Flask-like, Sanic supports async request handlers. This means you can use the new shiny async/await syntax from Python 3.5, making your code non-blocking and speedy.
The goal of the project is to provide a simple way to get up and running a highly performant HTTP server that is easy to build, to expand, and ultimately to scale.

Sanic is developed `on GitHub <https://github.com/channelcat/sanic/>`_. Contributions are welcome!

@@ -22,4 +22,8 @@ Sanic aspires to be simple
        return json({"hello": "world"})

    if __name__ == "__main__":
        app.run(host="0.0.0.0", port=8000)
        app.run(host="0.0.0.0", port=8000)

.. note::

    Sanic does not support Python 3.5 from version 19.6 onward. However, version 18.12 LTS is supported through December 2020. Official Python support for version 3.5 is set to expire in September 2020.
@@ -1,85 +0,0 @@
# Logging

Sanic allows you to do different types of logging (access log, error log) on the requests based on the [python3 logging API](https://docs.python.org/3/howto/logging.html). You should have some basic knowledge on python3 logging if you want to create a new configuration.

### Quick Start

A simple example using default settings would be like this:

```python
from sanic import Sanic

app = Sanic('test')

@app.route('/')
async def test(request):
    return response.text('Hello World!')

if __name__ == "__main__":
    app.run(debug=True, access_log=True)
```

To use your own logging config, simply use `logging.config.dictConfig`, or
pass `log_config` when you initialize `Sanic` app:

```python
app = Sanic('test', log_config=LOGGING_CONFIG)
```

And to close logging, simply assign access_log=False:

```python
if __name__ == "__main__":
    app.run(access_log=False)
```

This would skip calling logging functions when handling requests.
And you could even do further in production to gain extra speed:

```python
if __name__ == "__main__":
    # disable debug messages
    app.run(debug=False, access_log=False)
```

### Configuration

By default, log_config parameter is set to use sanic.log.LOGGING_CONFIG_DEFAULTS dictionary for configuration.

There are three `loggers` used in sanic, and **must be defined if you want to create your own logging configuration**:

- root:<br>
  Used to log internal messages.

- sanic.error:<br>
  Used to log error logs.

- sanic.access:<br>
  Used to log access logs.

#### Log format:

In addition to default parameters provided by python (asctime, levelname, message),
Sanic provides additional parameters for access logger with:

- host (str)<br>
  request.ip

- request (str)<br>
  request.method + " " + request.url

- status (int)<br>
  response.status

- byte (int)<br>
  len(response.body)

The default access log format is
```python
%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: %(request)s %(message)s %(status)d %(byte)d
```
docs/sanic/logging.rst (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
Logging
|
||||
=======
|
||||
|
||||
Sanic allows you to do different types of logging (access log, error
|
||||
log) on the requests based on the `python3 logging API`_. You should
|
||||
have some basic knowledge on python3 logging if you want to create a new
|
||||
configuration.
|
||||
|
||||
Quick Start
|
||||
~~~~~~~~~~~
|
||||
|
||||
A simple example using default settings would be like this:
|
||||
|
||||
.. code:: python
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.log import logger
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic('test')
|
||||
|
||||
@app.route('/')
|
||||
async def test(request):
|
||||
logger.info('Here is your log')
|
||||
return text('Hello World!')
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(debug=True, access_log=True)
|
||||
|
||||
After the server is running, you will see log messages like the following:
|
||||
|
||||
::
|
||||
|
||||
[2018-11-06 21:16:53 +0800] [24622] [INFO] Goin' Fast @ http://127.0.0.1:8000
|
||||
[2018-11-06 21:16:53 +0800] [24667] [INFO] Starting worker [24667]
|
||||
|
||||
You can send a request to the server and it will print the log messages:
|
||||
|
||||
::
|
||||
|
||||
[2018-11-06 21:18:53 +0800] [25685] [INFO] Here is your log
|
||||
[2018-11-06 21:18:53 +0800] - (sanic.access)[INFO][127.0.0.1:57038]: GET http://localhost:8000/ 200 12
|
||||
|
||||
To use your own logging config, simply use
|
||||
``logging.config.dictConfig``, or pass ``log_config`` when you
|
||||
initialize ``Sanic`` app:
|
||||
|
||||
.. code:: python
|
||||
|
||||
app = Sanic('test', log_config=LOGGING_CONFIG)
|
||||
|
||||
To disable access logging, simply set ``access_log=False``:
|
||||
|
||||
.. code:: python
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(access_log=False)
|
||||
|
||||
This skips calling the access-log functions when handling requests. You can
go even further in production to gain extra speed:
|
||||
|
||||
.. code:: python
|
||||
|
||||
if __name__ == "__main__":
|
||||
# disable debug messages
|
||||
app.run(debug=False, access_log=False)
|
||||
|
||||
Configuration
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
By default, ``log_config`` parameter is set to use
|
||||
``sanic.log.LOGGING_CONFIG_DEFAULTS`` dictionary for configuration.
|
||||
|
||||
There are three ``loggers`` used in sanic, and **must be defined if you
|
||||
want to create your own logging configuration**:
|
||||
|
||||
================ ==============================
|
||||
Logger Name Usecase
|
||||
================ ==============================
|
||||
``sanic.root`` Used to log internal messages.
|
||||
``sanic.error`` Used to log error logs.
|
||||
``sanic.access`` Used to log access logs.
|
||||
================ ==============================
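A minimal sketch of a custom configuration (the tweak shown is illustrative, not
prescribed) is to copy ``sanic.log.LOGGING_CONFIG_DEFAULTS`` so that the three
loggers above stay defined, and adjust only what you need:

.. code:: python

    import copy

    from sanic import Sanic
    from sanic.log import LOGGING_CONFIG_DEFAULTS

    # Copy the defaults so sanic.root, sanic.error and sanic.access stay defined
    LOGGING_CONFIG = copy.deepcopy(LOGGING_CONFIG_DEFAULTS)

    # Illustrative change only: make the error logger more verbose
    LOGGING_CONFIG["loggers"]["sanic.error"]["level"] = "DEBUG"

    app = Sanic('test', log_config=LOGGING_CONFIG)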
|
||||
|
||||
Log format:
|
||||
^^^^^^^^^^^
|
||||
|
||||
In addition to default parameters provided by python (``asctime``,
|
||||
``levelname``, ``message``), Sanic provides additional parameters for
|
||||
access logger with:
|
||||
|
||||
===================== ========================================== ========
|
||||
Log Context Parameter Parameter Value Datatype
|
||||
===================== ========================================== ========
|
||||
``host`` ``request.ip`` str
|
||||
``request`` ``request.method`` + " " + ``request.url`` str
|
||||
``status`` ``response.status`` int
|
||||
``byte`` ``len(response.body)`` int
|
||||
===================== ========================================== ========
|
||||
|
||||
The default access log format is ``%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: %(request)s %(message)s %(status)d %(byte)d``
|
||||
|
||||
.. _python3 logging API: https://docs.python.org/3/howto/logging.html
|
||||
@@ -17,7 +17,7 @@ string representing its type: `'request'` or `'response'`.
|
||||
|
||||
The simplest middleware doesn't modify the request or response at all:
|
||||
|
||||
```python
|
||||
```
|
||||
@app.middleware('request')
|
||||
async def print_on_request(request):
|
||||
print("I print when a request is received by the server")
|
||||
@@ -33,25 +33,41 @@ Middleware can modify the request or response parameter it is given, *as long
|
||||
as it does not return it*. The following example shows a practical use-case for
|
||||
this.
|
||||
|
||||
```python
|
||||
```
|
||||
app = Sanic(__name__)
|
||||
|
||||
|
||||
@app.middleware('request')
|
||||
async def add_key(request):
|
||||
# Arbitrary data may be stored in request context:
|
||||
request.ctx.foo = 'bar'
|
||||
|
||||
|
||||
@app.middleware('response')
|
||||
async def custom_banner(request, response):
|
||||
response.headers["Server"] = "Fake-Server"
|
||||
|
||||
|
||||
@app.middleware('response')
|
||||
async def prevent_xss(request, response):
|
||||
response.headers["x-xss-protection"] = "1; mode=block"
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def index(request):
|
||||
return sanic.response.text(request.ctx.foo)
|
||||
|
||||
|
||||
app.run(host="0.0.0.0", port=8000)
|
||||
```
|
||||
|
||||
The above code will apply the two middleware in order. First, the middleware
|
||||
**custom_banner** will change the HTTP response header *Server* to
|
||||
*Fake-Server*, and the second middleware **prevent_xss** will add the HTTP
|
||||
header for preventing Cross-Site-Scripting (XSS) attacks. These two functions
|
||||
are invoked *after* a user function returns a response.
|
||||
The three middleware functions are executed in order:
|
||||
|
||||
1. The first request middleware **add_key** adds a new key `foo` into request context.
|
||||
2. Request is routed to handler **index**, which gets the key from context and returns a text response.
|
||||
3. The first response middleware **custom_banner** changes the HTTP response header *Server* to
|
||||
say *Fake-Server*
|
||||
4. The second response middleware **prevent_xss** adds the HTTP header for preventing Cross-Site-Scripting (XSS) attacks.
|
||||
|
||||
## Responding early
|
||||
|
||||
@@ -60,7 +76,7 @@ and the response will be returned. If this occurs to a request before the
|
||||
relevant user route handler is reached, the handler will never be called.
|
||||
Returning a response will also prevent any further middleware from running.
|
||||
|
||||
```python
|
||||
```
|
||||
@app.middleware('request')
|
||||
async def halt_request(request):
|
||||
return text('I halted the request')
|
||||
@@ -70,6 +86,16 @@ async def halt_response(request, response):
|
||||
return text('I halted the response')
|
||||
```
|
||||
|
||||
## Custom context
|
||||
|
||||
Arbitrary data may be stored in `request.ctx`. A typical use case
|
||||
would be to store the user object acquired from database in an authentication
|
||||
middleware. Keys added are accessible to all later middleware as well as
|
||||
the handler over the duration of the request.
|
||||
|
||||
Custom context is reserved for applications and extensions. Sanic itself makes
|
||||
no use of it.
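A minimal sketch of that pattern (the database lookup below is a hypothetical stand-in, not part of Sanic):

```python
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


async def fetch_user(token):
    # Hypothetical stand-in for a real database lookup
    return {"name": "demo", "token": token}


@app.middleware('request')
async def attach_user(request):
    # request.token is the value of the Authorization header (may be None)
    request.ctx.user = await fetch_user(request.token)


@app.get("/whoami")
async def whoami(request):
    # Keys added by the middleware are visible to later middleware and handlers
    return text(request.ctx.user["name"])


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
```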
|
||||
|
||||
## Listeners
|
||||
|
||||
If you want to execute startup/teardown code as your server starts or closes, you can use the following listeners:
|
||||
@@ -79,11 +105,11 @@ If you want to execute startup/teardown code as your server starts or closes, yo
|
||||
- `before_server_stop`
|
||||
- `after_server_stop`
|
||||
|
||||
These listeners are implemented as decorators on functions which accept the app object as well as the asyncio loop.
|
||||
|
||||
For example:
|
||||
|
||||
```python
|
||||
```
|
||||
@app.listener('before_server_start')
|
||||
async def setup_db(app, loop):
|
||||
app.db = await db_setup()
|
||||
@@ -101,16 +127,16 @@ async def close_db(app, loop):
|
||||
await app.db.close()
|
||||
```
|
||||
|
||||
It's also possible to register a listener using the `register_listener` method.
|
||||
This may be useful if you define your listeners in another module besides
|
||||
the one you instantiate your app in.
|
||||
|
||||
```python
|
||||
```
|
||||
app = Sanic()
|
||||
|
||||
|
||||
async def setup_db(app, loop):
|
||||
app.db = await db_setup()
|
||||
|
||||
|
||||
app.register_listener(setup_db, 'before_server_start')
|
||||
|
||||
```
|
||||
@@ -118,7 +144,7 @@ app.register_listener(setup_db, 'before_server_start')
|
||||
If you want to schedule a background task to run after the loop has started,
|
||||
Sanic provides the `add_task` method to easily do so.
|
||||
|
||||
```python
|
||||
```
|
||||
async def notify_server_started_after_five_seconds():
|
||||
await asyncio.sleep(5)
|
||||
print('Server successfully started!')
|
||||
@@ -128,7 +154,7 @@ app.add_task(notify_server_started_after_five_seconds())
|
||||
|
||||
Sanic will attempt to automatically inject the app, passing it as an argument to the task:
|
||||
|
||||
```python
|
||||
```
|
||||
async def notify_server_started_after_five_seconds(app):
|
||||
await asyncio.sleep(5)
|
||||
print(app.name)
|
||||
@@ -138,7 +164,7 @@ app.add_task(notify_server_started_after_five_seconds)
|
||||
|
||||
Or you can pass the app explicitly for the same effect:
|
||||
|
||||
```python
|
||||
```
|
||||
async def notify_server_started_after_five_seconds(app):
|
||||
await asyncio.sleep(5)
|
||||
print(app.name)
|
||||
|
||||
@@ -19,6 +19,8 @@ The following variables are accessible as properties on `Request` objects:
|
||||
URL that resembles `?key1=value1&key2=value2`. If that URL were to be parsed,
|
||||
the `args` dictionary would look like `{'key1': ['value1'], 'key2': ['value2']}`.
|
||||
The request's `query_string` variable holds the unparsed string value.
|
||||
This property uses the default parsing strategy. If you would like to change it, see the section below
(`Changing the default parsing rules of the queryset`).
|
||||
|
||||
```python
|
||||
from sanic.response import json
|
||||
@@ -28,9 +30,54 @@ The following variables are accessible as properties on `Request` objects:
|
||||
return json({ "parsed": True, "args": request.args, "url": request.url, "query_string": request.query_string })
|
||||
```
|
||||
|
||||
- `raw_args` (dict) - In many cases you will need to access the URL arguments in a less packed dictionary. For the same URL `?key1=value1&key2=value2`, the `raw_args` dictionary would look like `{'key1': 'value1', 'key2': 'value2'}`.
|
||||
- `query_args` (list) - In many cases you will need to access the URL arguments in a less packed form. `query_args` is a list of `(key, value)` tuples. This property uses the default parsing strategy; if you would like to change it, see the section below (`Changing the default parsing rules of the queryset`). For the same URL queryset `?key1=value1&key2=value2`, the `query_args` list would look like `[('key1', 'value1'), ('key2', 'value2')]`. In case of multiple parameters with the same key, such as `?key1=value1&key2=value2&key1=value3`, the `query_args` list would look like `[('key1', 'value1'), ('key2', 'value2'), ('key1', 'value3')]`.
|
||||
|
||||
The difference between `Request.args` and `Request.query_args` for the queryset `?key1=value1&key2=value2&key1=value3` is shown below:
|
||||
|
||||
```python
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
|
||||
@app.route("/test_request_args")
|
||||
async def test_request_args(request):
|
||||
return json({
|
||||
"parsed": True,
|
||||
"url": request.url,
|
||||
"query_string": request.query_string,
|
||||
"args": request.args,
|
||||
"raw_args": request.raw_args,
|
||||
"query_args": request.query_args,
|
||||
})
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(host="0.0.0.0", port=8000)
|
||||
```
|
||||
|
||||
Output
|
||||
|
||||
```
|
||||
{
|
||||
"parsed":true,
|
||||
"url":"http:\/\/0.0.0.0:8000\/test_request_args?key1=value1&key2=value2&key1=value3",
|
||||
"query_string":"key1=value1&key2=value2&key1=value3",
|
||||
"args":{"key1":["value1","value3"],"key2":["value2"]},
|
||||
"raw_args":{"key1":"value1","key2":"value2"},
|
||||
"query_args":[["key1","value1"],["key2","value2"],["key1","value3"]]
|
||||
}
|
||||
```
|
||||
|
||||
`raw_args` contains only the first entry of `key1`. It will be deprecated in a future version.
|
||||
|
||||
- `files` (dictionary of `File` objects) - List of files that have a name, body, and type
|
||||
|
||||
@@ -98,13 +145,62 @@ The following variables are accessible as properties on `Request` objects:
|
||||
|
||||
```
|
||||
- `url`: The full URL of the request, ie: `http://localhost:8000/posts/1/?foo=bar`
|
||||
- `scheme`: The URL scheme associated with the request: `http` or `https`
|
||||
- `host`: The host associated with the request: `localhost:8080`
|
||||
- `scheme`: The URL scheme associated with the request: `'http'`, `'https'`, `'ws'`, `'wss'`, or an arbitrary value given by the headers.
- `host`: The host associated with the request (taken from the `Host` header): `localhost:8080`
|
||||
- `server_name`: The hostname of the server, without the port number. The value is looked up in this order: `config.SERVER_NAME`, the `x-forwarded-host` header, :func:`Request.host`
- `server_port`: Like `server_name`. Looked up in this order: the `x-forwarded-port` header, :func:`Request.host`, the actual port used by the transport layer socket.
|
||||
- `path`: The path of the request: `/posts/1/`
|
||||
- `query_string`: The query string of the request: `foo=bar` or a blank string `''`
|
||||
- `uri_template`: Template for matching route handler: `/posts/<id>/`
|
||||
- `token`: The value of Authorization header: `Basic YWRtaW46YWRtaW4=`
|
||||
|
||||
- `url_for`: Just like `sanic.Sanic.url_for`, but automatically determines `scheme` and `netloc` based on the request. Since this method aims to generate a correct scheme and netloc, `_external` is implied; see the sketch below.
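A minimal sketch of `request.url_for` (the route and handler names are illustrative):

```python
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


@app.get("/posts/<post_id>")
async def post_handler(request, post_id):
    # Builds an absolute URL using the scheme and host of the incoming request
    return text(request.url_for("post_handler", post_id=10))
```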
|
||||
|
||||
|
||||
## Changing the default parsing rules of the queryset
|
||||
|
||||
The default parameters used internally by the `args` and `query_args` properties to parse the queryset are:
|
||||
|
||||
- `keep_blank_values` (bool): `False` - flag indicating whether blank values in
|
||||
percent-encoded queries should be treated as blank strings.
|
||||
A true value indicates that blanks should be retained as blank
|
||||
strings. The default false value indicates that blank values
|
||||
are to be ignored and treated as if they were not included.
|
||||
- `strict_parsing` (bool): `False` - flag indicating what to do with parsing errors. If
|
||||
false (the default), errors are silently ignored. If true,
|
||||
errors raise a ValueError exception.
|
||||
- `encoding` and `errors` (str): 'utf-8' and 'replace' - specify how to decode percent-encoded sequences
|
||||
into Unicode characters, as accepted by the bytes.decode() method.
|
||||
|
||||
If you would like to change these default parameters, you can call the `get_args` and `get_query_args` methods
with the new values.
|
||||
|
||||
For the queryset `/?test1=value1&test2=&test3=value3`:
|
||||
|
||||
```python
|
||||
from sanic.response import json
|
||||
|
||||
@app.route("/query_string")
|
||||
def query_string(request):
|
||||
args_with_blank_values = request.get_args(keep_blank_values=True)
|
||||
return json({
|
||||
"parsed": True,
|
||||
"url": request.url,
|
||||
"args_with_blank_values": args_with_blank_values,
|
||||
"query_string": request.query_string
|
||||
})
|
||||
```
|
||||
|
||||
The output will be:
|
||||
|
||||
```
|
||||
{
|
||||
"parsed": true,
|
||||
"url": "http:\/\/0.0.0.0:8000\/query_string?test1=value1&test2=&test3=value3",
|
||||
"args_with_blank_values": {"test1": ["value1"], "test2": "", "test3": ["value3"]},
|
||||
"query_string": "test1=value1&test2=&test3=value3"
|
||||
}
|
||||
```
|
||||
|
||||
## Accessing values using `get` and `getlist`
|
||||
|
||||
@@ -126,3 +222,40 @@ args.get('titles') # => 'Post 1'
|
||||
|
||||
args.getlist('titles') # => ['Post 1', 'Post 2']
|
||||
```
|
||||
|
||||
## Accessing the handler name with the request.endpoint attribute
|
||||
|
||||
The `request.endpoint` attribute holds the handler's name. For instance, the below
|
||||
route will return "hello".
|
||||
|
||||
```python
|
||||
from sanic.response import text
|
||||
from sanic import Sanic
|
||||
|
||||
app = Sanic()
|
||||
|
||||
@app.get("/")
|
||||
def hello(request):
|
||||
return text(request.endpoint)
|
||||
```
|
||||
|
||||
Or, with a blueprint, it will include both the blueprint name and the handler name, separated by a period. For example,
the route below would return `foo.bar`:
|
||||
|
||||
```python
|
||||
from sanic import Sanic
|
||||
from sanic import Blueprint
|
||||
from sanic.response import text
|
||||
|
||||
|
||||
app = Sanic(__name__)
|
||||
blueprint = Blueprint('foo')
|
||||
|
||||
@blueprint.get('/')
|
||||
async def bar(request):
|
||||
return text(request.endpoint)
|
||||
|
||||
app.blueprint(blueprint)
|
||||
|
||||
app.run(host="0.0.0.0", port=8000, debug=True)
|
||||
```
|
||||
|
||||
@@ -55,11 +55,13 @@ from sanic import response
|
||||
@app.route("/streaming")
|
||||
async def index(request):
|
||||
async def streaming_fn(response):
|
||||
response.write('foo')
|
||||
response.write('bar')
|
||||
await response.write('foo')
|
||||
await response.write('bar')
|
||||
return response.stream(streaming_fn, content_type='text/plain')
|
||||
```
|
||||
|
||||
See [Streaming](streaming.md) for more information.
|
||||
|
||||
## File Streaming
|
||||
For large files, a combination of File and Streaming above
|
||||
```python
|
||||
|
||||
@@ -41,27 +41,75 @@ inside the quotes. If the parameter does not match the specified type, Sanic
|
||||
will throw a `NotFound` exception, resulting in a `404: Page not found` error
|
||||
on the URL.
|
||||
|
||||
### Supported types
|
||||
|
||||
* `string`
|
||||
* "Bob"
|
||||
* "Python 3"
|
||||
* `int`
|
||||
* 10
|
||||
* 20
|
||||
* 30
|
||||
* -10
|
||||
* (No floats work here)
|
||||
* `number`
|
||||
* 1
|
||||
* 1.5
|
||||
* 10
|
||||
* -10
|
||||
* `alpha`
|
||||
* "Bob"
|
||||
* "Python"
|
||||
* (If it contains a symbol or a non alphanumeric character it will fail)
|
||||
* `path`
|
||||
* "hello"
|
||||
* "hello.text"
|
||||
* "hello world"
|
||||
* `uuid`
|
||||
* 123a123a-a12a-1a1a-a1a1-1a12a1a12345 (UUIDv4 Support)
|
||||
* `regex expression`
|
||||
|
||||
If no type is set then a string is expected. The argument given to the function will always be a string, independent of the type.
|
||||
|
||||
```python
|
||||
from sanic.response import text
|
||||
|
||||
@app.route('/number/<integer_arg:int>')
|
||||
@app.route('/string/<string_arg:string>')
|
||||
async def string_handler(request, string_arg):
|
||||
return text('String - {}'.format(string_arg))
|
||||
|
||||
@app.route('/int/<integer_arg:int>')
|
||||
async def integer_handler(request, integer_arg):
|
||||
return text('Integer - {}'.format(integer_arg))
|
||||
|
||||
@app.route('/number/<number_arg:number>')
|
||||
async def number_handler(request, number_arg):
|
||||
return text('Number - {}'.format(number_arg))
|
||||
|
||||
@app.route('/alpha/<alpha_arg:alpha>')
|
||||
async def number_handler(request, alpha_arg):
|
||||
return text('Alpha - {}'.format(alpha_arg))
|
||||
|
||||
@app.route('/path/<path_arg:path>')
|
||||
async def number_handler(request, path_arg):
|
||||
return text('Path - {}'.format(path_arg))
|
||||
|
||||
@app.route('/uuid/<uuid_arg:uuid>')
|
||||
async def number_handler(request, uuid_arg):
|
||||
return text('Uuid - {}'.format(uuid_arg))
|
||||
|
||||
@app.route('/person/<name:[A-z]+>')
|
||||
async def person_handler(request, name):
|
||||
return text('Person - {}'.format(name))
|
||||
|
||||
@app.route('/folder/<folder_id:[A-z0-9]{0,4}>')
|
||||
async def folder_handler(request, folder_id):
|
||||
return text('Folder - {}'.format(folder_id))
|
||||
|
||||
```
|
||||
|
||||
**Warning:** `str` is not a valid type tag. If you want `str` recognition then you must use `string`.
|
||||
|
||||
## HTTP request types
|
||||
|
||||
By default, a route defined on a URL will be available for only GET requests to that URL.
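For example (a minimal sketch with an illustrative route), other HTTP methods can be enabled with the `methods` argument:

```python
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


# This route accepts only POST requests because of the methods argument
@app.route('/post', methods=['POST'])
async def post_handler(request):
    return text('POST request - {}'.format(request.json))
```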
|
||||
@@ -155,33 +203,39 @@ async def post_handler(request, post_id):
|
||||
Other things to keep in mind when using `url_for`:
|
||||
|
||||
- Keyword arguments passed to `url_for` that are not request parameters will be included in the URL's query string. For example:
|
||||
|
||||
```python
|
||||
url = app.url_for('post_handler', post_id=5, arg_one='one', arg_two='two')
|
||||
# /posts/5?arg_one=one&arg_two=two
|
||||
```
|
||||
|
||||
- Multivalue argument can be passed to `url_for`. For example:
|
||||
|
||||
```python
|
||||
url = app.url_for('post_handler', post_id=5, arg_one=['one', 'two'])
|
||||
# /posts/5?arg_one=one&arg_one=two
|
||||
```
|
||||
- Also some special arguments (`_anchor`, `_external`, `_scheme`, `_method`, `_server`) passed to `url_for` will have special url building (`_method` is not support now and will be ignored). For example:
|
||||
|
||||
- Also some special arguments (`_anchor`, `_external`, `_scheme`, `_method`, `_server`) passed to `url_for` will have special url building (`_method` is not supported now and will be ignored). For example:
|
||||
|
||||
```python
|
||||
url = app.url_for('post_handler', post_id=5, arg_one='one', _anchor='anchor')
|
||||
# /posts/5?arg_one=one#anchor
|
||||
|
||||
url = app.url_for('post_handler', post_id=5, arg_one='one', _external=True)
|
||||
# //server/posts/5?arg_one=one
|
||||
# _external requires passed argument _server or SERVER_NAME in app.config or url will be same as no _external
|
||||
# _external requires you to pass an argument _server or to set SERVER_NAME in app.config; otherwise the url will be the same as without _external
|
||||
|
||||
url = app.url_for('post_handler', post_id=5, arg_one='one', _scheme='http', _external=True)
|
||||
# http://server/posts/5?arg_one=one
|
||||
# when specifying _scheme, _external must be True
|
||||
|
||||
# you can pass all special arguments one time
|
||||
# you can pass all special arguments at once
|
||||
url = app.url_for('post_handler', post_id=5, arg_one=['one', 'two'], arg_two=2, _anchor='anchor', _scheme='http', _external=True, _server='another_server:8888')
|
||||
# http://another_server:8888/posts/5?arg_one=one&arg_one=two&arg_two=2#anchor
|
||||
```
|
||||
- All valid parameters must be passed to `url_for` to build a URL. If a parameter is not supplied, or if a parameter does not match the specified type, a `URLBuildError` will be thrown.
|
||||
|
||||
- All valid parameters must be passed to `url_for` to build a URL. If a parameter is not supplied, or if a parameter does not match the specified type, a `URLBuildError` will be raised.
|
||||
|
||||
## WebSocket routes
|
||||
|
||||
@@ -209,7 +263,7 @@ async def feed(request, ws):
|
||||
app.add_websocket_route(my_websocket_handler, '/feed')
|
||||
```
|
||||
|
||||
Handlers for a WebSocket route are passed the request as first argument, and a
|
||||
Handlers for a WebSocket route are invoked with the request as the first argument, and a
|
||||
WebSocket protocol object as second argument. The protocol object has `send`
|
||||
and `recv` methods to send and receive data respectively.
|
||||
|
||||
@@ -241,9 +295,49 @@ def handler(request):
|
||||
app.blueprint(bp)
|
||||
```
|
||||
|
||||
The `strict_slashes` flag follows a defined hierarchy which decides whether a specific route
falls under the `strict_slashes` behavior.
|
||||
|
||||
```bash
|
||||
|___ Route
|
||||
|___ Blueprint
|
||||
|___ Application
|
||||
```
|
||||
|
||||
The hierarchy above defines how the `strict_slashes` flag will behave. The first non-`None` value of `strict_slashes`
found in the above order will be applied to the route in question.
|
||||
|
||||
```python
|
||||
from sanic import Sanic, Blueprint
|
||||
from sanic.response import text
|
||||
|
||||
app = Sanic("sample_strict_slashes", strict_slashes=True)
|
||||
|
||||
@app.get("/r1")
|
||||
def r1(request):
|
||||
return text("strict_slashes is applicable from App level")
|
||||
|
||||
@app.get("/r2", strict_slashes=False)
|
||||
def r2(request):
|
||||
return text("strict_slashes is not applicable due to False value set in route level")
|
||||
|
||||
bp = Blueprint("bp", strict_slashes=False)
|
||||
|
||||
@bp.get("/r3", strict_slashes=True)
|
||||
def r3(request):
|
||||
return text("strict_slashes applicable from blueprint route level")
|
||||
|
||||
bp1 = Blueprint("bp1", strict_slashes=True)
|
||||
|
||||
@bp.get("/r4")
|
||||
def r3(request):
|
||||
return text("strict_slashes applicable from blueprint level")
|
||||
```
|
||||
|
||||
## User defined route name
|
||||
|
||||
You can pass `name` to change the route name to avoid using the default name (`handler.__name__`).
|
||||
A custom route name can be used by passing a `name` argument while registering the route, which will
override the default route name generated from the `handler.__name__` attribute.
|
||||
|
||||
```python
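# A hedged sketch (illustrative names), not Sanic's official example, of
# registering a route with a custom name:
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


@app.get("/get", name="get_handler")
def handler(request):
    return text("OK")


# url_for resolves the route by the custom name instead of handler.__name__:
# app.url_for("get_handler") == "/get"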
|
||||
|
||||
@@ -305,8 +399,8 @@ def handler(request):
|
||||
|
||||
## Build URL for static files
|
||||
|
||||
You can use `url_for` for static file url building now.
|
||||
If it's for file directly, `filename` can be ignored.
|
||||
Sanic supports using the `url_for` method to build static file URLs. If the static URL
points to a directory, the `filename` parameter to `url_for` can be omitted.
|
||||
|
||||
```python
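# A hedged sketch (illustrative paths), not Sanic's official example, of
# building a URL for a static route:
from sanic import Sanic

app = Sanic(__name__)
app.static('/static', './static')

# With a static directory, pass filename; for a single served file it can be omitted
url = app.url_for('static', filename='file.txt')  # == '/static/file.txt'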
|
||||
|
||||
|
||||
docs/sanic/sockets.rst (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
Sockets
|
||||
=======
|
||||
|
||||
Sanic can use the python
|
||||
`socket module <https://docs.python.org/3/library/socket.html>`_ to accommodate
|
||||
non IPv4 sockets.
|
||||
|
||||
IPv6 example:
|
||||
|
||||
.. code:: python
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
import socket
|
||||
|
||||
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
|
||||
sock.bind(('::', 7777))
|
||||
|
||||
app = Sanic()
|
||||
|
||||
|
||||
@app.route("/")
|
||||
async def test(request):
|
||||
return json({"hello": "world"})
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(sock=sock)
|
||||
|
||||
To test IPv6: ``curl -g -6 "http://[::1]:7777/"``
|
||||
|
||||
|
||||
UNIX socket example:
|
||||
|
||||
.. code:: python
|
||||
|
||||
import signal
|
||||
import sys
|
||||
import socket
|
||||
import os
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
|
||||
server_socket = '/tmp/sanic.sock'
|
||||
|
||||
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
|
||||
sock.bind(server_socket)
|
||||
|
||||
app = Sanic()
|
||||
|
||||
|
||||
@app.route("/")
|
||||
async def test(request):
|
||||
return json({"hello": "world"})
|
||||
|
||||
|
||||
def signal_handler(sig, frame):
|
||||
print('Exiting')
|
||||
os.unlink(server_socket)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(sock=sock)
|
||||
|
||||
To test the UNIX socket: ``curl -v --unix-socket /tmp/sanic.sock http://localhost/hello``
|
||||
@@ -1,7 +1,7 @@
|
||||
# Static Files
|
||||
|
||||
Static files and directories, such as an image file, are served by Sanic when
|
||||
registered with the `app.static` method. The method takes an endpoint URL and a
|
||||
registered with the `app.static()` method. The method takes an endpoint URL and a
|
||||
filename. The file specified will then be accessible via the given endpoint.
|
||||
|
||||
```python
|
||||
@@ -34,6 +34,10 @@ app.url_for('static', name='another', filename='any') == '/another.png'
|
||||
bp = Blueprint('bp', url_prefix='/bp')
|
||||
bp.static('/static', './static')
|
||||
|
||||
# specify a different content_type for your files
|
||||
# such as adding 'charset'
|
||||
app.static('/', '/public/index.html', content_type="text/html; charset=utf-8")
|
||||
|
||||
# serves the file directly
|
||||
bp.static('/the_best.png', '/home/ubuntu/test.png', name='best_png')
|
||||
app.blueprint(bp)
|
||||
@@ -43,3 +47,41 @@ app.url_for('static', name='bp.best_png') == '/bp/test_best.png'
|
||||
|
||||
app.run(host="0.0.0.0", port=8000)
|
||||
```
|
||||
|
||||
> **Note:** Sanic does not provide a directory index when you serve a static directory.
|
||||
|
||||
## Virtual Host
|
||||
|
||||
The `app.static()` method also supports **virtual hosts**. You can serve your static files for a specific **virtual host** with the `host` argument. For example:
|
||||
|
||||
```python
|
||||
from sanic import Sanic
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
app.static('/static', './static')
|
||||
app.static('/example_static', './example_static', host='www.example.com')
|
||||
```
|
||||
|
||||
## Streaming Large File
|
||||
|
||||
In some cases, you might serve large files (e.g. videos, images, etc.) with Sanic. You can choose to use **file streaming** rather than direct download.
|
||||
|
||||
Here is an example:
|
||||
```python
|
||||
from sanic import Sanic
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
app.static('/large_video.mp4', '/home/ubuntu/large_video.mp4', stream_large_files=True)
|
||||
```
|
||||
|
||||
When `stream_large_files` is `True`, Sanic will use `file_stream()` instead of `file()` to serve static files. This uses **1KB** as the default chunk size. If needed, you can also use a custom chunk size. For example:
|
||||
```python
|
||||
from sanic import Sanic
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
chunk_size = 1024 * 1024 * 8  # Set chunk size to 8 MiB
|
||||
app.static('/large_video.mp4', '/home/ubuntu/large_video.mp4', stream_large_files=chunk_size)
|
||||
```
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
## Request Streaming
|
||||
|
||||
Sanic allows you to get request data by stream, as below. When the request ends, `request.stream.get()` returns `None`. Only post, put and patch decorator have stream argument.
|
||||
Sanic allows you to get request data by stream, as below. When the request ends, `await request.stream.read()` returns `None`. Only the post, put and patch decorators have a stream argument.
|
||||
|
||||
```python
|
||||
from sanic import Sanic
|
||||
@@ -22,7 +22,7 @@ class SimpleView(HTTPMethodView):
|
||||
async def post(self, request):
|
||||
result = ''
|
||||
while True:
|
||||
body = await request.stream.get()
|
||||
body = await request.stream.read()
|
||||
if body is None:
|
||||
break
|
||||
result += body.decode('utf-8')
|
||||
@@ -33,29 +33,42 @@ class SimpleView(HTTPMethodView):
|
||||
async def handler(request):
|
||||
async def streaming(response):
|
||||
while True:
|
||||
body = await request.stream.get()
|
||||
body = await request.stream.read()
|
||||
if body is None:
|
||||
break
|
||||
body = body.decode('utf-8').replace('1', 'A')
|
||||
response.write(body)
|
||||
await response.write(body)
|
||||
return stream(streaming)
|
||||
|
||||
|
||||
@bp.put('/bp_stream', stream=True)
|
||||
async def bp_handler(request):
|
||||
async def bp_put_handler(request):
|
||||
result = ''
|
||||
while True:
|
||||
body = await request.stream.get()
|
||||
body = await request.stream.read()
|
||||
if body is None:
|
||||
break
|
||||
result += body.decode('utf-8').replace('1', 'A')
|
||||
return text(result)
|
||||
|
||||
|
||||
# You can also use `bp.add_route()` with stream argument
|
||||
async def bp_post_handler(request):
|
||||
result = ''
|
||||
while True:
|
||||
body = await request.stream.read()
|
||||
if body is None:
|
||||
break
|
||||
result += body.decode('utf-8').replace('1', 'A')
|
||||
return text(result)
|
||||
|
||||
bp.add_route(bp_post_handler, '/bp_stream', methods=['POST'], stream=True)
|
||||
|
||||
|
||||
async def post_handler(request):
|
||||
result = ''
|
||||
while True:
|
||||
body = await request.stream.get()
|
||||
body = await request.stream.read()
|
||||
if body is None:
|
||||
break
|
||||
result += body.decode('utf-8')
|
||||
@@ -85,8 +98,8 @@ app = Sanic(__name__)
|
||||
@app.route("/")
|
||||
async def test(request):
|
||||
async def sample_streaming_fn(response):
|
||||
response.write('foo,')
|
||||
response.write('bar')
|
||||
await response.write('foo,')
|
||||
await response.write('bar')
|
||||
|
||||
return stream(sample_streaming_fn, content_type='text/csv')
|
||||
```
|
||||
@@ -100,7 +113,31 @@ async def index(request):
|
||||
conn = await asyncpg.connect(database='test')
|
||||
async with conn.transaction():
|
||||
async for record in conn.cursor('SELECT generate_series(0, 10)'):
|
||||
response.write(record[0])
|
||||
await response.write(record[0])
|
||||
|
||||
return stream(stream_from_db)
|
||||
```
|
||||
|
||||
If a client supports HTTP/1.1, Sanic will use [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding); you can explicitly enable or disable it using the `chunked` option of the `stream` function.
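A minimal sketch of passing the `chunked` option explicitly (the route is illustrative):

```python
from sanic import Sanic
from sanic.response import stream

app = Sanic(__name__)


@app.route("/no-chunks")
async def no_chunks(request):
    async def streaming_fn(response):
        await response.write('foo')
        await response.write('bar')

    # chunked=False disables chunked transfer encoding for this response
    return stream(streaming_fn, content_type='text/plain', chunked=False)
```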
|
||||
|
||||
## File Streaming
|
||||
|
||||
Sanic provides the `sanic.response.file_stream` function, which is useful when you want to send a large file. It returns a `StreamingHTTPResponse` object and uses chunked transfer encoding by default; for this reason Sanic doesn't add a `Content-Length` HTTP header to the response. If you want to use this header, you can disable chunked transfer encoding and add it manually:
|
||||
|
||||
```python
|
||||
from aiofiles import os as async_os
|
||||
from sanic.response import file_stream
|
||||
|
||||
@app.route("/")
|
||||
async def index(request):
|
||||
file_path = "/srv/www/whatever.png"
|
||||
|
||||
file_stat = await async_os.stat(file_path)
|
||||
headers = {"Content-Length": str(file_stat.st_size)}
|
||||
|
||||
return await file_stream(
|
||||
file_path,
|
||||
headers=headers,
|
||||
chunked=False,
|
||||
)
|
||||
```
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Testing
|
||||
|
||||
Sanic endpoints can be tested locally using the `test_client` object, which
|
||||
depends on the additional [aiohttp](https://aiohttp.readthedocs.io/en/stable/)
|
||||
library.
|
||||
depends on the additional [`requests-async`](https://github.com/encode/requests-async)
|
||||
library, which implements an API that mirrors the `requests` library.
|
||||
|
||||
The `test_client` exposes `get`, `post`, `put`, `delete`, `patch`, `head` and `options` methods
|
||||
for you to run against your application. A simple example (using pytest) is as follows:
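A minimal sketch of such a test (the imported module name is hypothetical; import your own app):

```python
# Import the Sanic app, usually created with Sanic(__name__)
from external_server import app


def test_index_returns_200():
    request, response = app.test_client.get('/')
    assert response.status == 200
```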
|
||||
@@ -21,7 +21,7 @@ def test_index_put_not_allowed():
|
||||
```
|
||||
|
||||
Internally, each time you call one of the `test_client` methods, the Sanic app is run at `127.0.0.1:42101` and
|
||||
your test request is executed against your application, using `aiohttp`.
|
||||
your test request is executed against your application, using `requests-async`.
|
||||
|
||||
The `test_client` methods accept the following arguments and keyword arguments:
|
||||
|
||||
@@ -33,7 +33,7 @@ The `test_client` methods accept the following arguments and keyword arguments:
|
||||
- `server_kwargs` *(default `{}`)* a dict of additional arguments to pass into `app.run` before the test request is run.
|
||||
- `debug` *(default `False`)* A boolean which determines whether to run the server in debug mode.
|
||||
|
||||
The function further takes the `*request_args` and `**request_kwargs`, which are passed directly to the aiohttp ClientSession request.
|
||||
The function further takes the `*request_args` and `**request_kwargs`, which are passed directly to the request.
|
||||
|
||||
For example, to supply data to a GET request, you would do the following:
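A minimal sketch, assuming the `params` keyword is forwarded to the underlying `requests-async` call (the imported module is hypothetical):

```python
from external_server import app  # hypothetical module exposing your Sanic app


def test_get_request_includes_data():
    params = {'key1': 'value1', 'key2': 'value2'}
    request, response = app.test_client.get('/', params=params)

    assert request.args.get('key1') == 'value1'
```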
|
||||
|
||||
@@ -55,8 +55,25 @@ def test_post_json_request_includes_data():
|
||||
|
||||
|
||||
More information about
|
||||
the available arguments to aiohttp can be found
|
||||
[in the documentation for ClientSession](https://aiohttp.readthedocs.io/en/stable/client_reference.html#client-session).
|
||||
the available arguments to `requests-async` can be found
|
||||
[in the documentation for `requests`](https://2.python-requests.org/en/master/).
|
||||
|
||||
|
||||
## Using a random port
|
||||
|
||||
If you need to test using a free unprivileged port chosen by the kernel
|
||||
instead of the default with `SanicTestClient`, you can do so by specifying
|
||||
`port=None`. On most systems the port will be in the range 1024 to 65535.
|
||||
|
||||
```python
|
||||
# Import the Sanic app, usually created with Sanic(__name__)
|
||||
from external_server import app
|
||||
from sanic.testing import SanicTestClient
|
||||
|
||||
def test_index_returns_200():
|
||||
request, response = SanicTestClient(app, port=None).get('/')
|
||||
assert response.status == 200
|
||||
```
|
||||
|
||||
|
||||
## pytest-sanic
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
WebSocket
|
||||
=========
|
||||
|
||||
Sanic supports websockets, to setup a WebSocket:
|
||||
Sanic provides an easy-to-use abstraction on top of `websockets`.
Sanic supports websocket versions 7 and 8.
|
||||
|
||||
To setup a WebSocket:
|
||||
|
||||
.. code:: python
|
||||
|
||||
@@ -35,7 +38,7 @@ decorator:
|
||||
app.add_websocket_route(feed, '/feed')
|
||||
|
||||
|
||||
Handlers for a WebSocket route are passed the request as first argument, and a
|
||||
Handlers for a WebSocket route are invoked with the request as the first argument, and a
|
||||
WebSocket protocol object as second argument. The protocol object has ``send``
|
||||
and ``recv`` methods to send and receive data respectively.
|
||||
|
||||
@@ -43,6 +46,7 @@ and ``recv`` methods to send and receive data respectively.
|
||||
You can set up your own WebSocket configuration through ``app.config``, like
|
||||
|
||||
.. code:: python
|
||||
|
||||
app.config.WEBSOCKET_MAX_SIZE = 2 ** 20
|
||||
app.config.WEBSOCKET_MAX_QUEUE = 32
|
||||
app.config.WEBSOCKET_READ_LIMIT = 2 ** 16
|
||||
|
||||
@@ -1,20 +1,19 @@
|
||||
name: py35
|
||||
name: py36
|
||||
dependencies:
|
||||
- openssl=1.0.2g=0
|
||||
- pip=8.1.1=py35_0
|
||||
- python=3.5.1=0
|
||||
- readline=6.2=2
|
||||
- setuptools=20.3=py35_0
|
||||
- sqlite=3.9.2=0
|
||||
- tk=8.5.18=0
|
||||
- wheel=0.29.0=py35_0
|
||||
- xz=5.0.5=1
|
||||
- zlib=1.2.8=0
|
||||
- pip=18.1=py36_0
|
||||
- python=3.6=0
|
||||
- setuptools=40.4.3=py36_0
|
||||
- pip:
|
||||
- httptools>=0.0.10
|
||||
- uvloop>=0.5.3
|
||||
- httptools>=0.0.9
|
||||
- ujson>=1.35
|
||||
- aiofiles>=0.3.0
|
||||
- websockets>=3.2
|
||||
- websockets>=6.0,<7.0
|
||||
- multidict>=4.0,<5.0
|
||||
- sphinx==1.8.3
|
||||
- sphinx_rtd_theme==0.4.2
|
||||
- recommonmark==0.5.0
|
||||
- requests-async==0.5.0
|
||||
- sphinxcontrib-asyncio>=0.2.0
|
||||
- https://github.com/channelcat/docutils-fork/zipball/master
|
||||
- docutils==0.14
|
||||
- pygments==2.3.1
|
||||
|
||||
examples/amending_request_object.py (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
from sanic import Sanic
|
||||
from sanic.response import text
|
||||
from random import randint
|
||||
|
||||
app = Sanic()
|
||||
|
||||
|
||||
@app.middleware('request')
|
||||
def append_request(request):
|
||||
# Add new key with random value
|
||||
request['num'] = randint(0, 100)
|
||||
|
||||
|
||||
@app.get('/pop')
|
||||
def pop_handler(request):
|
||||
# Pop key from request object
|
||||
num = request.pop('num')
|
||||
return text(num)
|
||||
|
||||
|
||||
@app.get('/key_exist')
|
||||
def key_exist_handler(request):
|
||||
# Check whether the key exists
|
||||
if 'num' in request:
|
||||
return text('num exist in request')
|
||||
|
||||
return text('num does not exist in request')
|
||||
|
||||
|
||||
app.run(host="0.0.0.0", port=8000, debug=True)
|
||||
@@ -76,7 +76,7 @@ async def test(request):
|
||||
|
||||
if __name__ == '__main__':
|
||||
asyncio.set_event_loop(uvloop.new_event_loop())
|
||||
server = app.create_server(host="0.0.0.0", port=8000)
|
||||
server = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.set_task_factory(context.task_factory)
|
||||
task = asyncio.ensure_future(server)
|
||||
|
||||
examples/logdna_example.py (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
import logging
|
||||
import socket
|
||||
from os import getenv
|
||||
from platform import node
|
||||
from uuid import getnode as get_mac
|
||||
|
||||
from logdna import LogDNAHandler
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
from sanic.request import Request
|
||||
|
||||
log = logging.getLogger('logdna')
|
||||
log.setLevel(logging.INFO)
|
||||
|
||||
|
||||
def get_my_ip_address(remote_server="google.com"):
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
|
||||
s.connect((remote_server, 80))
|
||||
return s.getsockname()[0]
|
||||
|
||||
|
||||
def get_mac_address():
|
||||
h = iter(hex(get_mac())[2:].zfill(12))
|
||||
return ":".join(i + next(h) for i in h)
|
||||
|
||||
|
||||
logdna_options = {
|
||||
"app": __name__,
|
||||
"index_meta": True,
|
||||
"hostname": node(),
|
||||
"ip": get_my_ip_address(),
|
||||
"mac": get_mac_address()
|
||||
}
|
||||
|
||||
logdna_handler = LogDNAHandler(getenv("LOGDNA_API_KEY"), options=logdna_options)
|
||||
|
||||
logdna = logging.getLogger(__name__)
|
||||
logdna.setLevel(logging.INFO)
|
||||
logdna.addHandler(logdna_handler)
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
|
||||
@app.middleware
|
||||
def log_request(request: Request):
|
||||
logdna.info("I was Here with a new Request to URL: {}".format(request.url))
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def default(request):
|
||||
return json({
|
||||
"response": "I was here"
|
||||
})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(
|
||||
host="0.0.0.0",
|
||||
port=getenv("PORT", 8080)
|
||||
)
|
||||
examples/raygun_example.py (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
from os import getenv
|
||||
|
||||
from raygun4py.raygunprovider import RaygunSender
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.exceptions import SanicException
|
||||
from sanic.handlers import ErrorHandler
|
||||
|
||||
|
||||
class RaygunExceptionReporter(ErrorHandler):
|
||||
|
||||
def __init__(self, raygun_api_key=None):
|
||||
super().__init__()
|
||||
if raygun_api_key is None:
|
||||
raygun_api_key = getenv("RAYGUN_API_KEY")
|
||||
|
||||
self.sender = RaygunSender(raygun_api_key)
|
||||
|
||||
def default(self, request, exception):
|
||||
self.sender.send_exception(exception=exception)
|
||||
return super().default(request, exception)
|
||||
|
||||
|
||||
raygun_error_reporter = RaygunExceptionReporter()
|
||||
app = Sanic(__name__, error_handler=raygun_error_reporter)
|
||||
|
||||
|
||||
@app.route("/raise")
|
||||
async def test(request):
|
||||
raise SanicException('You Broke It!')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(
|
||||
host="0.0.0.0",
|
||||
port=getenv("PORT", 8080)
|
||||
)
|
||||
@@ -30,7 +30,7 @@ async def handler(request):
|
||||
if body is None:
|
||||
break
|
||||
body = body.decode('utf-8').replace('1', 'A')
|
||||
response.write(body)
|
||||
await response.write(body)
|
||||
return stream(streaming)
|
||||
|
||||
|
||||
|
||||
examples/rollbar_example.py (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
import rollbar
|
||||
|
||||
from sanic.handlers import ErrorHandler
|
||||
from sanic import Sanic
|
||||
from sanic.exceptions import SanicException
|
||||
from os import getenv
|
||||
|
||||
rollbar.init(getenv("ROLLBAR_API_KEY"))
|
||||
|
||||
|
||||
class RollbarExceptionHandler(ErrorHandler):
|
||||
|
||||
def default(self, request, exception):
|
||||
rollbar.report_message(str(exception))
|
||||
return super().default(request, exception)
|
||||
|
||||
|
||||
app = Sanic(__name__, error_handler=RollbarExceptionHandler())
|
||||
|
||||
|
||||
@app.route("/raise")
|
||||
def create_error(request):
|
||||
raise SanicException("I was here and I don't like where I am")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(
|
||||
host="0.0.0.0",
|
||||
port=getenv("PORT", 8080)
|
||||
)
|
||||
examples/run_asgi.py (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
"""
|
||||
1. Create a simple Sanic app
|
||||
2. Run with an ASGI server:
|
||||
$ uvicorn run_asgi:app
|
||||
or
|
||||
$ hypercorn run_asgi:app
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from sanic import Sanic, response
|
||||
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
|
||||
@app.route("/text")
|
||||
def handler_text(request):
|
||||
return response.text("Hello")
|
||||
|
||||
|
||||
@app.route("/json")
|
||||
def handler_json(request):
|
||||
return response.json({"foo": "bar"})
|
||||
|
||||
|
||||
@app.websocket("/ws")
|
||||
async def handler_ws(request, ws):
|
||||
name = "<someone>"
|
||||
while True:
|
||||
data = f"Hello {name}"
|
||||
await ws.send(data)
|
||||
name = await ws.recv()
|
||||
|
||||
if not name:
|
||||
break
|
||||
|
||||
|
||||
@app.route("/file")
|
||||
async def handler_file(request):
|
||||
return await response.file(Path("../") / "setup.py")
|
||||
|
||||
|
||||
@app.route("/file_stream")
|
||||
async def handler_file_stream(request):
|
||||
return await response.file_stream(
|
||||
Path("../") / "setup.py", chunk_size=1024
|
||||
)
|
||||
|
||||
|
||||
@app.route("/stream", stream=True)
|
||||
async def handler_stream(request):
|
||||
while True:
|
||||
body = await request.stream.read()
|
||||
if body is None:
|
||||
break
|
||||
body = body.decode("utf-8").replace("1", "A")
|
||||
# await response.write(body)
|
||||
return response.stream(body)
|
||||
|
||||
|
||||
@app.listener("before_server_start")
|
||||
async def listener_before_server_start(*args, **kwargs):
|
||||
print("before_server_start")
|
||||
|
||||
|
||||
@app.listener("after_server_start")
|
||||
async def listener_after_server_start(*args, **kwargs):
|
||||
print("after_server_start")
|
||||
|
||||
|
||||
@app.listener("before_server_stop")
|
||||
async def listener_before_server_stop(*args, **kwargs):
|
||||
print("before_server_stop")
|
||||
|
||||
|
||||
@app.listener("after_server_stop")
|
||||
async def listener_after_server_stop(*args, **kwargs):
|
||||
print("after_server_stop")
|
||||
|
||||
|
||||
@app.middleware("request")
|
||||
async def print_on_request(request):
|
||||
print("print_on_request")
|
||||
|
||||
|
||||
@app.middleware("response")
|
||||
async def print_on_response(request, response):
|
||||
print("print_on_response")
|
||||
@@ -12,7 +12,7 @@ async def test(request):
|
||||
return response.json({"answer": "42"})
|
||||
|
||||
asyncio.set_event_loop(uvloop.new_event_loop())
|
||||
server = app.create_server(host="0.0.0.0", port=8000)
|
||||
server = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
|
||||
loop = asyncio.get_event_loop()
|
||||
task = asyncio.ensure_future(server)
|
||||
signal(SIGINT, lambda s, f: loop.stop())
|
||||
|
||||
examples/run_async_advanced.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
from sanic import Sanic
|
||||
from sanic import response
|
||||
from signal import signal, SIGINT
|
||||
import asyncio
|
||||
import uvloop
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
@app.listener('after_server_start')
|
||||
async def after_start_test(app, loop):
|
||||
print("Async Server Started!")
|
||||
|
||||
@app.route("/")
|
||||
async def test(request):
|
||||
return response.json({"answer": "42"})
|
||||
|
||||
asyncio.set_event_loop(uvloop.new_event_loop())
|
||||
serv_coro = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
|
||||
loop = asyncio.get_event_loop()
|
||||
serv_task = asyncio.ensure_future(serv_coro, loop=loop)
|
||||
signal(SIGINT, lambda s, f: loop.stop())
|
||||
server = loop.run_until_complete(serv_task)
|
||||
server.after_start()
|
||||
try:
|
||||
loop.run_forever()
|
||||
except KeyboardInterrupt as e:
|
||||
loop.stop()
|
||||
finally:
|
||||
server.before_stop()
|
||||
|
||||
# Wait for server to close
|
||||
close_task = server.close()
|
||||
loop.run_until_complete(close_task)
|
||||
|
||||
# Complete all tasks on the loop
|
||||
for connection in server.connections:
|
||||
connection.close_if_idle()
|
||||
server.after_stop()
|
||||
examples/sentry_example.py (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
from os import getenv
|
||||
|
||||
from sentry_sdk import init as sentry_init
|
||||
from sentry_sdk.integrations.sanic import SanicIntegration
|
||||
|
||||
from sanic import Sanic
|
||||
from sanic.response import json
|
||||
|
||||
sentry_init(
|
||||
dsn=getenv("SENTRY_DSN"),
|
||||
integrations=[SanicIntegration()],
|
||||
)
|
||||
|
||||
app = Sanic(__name__)
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
@app.route("/working")
|
||||
async def working_path(request):
|
||||
return json({
|
||||
"response": "Working API Response"
|
||||
})
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
@app.route("/raise-error")
|
||||
async def raise_error(request):
|
||||
raise Exception("Testing Sentry Integration")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(
|
||||
host="0.0.0.0",
|
||||
port=getenv("PORT", 8080)
|
||||
)
|
||||
@@ -1,13 +0,0 @@
|
||||
aiofiles
|
||||
aiohttp>=2.3.0,<=3.2.1
|
||||
chardet<=2.3.0
|
||||
beautifulsoup4
|
||||
coverage
|
||||
httptools
|
||||
flake8
|
||||
pytest==3.3.2
|
||||
tox
|
||||
ujson; sys_platform != "win32" and implementation_name == "cpython"
|
||||
uvloop; sys_platform != "win32" and implementation_name == "cpython"
|
||||
gunicorn
|
||||
multidict>=4.0,<5.0
|
||||
@@ -1,4 +0,0 @@
|
||||
sphinx
|
||||
sphinx_rtd_theme
|
||||
recommonmark
|
||||
sphinxcontrib-asyncio
|
||||
@@ -1,6 +0,0 @@
|
||||
aiofiles
|
||||
httptools
|
||||
ujson; sys_platform != "win32" and implementation_name == "cpython"
|
||||
uvloop; sys_platform != "win32" and implementation_name == "cpython"
|
||||
websockets>=5.0,<6.0
|
||||
multidict>=4.0,<5.0
|
||||
@@ -1,6 +1,6 @@
|
||||
from sanic.__version__ import __version__
|
||||
from sanic.app import Sanic
|
||||
from sanic.blueprints import Blueprint
|
||||
|
||||
__version__ = '0.8.0'
|
||||
|
||||
__all__ = ['Sanic', 'Blueprint']
|
||||
__all__ = ["Sanic", "Blueprint", "__version__"]
|
||||
|
||||
@@ -1,20 +1,24 @@
|
||||
from argparse import ArgumentParser
|
||||
from importlib import import_module
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from sanic.log import logger
|
||||
from sanic.app import Sanic
|
||||
from sanic.log import logger
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = ArgumentParser(prog='sanic')
|
||||
parser.add_argument('--host', dest='host', type=str, default='127.0.0.1')
|
||||
parser.add_argument('--port', dest='port', type=int, default=8000)
|
||||
parser.add_argument('--cert', dest='cert', type=str,
|
||||
help='location of certificate for SSL')
|
||||
parser.add_argument('--key', dest='key', type=str,
|
||||
help='location of keyfile for SSL.')
|
||||
parser.add_argument('--workers', dest='workers', type=int, default=1, )
|
||||
parser.add_argument('--debug', dest='debug', action="store_true")
|
||||
parser.add_argument('module')
|
||||
parser = ArgumentParser(prog="sanic")
|
||||
parser.add_argument("--host", dest="host", type=str, default="127.0.0.1")
|
||||
parser.add_argument("--port", dest="port", type=int, default=8000)
|
||||
parser.add_argument(
|
||||
"--cert", dest="cert", type=str, help="location of certificate for SSL"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--key", dest="key", type=str, help="location of keyfile for SSL."
|
||||
)
|
||||
parser.add_argument("--workers", dest="workers", type=int, default=1)
|
||||
parser.add_argument("--debug", dest="debug", action="store_true")
|
||||
parser.add_argument("module")
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
@@ -25,20 +29,32 @@ if __name__ == "__main__":
|
||||
module = import_module(module_name)
|
||||
app = getattr(module, app_name, None)
|
||||
if not isinstance(app, Sanic):
|
||||
raise ValueError("Module is not a Sanic app, it is a {}. "
|
||||
"Perhaps you meant {}.app?"
|
||||
.format(type(app).__name__, args.module))
|
||||
raise ValueError(
|
||||
"Module is not a Sanic app, it is a {}. "
|
||||
"Perhaps you meant {}.app?".format(
|
||||
type(app).__name__, args.module
|
||||
)
|
||||
)
|
||||
if args.cert is not None or args.key is not None:
|
||||
ssl = {'cert': args.cert, 'key': args.key}
|
||||
ssl = {
|
||||
"cert": args.cert,
|
||||
"key": args.key,
|
||||
} # type: Optional[Dict[str, Any]]
|
||||
else:
|
||||
ssl = None
|
||||
|
||||
app.run(host=args.host, port=args.port,
|
||||
workers=args.workers, debug=args.debug, ssl=ssl)
|
||||
app.run(
|
||||
host=args.host,
|
||||
port=args.port,
|
||||
workers=args.workers,
|
||||
debug=args.debug,
|
||||
ssl=ssl,
|
||||
)
|
||||
except ImportError as e:
|
||||
logger.error("No module named {} found.\n"
|
||||
" Example File: project/sanic_server.py -> app\n"
|
||||
" Example Module: project.sanic_server.app"
|
||||
.format(e.name))
|
||||
except ValueError as e:
|
||||
logger.error("{}".format(e))
|
||||
logger.error(
|
||||
"No module named {} found.\n"
|
||||
" Example File: project/sanic_server.py -> app\n"
|
||||
" Example Module: project.sanic_server.app".format(e.name)
|
||||
)
|
||||
except ValueError:
|
||||
logger.exception("Failed to run app")
|
||||
|
||||
sanic/__version__.py (new file, 1 line)
@@ -0,0 +1 @@
|
||||
__version__ = "19.9.0"
|
||||
sanic/app.py (1086 lines; file diff suppressed because it is too large)
sanic/asgi.py (new file, 393 lines)
@@ -0,0 +1,393 @@
|
||||
import asyncio
|
||||
import warnings
|
||||
|
||||
from inspect import isawaitable
|
||||
from typing import (
|
||||
Any,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Dict,
|
||||
List,
|
||||
MutableMapping,
|
||||
Optional,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
from urllib.parse import quote
|
||||
|
||||
from requests_async import ASGISession # type: ignore
|
||||
|
||||
import sanic.app # noqa
|
||||
|
||||
from sanic.compat import Header
|
||||
from sanic.exceptions import InvalidUsage, ServerError
|
||||
from sanic.log import logger
|
||||
from sanic.request import Request
|
||||
from sanic.response import HTTPResponse, StreamingHTTPResponse
|
||||
from sanic.server import StreamBuffer
|
||||
from sanic.websocket import WebSocketConnection
|
||||
|
||||
|
||||
ASGIScope = MutableMapping[str, Any]
|
||||
ASGIMessage = MutableMapping[str, Any]
|
||||
ASGISend = Callable[[ASGIMessage], Awaitable[None]]
|
||||
ASGIReceive = Callable[[], Awaitable[ASGIMessage]]
|
||||
|
||||
|
||||
class MockProtocol:
|
||||
def __init__(self, transport: "MockTransport", loop):
|
||||
self.transport = transport
|
||||
self._not_paused = asyncio.Event(loop=loop)
|
||||
self._not_paused.set()
|
||||
self._complete = asyncio.Event(loop=loop)
|
||||
|
||||
def pause_writing(self) -> None:
|
||||
self._not_paused.clear()
|
||||
|
||||
def resume_writing(self) -> None:
|
||||
self._not_paused.set()
|
||||
|
||||
async def complete(self) -> None:
|
||||
self._not_paused.set()
|
||||
await self.transport.send(
|
||||
{"type": "http.response.body", "body": b"", "more_body": False}
|
||||
)
|
||||
|
||||
@property
|
||||
def is_complete(self) -> bool:
|
||||
return self._complete.is_set()
|
||||
|
||||
async def push_data(self, data: bytes) -> None:
|
||||
if not self.is_complete:
|
||||
await self.transport.send(
|
||||
{"type": "http.response.body", "body": data, "more_body": True}
|
||||
)
|
||||
|
||||
async def drain(self) -> None:
|
||||
await self._not_paused.wait()
|
||||
|
||||
|
||||
class MockTransport:
|
||||
_protocol: Optional[MockProtocol]
|
||||
|
||||
def __init__(
|
||||
self, scope: ASGIScope, receive: ASGIReceive, send: ASGISend
|
||||
) -> None:
|
||||
self.scope = scope
|
||||
self._receive = receive
|
||||
self._send = send
|
||||
self._protocol = None
|
||||
self.loop = None
|
||||
|
||||
def get_protocol(self) -> MockProtocol:
|
||||
if not self._protocol:
|
||||
self._protocol = MockProtocol(self, self.loop)
|
||||
return self._protocol
|
||||
|
||||
def get_extra_info(self, info: str) -> Union[str, bool, None]:
|
||||
if info == "peername":
|
||||
return self.scope.get("server")
|
||||
elif info == "sslcontext":
|
||||
return self.scope.get("scheme") in ["https", "wss"]
|
||||
return None
|
||||
|
||||
def get_websocket_connection(self) -> WebSocketConnection:
|
||||
try:
|
||||
return self._websocket_connection
|
||||
except AttributeError:
|
||||
raise InvalidUsage("Improper websocket connection.")
|
||||
|
||||
def create_websocket_connection(
|
||||
self, send: ASGISend, receive: ASGIReceive
|
||||
) -> WebSocketConnection:
|
||||
self._websocket_connection = WebSocketConnection(send, receive)
|
||||
return self._websocket_connection
|
||||
|
||||
def add_task(self) -> None:
|
||||
raise NotImplementedError
|
||||
|
||||
async def send(self, data) -> None:
|
||||
# TODO:
|
||||
# - Validation on data and that it is formatted properly and is valid
|
||||
await self._send(data)
|
||||
|
||||
async def receive(self) -> ASGIMessage:
|
||||
return await self._receive()
|
||||
|
||||
|
||||
class Lifespan:
|
||||
def __init__(self, asgi_app: "ASGIApp") -> None:
|
||||
self.asgi_app = asgi_app
|
||||
|
||||
if "before_server_start" in self.asgi_app.sanic_app.listeners:
|
||||
warnings.warn(
|
||||
'You have set a listener for "before_server_start" '
|
||||
"in ASGI mode. "
|
||||
"It will be executed as early as possible, but not before "
|
||||
"the ASGI server is started."
|
||||
)
|
||||
if "after_server_stop" in self.asgi_app.sanic_app.listeners:
|
||||
warnings.warn(
|
||||
'You have set a listener for "after_server_stop" '
|
||||
"in ASGI mode. "
|
||||
"It will be executed as late as possible, but not after "
|
||||
"the ASGI server is stopped."
|
||||
)
|
||||
|
||||
async def startup(self) -> None:
|
||||
"""
|
||||
Gather the listeners to fire on server start.
|
||||
Because we are using a third-party server and not Sanic server, we do
|
||||
not have access to fire anything BEFORE the server starts.
|
||||
Therefore, we fire before_server_start and after_server_start
|
||||
in sequence since the ASGI lifespan protocol only supports a single
|
||||
startup event.
|
||||
"""
|
||||
listeners = self.asgi_app.sanic_app.listeners.get(
|
||||
"before_server_start", []
|
||||
) + self.asgi_app.sanic_app.listeners.get("after_server_start", [])
|
||||
|
||||
for handler in listeners:
|
||||
response = handler(
|
||||
self.asgi_app.sanic_app, self.asgi_app.sanic_app.loop
|
||||
)
|
||||
if isawaitable(response):
|
||||
await response
|
||||
|
||||
async def shutdown(self) -> None:
|
||||
"""
|
||||
Gather the listeners to fire on server stop.
|
||||
Because we are using a third-party server and not Sanic server, we do
|
||||
not have access to fire anything AFTER the server stops.
|
||||
Therefore, we fire before_server_stop and after_server_stop
|
||||
in sequence since the ASGI lifespan protocol only supports a single
|
||||
shutdown event.
|
||||
"""
|
||||
listeners = self.asgi_app.sanic_app.listeners.get(
|
||||
"before_server_stop", []
|
||||
) + self.asgi_app.sanic_app.listeners.get("after_server_stop", [])
|
||||
|
||||
for handler in listeners:
|
||||
response = handler(
|
||||
self.asgi_app.sanic_app, self.asgi_app.sanic_app.loop
|
||||
)
|
||||
if isawaitable(response):
|
||||
await response
|
||||
|
||||
async def __call__(
|
||||
self, scope: ASGIScope, receive: ASGIReceive, send: ASGISend
|
||||
) -> None:
|
||||
message = await receive()
|
||||
if message["type"] == "lifespan.startup":
|
||||
await self.startup()
|
||||
await send({"type": "lifespan.startup.complete"})
|
||||
|
||||
message = await receive()
|
||||
if message["type"] == "lifespan.shutdown":
|
||||
await self.shutdown()
|
||||
await send({"type": "lifespan.shutdown.complete"})
|
||||
|
||||
|
||||
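A short sketch of the listener registration the warnings and the startup/shutdown methods above refer to (listener names are illustrative); in ASGI mode both start listeners run back-to-back on the single "lifespan.startup" event, and both stop listeners on the single "lifespan.shutdown" event:

# Illustrative only: which listeners Lifespan.startup()/shutdown() collect.
from sanic import Sanic

app = Sanic("asgi_lifespan_example")

@app.listener("before_server_start")
async def init_resources(app, loop):
    ...  # runs first when the single "lifespan.startup" message arrives

@app.listener("after_server_start")
async def announce(app, loop):
    ...  # runs immediately afterwards, on the same startup event

@app.listener("before_server_stop")
async def drain(app, loop):
    ...  # runs first on "lifespan.shutdown"

@app.listener("after_server_stop")
async def release_resources(app, loop):
    ...  # runs last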
class ASGIApp:
|
||||
sanic_app: Union[ASGISession, "sanic.app.Sanic"]
|
||||
request: Request
|
||||
transport: MockTransport
|
||||
do_stream: bool
|
||||
lifespan: Lifespan
|
||||
ws: Optional[WebSocketConnection]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.ws = None
|
||||
|
||||
@classmethod
|
||||
async def create(
|
||||
cls, sanic_app, scope: ASGIScope, receive: ASGIReceive, send: ASGISend
|
||||
) -> "ASGIApp":
|
||||
instance = cls()
|
||||
instance.sanic_app = sanic_app
|
||||
instance.transport = MockTransport(scope, receive, send)
|
||||
instance.transport.loop = sanic_app.loop
|
||||
setattr(instance.transport, "add_task", sanic_app.loop.create_task)
|
||||
|
||||
headers = Header(
|
||||
[
|
||||
(key.decode("latin-1"), value.decode("latin-1"))
|
||||
for key, value in scope.get("headers", [])
|
||||
]
|
||||
)
|
||||
instance.do_stream = (
|
||||
True if headers.get("expect") == "100-continue" else False
|
||||
)
|
||||
instance.lifespan = Lifespan(instance)
|
||||
|
||||
if scope["type"] == "lifespan":
|
||||
await instance.lifespan(scope, receive, send)
|
||||
else:
|
||||
url_bytes = scope.get("root_path", "") + quote(scope["path"])
|
||||
url_bytes = url_bytes.encode("latin-1")
|
||||
url_bytes += b"?" + scope["query_string"]
|
||||
|
||||
if scope["type"] == "http":
|
||||
version = scope["http_version"]
|
||||
method = scope["method"]
|
||||
elif scope["type"] == "websocket":
|
||||
version = "1.1"
|
||||
method = "GET"
|
||||
|
||||
instance.ws = instance.transport.create_websocket_connection(
|
||||
send, receive
|
||||
)
|
||||
await instance.ws.accept()
|
||||
else:
|
||||
pass
|
||||
# TODO:
|
||||
# - close connection
|
||||
|
||||
request_class = sanic_app.request_class or Request
|
||||
instance.request = request_class(
|
||||
url_bytes,
|
||||
headers,
|
||||
version,
|
||||
method,
|
||||
instance.transport,
|
||||
sanic_app,
|
||||
)
|
||||
|
||||
if sanic_app.is_request_stream:
|
||||
is_stream_handler = sanic_app.router.is_stream_handler(
|
||||
instance.request
|
||||
)
|
||||
if is_stream_handler:
|
||||
instance.request.stream = StreamBuffer(
|
||||
sanic_app.config.REQUEST_BUFFER_QUEUE_SIZE
|
||||
)
|
||||
instance.do_stream = True
|
||||
|
||||
return instance
|
||||
|
||||
async def read_body(self) -> bytes:
|
||||
"""
|
||||
Read and return the entire body from an incoming ASGI message.
|
||||
"""
|
||||
body = b""
|
||||
more_body = True
|
||||
while more_body:
|
||||
message = await self.transport.receive()
|
||||
body += message.get("body", b"")
|
||||
more_body = message.get("more_body", False)
|
||||
|
||||
return body
|
||||
|
||||
async def stream_body(self) -> None:
|
||||
"""
|
||||
Read and stream the body in chunks from an incoming ASGI message.
|
||||
"""
|
||||
more_body = True
|
||||
|
||||
while more_body:
|
||||
message = await self.transport.receive()
|
||||
chunk = message.get("body", b"")
|
||||
await self.request.stream.put(chunk)
|
||||
|
||||
more_body = message.get("more_body", False)
|
||||
|
||||
await self.request.stream.put(None)
|
||||
|
||||
async def __call__(self) -> None:
|
||||
"""
|
||||
Handle the incoming request.
|
||||
"""
|
||||
if not self.do_stream:
|
||||
self.request.body = await self.read_body()
|
||||
else:
|
||||
self.sanic_app.loop.create_task(self.stream_body())
|
||||
|
||||
handler = self.sanic_app.handle_request
|
||||
callback = None if self.ws else self.stream_callback
|
||||
await handler(self.request, None, callback)
|
||||
|
||||
async def stream_callback(self, response: HTTPResponse) -> None:
|
||||
"""
|
||||
Write the response.
|
||||
"""
|
||||
headers: List[Tuple[bytes, bytes]] = []
|
||||
cookies: Dict[str, str] = {}
|
||||
try:
|
||||
cookies = {
|
||||
v.key: v
|
||||
for _, v in list(
|
||||
filter(
|
||||
lambda item: item[0].lower() == "set-cookie",
|
||||
response.headers.items(),
|
||||
)
|
||||
)
|
||||
}
|
||||
headers += [
|
||||
(str(name).encode("latin-1"), str(value).encode("latin-1"))
|
||||
for name, value in response.headers.items()
|
||||
if name.lower() not in ["set-cookie"]
|
||||
]
|
||||
except AttributeError:
|
||||
logger.error(
|
||||
"Invalid response object for url %s, "
|
||||
"Expected Type: HTTPResponse, Actual Type: %s",
|
||||
self.request.url,
|
||||
type(response),
|
||||
)
|
||||
exception = ServerError("Invalid response type")
|
||||
response = self.sanic_app.error_handler.response(
|
||||
self.request, exception
|
||||
)
|
||||
headers = [
|
||||
(str(name).encode("latin-1"), str(value).encode("latin-1"))
|
||||
for name, value in response.headers.items()
|
||||
if name not in (b"Set-Cookie",)
|
||||
]
|
||||
|
||||
if "content-length" not in response.headers and not isinstance(
|
||||
response, StreamingHTTPResponse
|
||||
):
|
||||
headers += [
|
||||
(b"content-length", str(len(response.body)).encode("latin-1"))
|
||||
]
|
||||
|
||||
if "content-type" not in response.headers:
|
||||
headers += [
|
||||
(b"content-type", str(response.content_type).encode("latin-1"))
|
||||
]
|
||||
|
||||
if response.cookies:
|
||||
cookies.update(
|
||||
{
|
||||
v.key: v
|
||||
for _, v in response.cookies.items()
|
||||
if v.key not in cookies.keys()
|
||||
}
|
||||
)
|
||||
|
||||
headers += [
|
||||
(b"set-cookie", cookie.encode("utf-8"))
|
||||
for k, cookie in cookies.items()
|
||||
]
|
||||
|
||||
await self.transport.send(
|
||||
{
|
||||
"type": "http.response.start",
|
||||
"status": response.status,
|
||||
"headers": headers,
|
||||
}
|
||||
)
|
||||
|
||||
if isinstance(response, StreamingHTTPResponse):
|
||||
response.protocol = self.transport.get_protocol()
|
||||
await response.stream()
|
||||
await response.protocol.complete()
|
||||
|
||||
else:
|
||||
await self.transport.send(
|
||||
{
|
||||
"type": "http.response.body",
|
||||
"body": response.body,
|
||||
"more_body": False,
|
||||
}
|
||||
)
|
||||
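Since `ASGIApp.create` is driven by whatever ASGI server hosts the application, a typical way to exercise this adapter is to hand the Sanic app to such a server; a hedged sketch (uvicorn is just one possible server, not mandated by this diff):

# Illustrative: serving a Sanic app through the ASGI adapter above.
from sanic import Sanic
from sanic.response import text

app = Sanic("asgi_app")

@app.route("/")
async def handler(request):
    return text("served via ASGI")

# From a shell (assuming uvicorn is installed):
#   uvicorn path.to.module:app
# The server calls app(scope, receive, send); for "http" and "websocket"
# scopes that builds an ASGIApp per connection, and for the "lifespan"
# scope it drives the Lifespan handler shown above.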
121
sanic/blueprint_group.py
Normal file
@@ -0,0 +1,121 @@
|
||||
from collections.abc import MutableSequence
|
||||
|
||||
|
||||
class BlueprintGroup(MutableSequence):
|
||||
"""
|
||||
This class provides a mechanism to implement a Blueprint Group
|
||||
using the :meth:`~sanic.blueprints.Blueprint.group` method in
|
||||
:class:`~sanic.blueprints.Blueprint`. To avoid having to re-write
|
||||
some of the existing implementation, this class provides a custom
|
||||
iterator implementation that will let you use the object of this
|
||||
class as a list/tuple inside the existing implementation.
|
||||
"""
|
||||
|
||||
__slots__ = ("_blueprints", "_url_prefix")
|
||||
|
||||
def __init__(self, url_prefix=None):
|
||||
"""
|
||||
Create a new Blueprint Group
|
||||
|
||||
:param url_prefix: URL to be prefixed before all the Blueprint prefixes
|
||||
"""
|
||||
self._blueprints = []
|
||||
self._url_prefix = url_prefix
|
||||
|
||||
@property
|
||||
def url_prefix(self):
|
||||
"""
|
||||
Retrieve the URL prefix being used for the Current Blueprint Group
|
||||
:return: string with url prefix
|
||||
"""
|
||||
return self._url_prefix
|
||||
|
||||
@property
|
||||
def blueprints(self):
|
||||
"""
|
||||
Retrieve a list of all the available blueprints under this group.
|
||||
:return: List of Blueprint instance
|
||||
"""
|
||||
return self._blueprints
|
||||
|
||||
def __iter__(self):
|
||||
"""Tun the class Blueprint Group into an Iterable item"""
|
||||
return iter(self._blueprints)
|
||||
|
||||
def __getitem__(self, item):
|
||||
"""
|
||||
This method returns a blueprint inside the group specified by
|
||||
an index value. This will enable indexing, splice and slicing
|
||||
of the blueprint group like we can do with regular list/tuple.
|
||||
|
||||
This method is provided to ensure backward compatibility with
|
||||
any of the pre-existing usage that might break.
|
||||
|
||||
:param item: Index of the Blueprint item in the group
|
||||
:return: Blueprint object
|
||||
"""
|
||||
return self._blueprints[item]
|
||||
|
||||
def __setitem__(self, index, item) -> None:
|
||||
"""
|
||||
Abstract method implemented to turn the `BlueprintGroup` class
|
||||
into a list like object to support all the existing behavior.
|
||||
|
||||
This method is used to perform the list's indexed setter operation.
|
||||
|
||||
:param index: Index to use for inserting a new Blueprint item
|
||||
:param item: New `Blueprint` object.
|
||||
:return: None
|
||||
"""
|
||||
self._blueprints[index] = item
|
||||
|
||||
def __delitem__(self, index) -> None:
|
||||
"""
|
||||
Abstract method implemented to turn the `BlueprintGroup` class
|
||||
into a list like object to support all the existing behavior.
|
||||
|
||||
This method is used to delete an item from the list of blueprint
|
||||
groups like it can be done on a regular list with index.
|
||||
|
||||
:param index: Index to use for removing a Blueprint item
|
||||
:return: None
|
||||
"""
|
||||
del self._blueprints[index]
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""
|
||||
Get the Length of the blueprint group object.
|
||||
:return: Length of Blueprint group object
|
||||
"""
|
||||
return len(self._blueprints)
|
||||
|
||||
def insert(self, index: int, item: object) -> None:
|
||||
"""
|
||||
The Abstract class `MutableSequence` leverages this insert method to
|
||||
perform the `BlueprintGroup.append` operation.
|
||||
|
||||
:param index: Index to use for inserting a new Blueprint item
|
||||
:param item: New `Blueprint` object.
|
||||
:return: None
|
||||
"""
|
||||
self._blueprints.insert(index, item)
|
||||
|
||||
def middleware(self, *args, **kwargs):
|
||||
"""
|
||||
A decorator that can be used to implement a Middleware plugin to
|
||||
all of the Blueprints that belongs to this specific Blueprint Group.
|
||||
|
||||
In case of nested Blueprint Groups, the same middleware is applied
|
||||
across each of the Blueprints recursively.
|
||||
|
||||
:param args: Optional positional parameters to be used with the middleware
|
||||
:param kwargs: Optional keyword arguments to use with the middleware
|
||||
:return: Partial function to apply the middleware
|
||||
"""
|
||||
kwargs["bp_group"] = True
|
||||
|
||||
def register_middleware_for_blueprints(fn):
|
||||
for blueprint in self.blueprints:
|
||||
blueprint.middleware(fn, *args, **kwargs)
|
||||
|
||||
return register_middleware_for_blueprints
|
||||
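A small usage sketch for the class above (blueprint and handler names are invented for illustration): two blueprints are grouped, and one middleware is attached to every blueprint in the group through the `middleware` decorator; registering the group on an app is assumed to work because the group iterates like a list.

# Illustrative use of BlueprintGroup.middleware
from sanic import Blueprint, Sanic
from sanic.response import text

bp1 = Blueprint("v1", url_prefix="/v1")
bp2 = Blueprint("v2", url_prefix="/v2")

@bp1.route("/ping")
async def ping(request):
    return text("pong")

group = Blueprint.group(bp1, bp2, url_prefix="/api")   # returns a BlueprintGroup

@group.middleware("request")
async def shared_auth(request):
    ...  # applied to every blueprint in the group

app = Sanic("bp_group_example")
app.blueprint(group)   # the group behaves like a list of blueprints here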
@@ -1,28 +1,55 @@
|
||||
from collections import defaultdict, namedtuple
|
||||
|
||||
from sanic.blueprint_group import BlueprintGroup
|
||||
from sanic.constants import HTTP_METHODS
|
||||
from sanic.views import CompositionView
|
||||
|
||||
FutureRoute = namedtuple('Route',
|
||||
['handler', 'uri', 'methods', 'host',
|
||||
'strict_slashes', 'stream', 'version', 'name'])
|
||||
FutureListener = namedtuple('Listener', ['handler', 'uri', 'methods', 'host'])
|
||||
FutureMiddleware = namedtuple('Route', ['middleware', 'args', 'kwargs'])
|
||||
FutureException = namedtuple('Route', ['handler', 'args', 'kwargs'])
|
||||
FutureStatic = namedtuple('Route',
|
||||
['uri', 'file_or_directory', 'args', 'kwargs'])
|
||||
|
||||
FutureRoute = namedtuple(
|
||||
"FutureRoute",
|
||||
[
|
||||
"handler",
|
||||
"uri",
|
||||
"methods",
|
||||
"host",
|
||||
"strict_slashes",
|
||||
"stream",
|
||||
"version",
|
||||
"name",
|
||||
],
|
||||
)
|
||||
FutureListener = namedtuple(
|
||||
"FutureListener", ["handler", "uri", "methods", "host"]
|
||||
)
|
||||
FutureMiddleware = namedtuple(
|
||||
"FutureMiddleware", ["middleware", "args", "kwargs"]
|
||||
)
|
||||
FutureException = namedtuple("FutureException", ["handler", "args", "kwargs"])
|
||||
FutureStatic = namedtuple(
|
||||
"FutureStatic", ["uri", "file_or_directory", "args", "kwargs"]
|
||||
)
|
||||
|
||||
|
||||
class Blueprint:
|
||||
def __init__(self, name,
|
||||
url_prefix=None,
|
||||
host=None, version=None,
|
||||
strict_slashes=False):
|
||||
"""Create a new blueprint
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
url_prefix=None,
|
||||
host=None,
|
||||
version=None,
|
||||
strict_slashes=None,
|
||||
):
|
||||
"""
|
||||
In *Sanic* terminology, a **Blueprint** is a logical collection of
|
||||
URLs that perform a specific set of tasks which can be identified by
|
||||
a unique name.
|
||||
|
||||
:param name: unique name of the blueprint
|
||||
:param url_prefix: URL to be prefixed before all route URLs
|
||||
:param host: IP Address or FQDN for the sanic server to use.
:param version: Blueprint Version
:param strict_slashes: Enforce that the API URLs are requested with a
trailing */*
|
||||
"""
|
||||
self.name = name
|
||||
self.url_prefix = url_prefix
|
||||
@@ -38,30 +65,44 @@ class Blueprint:
|
||||
self.strict_slashes = strict_slashes
|
||||
|
||||
@staticmethod
|
||||
def group(*blueprints, url_prefix=''):
|
||||
"""Create a list of blueprints, optionally
|
||||
grouping them under a general URL prefix.
|
||||
def group(*blueprints, url_prefix=""):
|
||||
"""
|
||||
Create a list of blueprints, optionally grouping them under a
|
||||
general URL prefix.
|
||||
|
||||
:param blueprints: blueprints to be registered as a group
|
||||
:param url_prefix: URL route to be prepended to all sub-prefixes
|
||||
"""
|
||||
|
||||
def chain(nested):
|
||||
"""itertools.chain() but leaves strings untouched"""
|
||||
for i in nested:
|
||||
if isinstance(i, (list, tuple)):
|
||||
yield from chain(i)
|
||||
elif isinstance(i, BlueprintGroup):
|
||||
yield from i.blueprints
|
||||
else:
|
||||
yield i
|
||||
bps = []
|
||||
|
||||
bps = BlueprintGroup(url_prefix=url_prefix)
|
||||
for bp in chain(blueprints):
|
||||
if bp.url_prefix is None:
|
||||
bp.url_prefix = ""
|
||||
bp.url_prefix = url_prefix + bp.url_prefix
|
||||
bps.append(bp)
|
||||
return bps
|
||||
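To make the prefix arithmetic in `group()` concrete, a hedged sketch of how URL prefixes compose (blueprint and route names are illustrative), as exercised by the loop above:

# Illustrative: url_prefix composition performed by Blueprint.group()
from sanic import Blueprint
from sanic.response import text

content = Blueprint("content", url_prefix="/content")
info = Blueprint("info")            # url_prefix is None, treated as ""

@content.route("/articles")
async def articles(request):
    return text("articles")

grouped = Blueprint.group(content, info, url_prefix="/api")
# content.url_prefix is now "/api/content" and info.url_prefix is "/api",
# so the handler above ends up served at GET /api/content/articles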
|
||||
def register(self, app, options):
|
||||
"""Register the blueprint to the sanic app."""
|
||||
"""
|
||||
Register the blueprint to the sanic app.
|
||||
|
||||
url_prefix = options.get('url_prefix', self.url_prefix)
|
||||
:param app: Instance of :class:`sanic.app.Sanic` class
|
||||
:param options: Options to be used while registering the
|
||||
blueprint into the app.
|
||||
*url_prefix* - URL Prefix to override the blueprint prefix
|
||||
"""
|
||||
|
||||
url_prefix = options.get("url_prefix", self.url_prefix)
|
||||
|
||||
# Routes
|
||||
for future in self.routes:
|
||||
@@ -73,14 +114,15 @@ class Blueprint:
|
||||
|
||||
version = future.version or self.version
|
||||
|
||||
app.route(uri=uri[1:] if uri.startswith('//') else uri,
|
||||
methods=future.methods,
|
||||
host=future.host or self.host,
|
||||
strict_slashes=future.strict_slashes,
|
||||
stream=future.stream,
|
||||
version=version,
|
||||
name=future.name,
|
||||
)(future.handler)
|
||||
app.route(
|
||||
uri=uri[1:] if uri.startswith("//") else uri,
|
||||
methods=future.methods,
|
||||
host=future.host or self.host,
|
||||
strict_slashes=future.strict_slashes,
|
||||
stream=future.stream,
|
||||
version=version,
|
||||
name=future.name,
|
||||
)(future.handler)
|
||||
|
||||
for future in self.websocket_routes:
|
||||
# attach the blueprint name to the handler so that it can be
|
||||
@@ -88,18 +130,19 @@ class Blueprint:
|
||||
future.handler.__blueprintname__ = self.name
|
||||
# Prepend the blueprint URI prefix if available
|
||||
uri = url_prefix + future.uri if url_prefix else future.uri
|
||||
app.websocket(uri=uri,
|
||||
host=future.host or self.host,
|
||||
strict_slashes=future.strict_slashes,
|
||||
name=future.name,
|
||||
)(future.handler)
|
||||
app.websocket(
|
||||
uri=uri,
|
||||
host=future.host or self.host,
|
||||
strict_slashes=future.strict_slashes,
|
||||
name=future.name,
|
||||
)(future.handler)
|
||||
|
||||
# Middleware
|
||||
for future in self.middlewares:
|
||||
if future.args or future.kwargs:
|
||||
app.register_middleware(future.middleware,
|
||||
*future.args,
|
||||
**future.kwargs)
|
||||
app.register_middleware(
|
||||
future.middleware, *future.args, **future.kwargs
|
||||
)
|
||||
else:
|
||||
app.register_middleware(future.middleware)
|
||||
|
||||
@@ -111,48 +154,85 @@ class Blueprint:
|
||||
for future in self.statics:
|
||||
# Prepend the blueprint URI prefix if available
|
||||
uri = url_prefix + future.uri if url_prefix else future.uri
|
||||
app.static(uri, future.file_or_directory,
|
||||
*future.args, **future.kwargs)
|
||||
app.static(
|
||||
uri, future.file_or_directory, *future.args, **future.kwargs
|
||||
)
|
||||
|
||||
# Event listeners
|
||||
for event, listeners in self.listeners.items():
|
||||
for listener in listeners:
|
||||
app.listener(event)(listener)
|
||||
|
||||
def route(self, uri, methods=frozenset({'GET'}), host=None,
|
||||
strict_slashes=None, stream=False, version=None, name=None):
|
||||
def route(
|
||||
self,
|
||||
uri,
|
||||
methods=frozenset({"GET"}),
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
stream=False,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""Create a blueprint route from a decorated function.
|
||||
|
||||
:param uri: endpoint at which the route will be accessible.
|
||||
:param methods: list of acceptable HTTP methods.
|
||||
:param host: IP Address or FQDN for the sanic server to use.
|
||||
:param strict_slashes: Enforce the API urls are requested with a
|
||||
trailing */*
|
||||
:param stream: If the route should provide a streaming support
|
||||
:param version: Blueprint Version
|
||||
:param name: Unique name to identify the Route
|
||||
|
||||
:return: a decorated method that, when invoked, will return an object
|
||||
of type :class:`FutureRoute`
|
||||
"""
|
||||
if strict_slashes is None:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
def decorator(handler):
|
||||
route = FutureRoute(
|
||||
handler, uri, methods, host, strict_slashes, stream, version,
|
||||
name)
|
||||
handler,
|
||||
uri,
|
||||
methods,
|
||||
host,
|
||||
strict_slashes,
|
||||
stream,
|
||||
version,
|
||||
name,
|
||||
)
|
||||
self.routes.append(route)
|
||||
return handler
|
||||
|
||||
return decorator
|
||||
|
||||
def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None,
|
||||
strict_slashes=None, version=None, name=None):
|
||||
def add_route(
|
||||
self,
|
||||
handler,
|
||||
uri,
|
||||
methods=frozenset({"GET"}),
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
version=None,
|
||||
name=None,
|
||||
stream=False,
|
||||
):
|
||||
"""Create a blueprint route from a function.
|
||||
|
||||
:param handler: function for handling uri requests. Accepts function,
|
||||
or class instance with a view_class method.
|
||||
:param uri: endpoint at which the route will be accessible.
|
||||
:param methods: list of acceptable HTTP methods.
|
||||
:param host:
|
||||
:param strict_slashes:
|
||||
:param version:
|
||||
:param host: IP Address or FQDN for the sanic server to use.
|
||||
:param strict_slashes: Enforce the API urls are requested with a
|
||||
trailing */*
|
||||
:param version: Blueprint Version
|
||||
:param name: user defined route name for url_for
|
||||
:param stream: boolean specifying if the handler is a stream handler
|
||||
:return: function or class instance
|
||||
"""
|
||||
# Handle HTTPMethodView differently
|
||||
if hasattr(handler, 'view_class'):
|
||||
if hasattr(handler, "view_class"):
|
||||
methods = set()
|
||||
|
||||
for method in HTTP_METHODS:
|
||||
@@ -166,34 +246,52 @@ class Blueprint:
|
||||
if isinstance(handler, CompositionView):
|
||||
methods = handler.handlers.keys()
|
||||
|
||||
self.route(uri=uri, methods=methods, host=host,
|
||||
strict_slashes=strict_slashes, version=version,
|
||||
name=name)(handler)
|
||||
self.route(
|
||||
uri=uri,
|
||||
methods=methods,
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
stream=stream,
|
||||
version=version,
|
||||
name=name,
|
||||
)(handler)
|
||||
return handler
|
||||
|
||||
def websocket(self, uri, host=None, strict_slashes=None, version=None,
|
||||
name=None):
|
||||
def websocket(
|
||||
self, uri, host=None, strict_slashes=None, version=None, name=None
|
||||
):
|
||||
"""Create a blueprint websocket route from a decorated function.
|
||||
|
||||
:param uri: endpoint at which the route will be accessible.
|
||||
:param host: IP Address or FQDN for the sanic server to use.
|
||||
:param strict_slashes: Enforce the API urls are requested with a
|
||||
training */*
|
||||
:param version: Blueprint Version
|
||||
:param name: Unique name to identify the Websocket Route
|
||||
"""
|
||||
if strict_slashes is None:
|
||||
strict_slashes = self.strict_slashes
|
||||
|
||||
def decorator(handler):
|
||||
route = FutureRoute(handler, uri, [], host, strict_slashes,
|
||||
False, version, name)
|
||||
route = FutureRoute(
|
||||
handler, uri, [], host, strict_slashes, False, version, name
|
||||
)
|
||||
self.websocket_routes.append(route)
|
||||
return handler
|
||||
|
||||
return decorator
|
||||
|
||||
def add_websocket_route(self, handler, uri, host=None, version=None,
|
||||
name=None):
|
||||
def add_websocket_route(
|
||||
self, handler, uri, host=None, version=None, name=None
|
||||
):
|
||||
"""Create a blueprint websocket route from a function.
|
||||
|
||||
:param handler: function for handling uri requests. Accepts function,
|
||||
or class instance with a view_class method.
|
||||
:param uri: endpoint at which the route will be accessible.
|
||||
:param host: IP Address or FQDN for the sanic server to use.
|
||||
:param version: Blueprint Version
|
||||
:param name: Unique name to identify the Websocket Route
|
||||
:return: function or class instance
|
||||
"""
|
||||
self.websocket(uri=uri, host=host, version=version, name=name)(handler)
|
||||
@@ -204,13 +302,23 @@ class Blueprint:
|
||||
|
||||
:param event: Event to listen to.
|
||||
"""
|
||||
|
||||
def decorator(listener):
|
||||
self.listeners[event].append(listener)
|
||||
return listener
|
||||
|
||||
return decorator
|
||||
|
||||
def middleware(self, *args, **kwargs):
|
||||
"""Create a blueprint middleware from a decorated function."""
|
||||
"""
|
||||
Create a blueprint middleware from a decorated function.
|
||||
|
||||
:param args: Positional arguments to be used while invoking the
|
||||
middleware
|
||||
:param kwargs: optional keyword args that can be used with the
|
||||
middleware.
|
||||
"""
|
||||
|
||||
def register_middleware(_middleware):
|
||||
future_middleware = FutureMiddleware(_middleware, args, kwargs)
|
||||
self.middlewares.append(future_middleware)
|
||||
@@ -222,14 +330,32 @@ class Blueprint:
|
||||
args = []
|
||||
return register_middleware(middleware)
|
||||
else:
|
||||
return register_middleware
|
||||
if kwargs.get("bp_group") and callable(args[0]):
|
||||
middleware = args[0]
|
||||
args = args[1:]
|
||||
kwargs.pop("bp_group")
|
||||
return register_middleware(middleware)
|
||||
else:
|
||||
return register_middleware
|
||||
|
||||
def exception(self, *args, **kwargs):
|
||||
"""Create a blueprint exception from a decorated function."""
|
||||
"""
|
||||
This method enables the process of creating a global exception
|
||||
handler for the current blueprint in question.
|
||||
|
||||
:param args: List of Python exceptions to be caught by the handler
|
||||
:param kwargs: Additional optional arguments to be passed to the
|
||||
exception handler
|
||||
|
||||
:return: a decorated method to handle global exceptions for any
|
||||
route registered under this blueprint.
|
||||
"""
|
||||
|
||||
def decorator(handler):
|
||||
exception = FutureException(handler, args, kwargs)
|
||||
self.exceptions.append(exception)
|
||||
return handler
|
||||
|
||||
return decorator
|
||||
|
||||
def static(self, uri, file_or_directory, *args, **kwargs):
|
||||
@@ -238,12 +364,12 @@ class Blueprint:
|
||||
:param uri: endpoint at which the route will be accessible.
|
||||
:param file_or_directory: Static asset.
|
||||
"""
|
||||
name = kwargs.pop('name', 'static')
|
||||
if not name.startswith(self.name + '.'):
|
||||
name = '{}.{}'.format(self.name, name)
|
||||
name = kwargs.pop("name", "static")
|
||||
if not name.startswith(self.name + "."):
|
||||
name = "{}.{}".format(self.name, name)
|
||||
kwargs.update(name=name)
|
||||
|
||||
strict_slashes = kwargs.get('strict_slashes')
|
||||
strict_slashes = kwargs.get("strict_slashes")
|
||||
if strict_slashes is None and self.strict_slashes is not None:
|
||||
kwargs.update(strict_slashes=self.strict_slashes)
|
||||
|
||||
@@ -251,44 +377,184 @@ class Blueprint:
|
||||
self.statics.append(static)
|
||||
|
||||
# Shorthand method decorators
|
||||
def get(self, uri, host=None, strict_slashes=None, version=None,
|
||||
name=None):
|
||||
return self.route(uri, methods=["GET"], host=host,
|
||||
strict_slashes=strict_slashes, version=version,
|
||||
name=name)
|
||||
def get(
|
||||
self, uri, host=None, strict_slashes=None, version=None, name=None
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **GET** *HTTP* method
|
||||
|
||||
def post(self, uri, host=None, strict_slashes=None, stream=False,
|
||||
version=None, name=None):
|
||||
return self.route(uri, methods=["POST"], host=host,
|
||||
strict_slashes=strict_slashes, stream=stream,
|
||||
version=version, name=name)
|
||||
:param uri: URL to be tagged to **GET** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"GET"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
def put(self, uri, host=None, strict_slashes=None, stream=False,
|
||||
version=None, name=None):
|
||||
return self.route(uri, methods=["PUT"], host=host,
|
||||
strict_slashes=strict_slashes, stream=stream,
|
||||
version=version, name=name)
|
||||
def post(
|
||||
self,
|
||||
uri,
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
stream=False,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **POST** *HTTP* method
|
||||
|
||||
def head(self, uri, host=None, strict_slashes=None, version=None,
|
||||
name=None):
|
||||
return self.route(uri, methods=["HEAD"], host=host,
|
||||
strict_slashes=strict_slashes, version=version,
|
||||
name=name)
|
||||
:param uri: URL to be tagged to **POST** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"POST"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
stream=stream,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
def options(self, uri, host=None, strict_slashes=None, version=None,
|
||||
name=None):
|
||||
return self.route(uri, methods=["OPTIONS"], host=host,
|
||||
strict_slashes=strict_slashes, version=version,
|
||||
name=name)
|
||||
def put(
|
||||
self,
|
||||
uri,
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
stream=False,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **PUT** *HTTP* method
|
||||
|
||||
def patch(self, uri, host=None, strict_slashes=None, stream=False,
|
||||
version=None, name=None):
|
||||
return self.route(uri, methods=["PATCH"], host=host,
|
||||
strict_slashes=strict_slashes, stream=stream,
|
||||
version=version, name=name)
|
||||
:param uri: URL to be tagged to **PUT** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"PUT"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
stream=stream,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
def delete(self, uri, host=None, strict_slashes=None, version=None,
|
||||
name=None):
|
||||
return self.route(uri, methods=["DELETE"], host=host,
|
||||
strict_slashes=strict_slashes, version=version,
|
||||
name=name)
|
||||
def head(
|
||||
self, uri, host=None, strict_slashes=None, version=None, name=None
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **HEAD** *HTTP* method
|
||||
|
||||
:param uri: URL to be tagged to **HEAD** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"HEAD"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
def options(
|
||||
self, uri, host=None, strict_slashes=None, version=None, name=None
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **OPTIONS** *HTTP* method
|
||||
|
||||
:param uri: URL to be tagged to **OPTIONS** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"OPTIONS"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
def patch(
|
||||
self,
|
||||
uri,
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
stream=False,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **PATCH** *HTTP* method
|
||||
|
||||
:param uri: URL to be tagged to **PATCH** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"PATCH"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
stream=stream,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
def delete(
|
||||
self, uri, host=None, strict_slashes=None, version=None, name=None
|
||||
):
|
||||
"""
|
||||
Add an API URL under the **DELETE** *HTTP* method
|
||||
|
||||
:param uri: URL to be tagged to **DELETE** method of *HTTP*
|
||||
:param host: Host IP or FQDN for the service to use
|
||||
:param strict_slashes: Instruct :class:`sanic.app.Sanic` to check
|
||||
if the request URLs need to terminate with a */*
|
||||
:param version: API Version
|
||||
:param name: Unique name that can be used to identify the Route
|
||||
:return: Object decorated with :func:`route` method
|
||||
"""
|
||||
return self.route(
|
||||
uri,
|
||||
methods=frozenset({"DELETE"}),
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
version=version,
|
||||
name=name,
|
||||
)
|
||||
|
||||
6
sanic/compat.py
Normal file
@@ -0,0 +1,6 @@
from multidict import CIMultiDict  # type: ignore


class Header(CIMultiDict):
    def get_all(self, key):
        return self.getall(key, default=[])
|
||||
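A brief sketch of what this shim provides: `Header` is a case-insensitive multidict, and `get_all` returns every value stored under a key (an empty list when the key is absent), which matters for repeated headers such as `Set-Cookie`.

# Illustrative use of sanic.compat.Header
from sanic.compat import Header

h = Header([("Set-Cookie", "a=1"), ("set-cookie", "b=2")])
assert h.get_all("Set-Cookie") == ["a=1", "b=2"]   # case-insensitive, all values
assert h.get_all("X-Missing") == []                # default is an empty list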
119
sanic/config.py
@@ -1,45 +1,47 @@
|
||||
import os
|
||||
import types
|
||||
|
||||
from sanic.exceptions import PyFileError
|
||||
from sanic.helpers import import_string
|
||||
|
||||
SANIC_PREFIX = 'SANIC_'
|
||||
|
||||
SANIC_PREFIX = "SANIC_"
|
||||
BASE_LOGO = """
|
||||
|
||||
Sanic
|
||||
Build Fast. Run Fast.
|
||||
|
||||
"""
|
||||
|
||||
DEFAULT_CONFIG = {
|
||||
"REQUEST_MAX_SIZE": 100000000, # 100 megabytes
|
||||
"REQUEST_BUFFER_QUEUE_SIZE": 100,
|
||||
"REQUEST_TIMEOUT": 60, # 60 seconds
|
||||
"RESPONSE_TIMEOUT": 60, # 60 seconds
|
||||
"KEEP_ALIVE": True,
|
||||
"KEEP_ALIVE_TIMEOUT": 5, # 5 seconds
|
||||
"WEBSOCKET_MAX_SIZE": 2 ** 20, # 1 megabytes
|
||||
"WEBSOCKET_MAX_QUEUE": 32,
|
||||
"WEBSOCKET_READ_LIMIT": 2 ** 16,
|
||||
"WEBSOCKET_WRITE_LIMIT": 2 ** 16,
|
||||
"GRACEFUL_SHUTDOWN_TIMEOUT": 15.0, # 15 sec
|
||||
"ACCESS_LOG": True,
|
||||
"FORWARDED_SECRET": None,
|
||||
"REAL_IP_HEADER": None,
|
||||
"PROXIES_COUNT": None,
|
||||
"FORWARDED_FOR_HEADER": "X-Forwarded-For",
|
||||
}
|
||||
|
||||
|
||||
class Config(dict):
|
||||
def __init__(self, defaults=None, load_env=True, keep_alive=True):
|
||||
super().__init__(defaults or {})
|
||||
self.LOGO = """
|
||||
▄▄▄▄▄
|
||||
▀▀▀██████▄▄▄ _______________
|
||||
▄▄▄▄▄ █████████▄ / \\
|
||||
▀▀▀▀█████▌ ▀▐▄ ▀▐█ | Gotta go fast! |
|
||||
▀▀█████▄▄ ▀██████▄██ | _________________/
|
||||
▀▄▄▄▄▄ ▀▀█▄▀█════█▀ |/
|
||||
▀▀▀▄ ▀▀███ ▀ ▄▄
|
||||
▄███▀▀██▄████████▄ ▄▀▀▀▀▀▀█▌
|
||||
██▀▄▄▄██▀▄███▀ ▀▀████ ▄██
|
||||
▄▀▀▀▄██▄▀▀▌████▒▒▒▒▒▒███ ▌▄▄▀
|
||||
▌ ▐▀████▐███▒▒▒▒▒▐██▌
|
||||
▀▄▄▄▄▀ ▀▀████▒▒▒▒▄██▀
|
||||
▀▀█████████▀
|
||||
▄▄██▀██████▀█
|
||||
▄██▀ ▀▀▀ █
|
||||
▄█ ▐▌
|
||||
▄▄▄▄█▌ ▀█▄▄▄▄▀▀▄
|
||||
▌ ▐ ▀▀▄▄▄▀
|
||||
▀▀▄▄▀
|
||||
"""
|
||||
self.REQUEST_MAX_SIZE = 100000000 # 100 megabytes
|
||||
self.REQUEST_TIMEOUT = 60 # 60 seconds
|
||||
self.RESPONSE_TIMEOUT = 60 # 60 seconds
|
||||
self.KEEP_ALIVE = keep_alive
|
||||
self.KEEP_ALIVE_TIMEOUT = 5 # 5 seconds
|
||||
self.WEBSOCKET_MAX_SIZE = 2 ** 20 # 1 megabytes
|
||||
self.WEBSOCKET_MAX_QUEUE = 32
|
||||
self.WEBSOCKET_READ_LIMIT = 2 ** 16
|
||||
self.WEBSOCKET_WRITE_LIMIT = 2 ** 16
|
||||
self.GRACEFUL_SHUTDOWN_TIMEOUT = 15.0 # 15 sec
|
||||
self.ACCESS_LOG = True
|
||||
def __init__(self, defaults=None, load_env=True, keep_alive=None):
|
||||
defaults = defaults or {}
|
||||
super().__init__({**DEFAULT_CONFIG, **defaults})
|
||||
|
||||
self.LOGO = BASE_LOGO
|
||||
|
||||
if keep_alive is not None:
|
||||
self.KEEP_ALIVE = keep_alive
|
||||
|
||||
if load_env:
|
||||
prefix = SANIC_PREFIX if load_env is True else load_env
|
||||
@@ -63,9 +65,10 @@ class Config(dict):
|
||||
"""
|
||||
config_file = os.environ.get(variable_name)
|
||||
if not config_file:
|
||||
raise RuntimeError('The environment variable %r is not set and '
|
||||
'thus configuration could not be loaded.' %
|
||||
variable_name)
|
||||
raise RuntimeError(
|
||||
"The environment variable %r is not set and "
|
||||
"thus configuration could not be loaded." % variable_name
|
||||
)
|
||||
return self.from_pyfile(config_file)
|
||||
|
||||
def from_pyfile(self, filename):
|
||||
@@ -74,15 +77,20 @@ class Config(dict):
|
||||
|
||||
:param filename: an absolute path to the config file
|
||||
"""
|
||||
module = types.ModuleType('config')
|
||||
module = types.ModuleType("config")
|
||||
module.__file__ = filename
|
||||
try:
|
||||
with open(filename) as config_file:
|
||||
exec(compile(config_file.read(), filename, 'exec'),
|
||||
module.__dict__)
|
||||
exec( # nosec
|
||||
compile(config_file.read(), filename, "exec"),
|
||||
module.__dict__,
|
||||
)
|
||||
except IOError as e:
|
||||
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
|
||||
e.strerror = "Unable to load configuration file (%s)" % e.strerror
|
||||
raise
|
||||
except Exception as e:
|
||||
raise PyFileError(filename) from e
|
||||
|
||||
self.from_object(module)
|
||||
return True
|
||||
|
||||
@@ -96,6 +104,9 @@ class Config(dict):
|
||||
from yourapplication import default_config
|
||||
app.config.from_object(default_config)
|
||||
|
||||
or also:
|
||||
app.config.from_object('myproject.config.MyConfigClass')
|
||||
|
||||
You should not use this function to load the actual configuration but
|
||||
rather configuration defaults. The actual config should be loaded
|
||||
with :meth:`from_pyfile` and ideally from a location not within the
|
||||
@@ -103,6 +114,8 @@ class Config(dict):
|
||||
|
||||
:param obj: an object holding the configuration
|
||||
"""
|
||||
if isinstance(obj, str):
|
||||
obj = import_string(obj)
|
||||
for key in dir(obj):
|
||||
if key.isupper():
|
||||
self[key] = getattr(obj, key)
|
||||
@@ -121,4 +134,24 @@ class Config(dict):
|
||||
try:
|
||||
self[config_key] = float(v)
|
||||
except ValueError:
|
||||
self[config_key] = v
|
||||
try:
|
||||
self[config_key] = strtobool(v)
|
||||
except ValueError:
|
||||
self[config_key] = v
|
||||
|
||||
|
||||
def strtobool(val):
|
||||
"""
|
||||
This function was borrowed from distutils.util. While distutils
|
||||
is part of stdlib, it feels odd to use distutils in main application code.
|
||||
|
||||
The function was modified to walk its talk and actually return bool
|
||||
and not int.
|
||||
"""
|
||||
val = val.lower()
|
||||
if val in ("y", "yes", "t", "true", "on", "1"):
|
||||
return True
|
||||
elif val in ("n", "no", "f", "false", "off", "0"):
|
||||
return False
|
||||
else:
|
||||
raise ValueError("invalid truth value %r" % (val,))
|
||||
|
||||
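A short sketch of how `load_env` and `strtobool` interact, assuming the default behavior described in the hunks above: any environment variable starting with the prefix (default `SANIC_`) is coerced to int, then float, then bool, and otherwise kept as a string (the variable names below are examples only):

# Illustrative: environment-driven configuration
import os

os.environ["SANIC_REQUEST_TIMEOUT"] = "30"     # -> int 30
os.environ["SANIC_ACCESS_LOG"] = "false"       # -> bool False via strtobool
os.environ["SANIC_MY_SETTING"] = "hello"       # -> left as the string "hello"

from sanic import Sanic

app = Sanic("config_example")                  # Config(load_env=True) by default
assert app.config.REQUEST_TIMEOUT == 30
assert app.config.ACCESS_LOG is False
assert app.config.MY_SETTING == "hello"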
@@ -1 +1 @@
|
||||
HTTP_METHODS = ('GET', 'POST', 'PUT', 'HEAD', 'OPTIONS', 'PATCH', 'DELETE')
|
||||
HTTP_METHODS = ("GET", "POST", "PUT", "HEAD", "OPTIONS", "PATCH", "DELETE")
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
import re
|
||||
import string
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
DEFAULT_MAX_AGE = 0
|
||||
|
||||
# ------------------------------------------------------------ #
|
||||
# SimpleCookie
|
||||
# ------------------------------------------------------------ #
|
||||
@@ -8,18 +13,16 @@ import string
|
||||
# Straight up copied this section of dark magic from SimpleCookie
|
||||
|
||||
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:"
|
||||
_UnescapedChars = _LegalChars + ' ()/<=>?@[]{}'
|
||||
_UnescapedChars = _LegalChars + " ()/<=>?@[]{}"
|
||||
|
||||
_Translator = {n: '\\%03o' % n
|
||||
for n in set(range(256)) - set(map(ord, _UnescapedChars))}
|
||||
_Translator.update({
|
||||
ord('"'): '\\"',
|
||||
ord('\\'): '\\\\',
|
||||
})
|
||||
_Translator = {
|
||||
n: "\\%03o" % n for n in set(range(256)) - set(map(ord, _UnescapedChars))
|
||||
}
|
||||
_Translator.update({ord('"'): '\\"', ord("\\"): "\\\\"})
|
||||
|
||||
|
||||
def _quote(str):
|
||||
"""Quote a string for use in a cookie header.
|
||||
r"""Quote a string for use in a cookie header.
|
||||
If the string does not need to be double-quoted, then just return the
|
||||
string. Otherwise, surround the string in doublequotes and quote
|
||||
(with a \) special characters.
|
||||
@@ -30,7 +33,7 @@ def _quote(str):
|
||||
return '"' + str.translate(_Translator) + '"'
|
||||
|
||||
|
||||
_is_legal_key = re.compile('[%s]+' % re.escape(_LegalChars)).fullmatch
|
||||
_is_legal_key = re.compile("[%s]+" % re.escape(_LegalChars)).fullmatch
|
||||
|
||||
# ------------------------------------------------------------ #
|
||||
# Custom SimpleCookie
|
||||
@@ -53,7 +56,7 @@ class CookieJar(dict):
|
||||
# If this cookie doesn't exist, add it to the header keys
|
||||
if not self.cookie_headers.get(key):
|
||||
cookie = Cookie(key, value)
|
||||
cookie['path'] = '/'
|
||||
cookie["path"] = "/"
|
||||
self.cookie_headers[key] = self.header_key
|
||||
self.headers.add(self.header_key, cookie)
|
||||
return super().__setitem__(key, cookie)
|
||||
@@ -62,8 +65,8 @@ class CookieJar(dict):
|
||||
|
||||
def __delitem__(self, key):
|
||||
if key not in self.cookie_headers:
|
||||
self[key] = ''
|
||||
self[key]['max-age'] = 0
|
||||
self[key] = ""
|
||||
self[key]["max-age"] = 0
|
||||
else:
|
||||
cookie_header = self.cookie_headers[key]
|
||||
# remove it from header
|
||||
@@ -77,6 +80,7 @@ class CookieJar(dict):
|
||||
|
||||
class Cookie(dict):
|
||||
"""A stripped down version of Morsel from SimpleCookie #gottagofast"""
|
||||
|
||||
_keys = {
|
||||
"expires": "expires",
|
||||
"path": "Path",
|
||||
@@ -88,7 +92,7 @@ class Cookie(dict):
|
||||
"version": "Version",
|
||||
"samesite": "SameSite",
|
||||
}
|
||||
_flags = {'secure', 'httponly'}
|
||||
_flags = {"secure", "httponly"}
|
||||
|
||||
def __init__(self, key, value):
|
||||
if key in self._keys:
|
||||
@@ -103,27 +107,44 @@ class Cookie(dict):
|
||||
if key not in self._keys:
|
||||
raise KeyError("Unknown cookie property")
|
||||
if value is not False:
|
||||
if key.lower() == "max-age":
|
||||
if not str(value).isdigit():
|
||||
value = DEFAULT_MAX_AGE
|
||||
elif key.lower() == "expires":
|
||||
if not isinstance(value, datetime):
|
||||
raise TypeError(
|
||||
"Cookie 'expires' property must be a datetime"
|
||||
)
|
||||
return super().__setitem__(key, value)
|
||||
|
||||
def encode(self, encoding):
|
||||
output = ['%s=%s' % (self.key, _quote(self.value))]
|
||||
"""
|
||||
Encode the cookie content in a specific type of encoding instructed
|
||||
by the developer. Leverages the :func:`str.encode` method provided
|
||||
by python.
|
||||
|
||||
This method can be used to encode and embed ``utf-8`` content into
|
||||
the cookies.
|
||||
|
||||
:param encoding: Encoding to be used with the cookie
|
||||
:return: Cookie encoded in a codec of choosing.
|
||||
:except: UnicodeEncodeError
|
||||
"""
|
||||
output = ["%s=%s" % (self.key, _quote(self.value))]
|
||||
for key, value in self.items():
|
||||
if key == 'max-age':
|
||||
if key == "max-age":
|
||||
try:
|
||||
output.append('%s=%d' % (self._keys[key], value))
|
||||
output.append("%s=%d" % (self._keys[key], value))
|
||||
except TypeError:
|
||||
output.append('%s=%s' % (self._keys[key], value))
|
||||
elif key == 'expires':
|
||||
try:
|
||||
output.append('%s=%s' % (
|
||||
self._keys[key],
|
||||
value.strftime("%a, %d-%b-%Y %T GMT")
|
||||
))
|
||||
except AttributeError:
|
||||
output.append('%s=%s' % (self._keys[key], value))
|
||||
output.append("%s=%s" % (self._keys[key], value))
|
||||
elif key == "expires":
|
||||
output.append(
|
||||
"%s=%s"
|
||||
% (self._keys[key], value.strftime("%a, %d-%b-%Y %T GMT"))
|
||||
)
|
||||
elif key in self._flags and self[key]:
|
||||
output.append(self._keys[key])
|
||||
else:
|
||||
output.append('%s=%s' % (self._keys[key], value))
|
||||
output.append("%s=%s" % (self._keys[key], value))
|
||||
|
||||
return "; ".join(output).encode(encoding)
|
||||
|
||||
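A small usage sketch tying the CookieJar and Cookie pieces together (handler and cookie names are illustrative): cookies are set through `response.cookies`, and each one is serialized by `Cookie.encode` into a `Set-Cookie` header value.

# Illustrative: setting a cookie on a Sanic response
from datetime import datetime, timedelta

from sanic import Sanic
from sanic.response import text

app = Sanic("cookie_example")

@app.route("/login")
async def login(request):
    response = text("welcome")
    response.cookies["session"] = "abc123"
    response.cookies["session"]["httponly"] = True
    response.cookies["session"]["max-age"] = 3600
    # 'expires' must be a datetime, per Cookie.__setitem__ above
    response.cookies["session"]["expires"] = datetime.utcnow() + timedelta(days=1)
    return response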
@@ -1,6 +1,7 @@
|
||||
from sanic.http import STATUS_CODES
|
||||
from sanic.helpers import STATUS_CODES
|
||||
|
||||
TRACEBACK_STYLE = '''
|
||||
|
||||
TRACEBACK_STYLE = """
|
||||
<style>
|
||||
body {
|
||||
padding: 20px;
|
||||
@@ -61,9 +62,9 @@ TRACEBACK_STYLE = '''
|
||||
font-size: 14px;
|
||||
}
|
||||
</style>
|
||||
'''
|
||||
"""
|
||||
|
||||
TRACEBACK_WRAPPER_HTML = '''
|
||||
TRACEBACK_WRAPPER_HTML = """
|
||||
<html>
|
||||
<head>
|
||||
{style}
|
||||
@@ -78,27 +79,27 @@ TRACEBACK_WRAPPER_HTML = '''
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
'''
|
||||
"""
|
||||
|
||||
TRACEBACK_WRAPPER_INNER_HTML = '''
|
||||
TRACEBACK_WRAPPER_INNER_HTML = """
|
||||
<h1>{exc_name}</h1>
|
||||
<h3><code>{exc_value}</code></h3>
|
||||
<div class="tb-wrapper">
|
||||
<p class="tb-header">Traceback (most recent call last):</p>
|
||||
{frame_html}
|
||||
</div>
|
||||
'''
|
||||
"""
|
||||
|
||||
TRACEBACK_BORDER = '''
|
||||
TRACEBACK_BORDER = """
|
||||
<div class="tb-border">
|
||||
<b><i>
|
||||
The above exception was the direct cause of the
|
||||
following exception:
|
||||
</i></b>
|
||||
</div>
|
||||
'''
|
||||
"""
|
||||
|
||||
TRACEBACK_LINE_HTML = '''
|
||||
TRACEBACK_LINE_HTML = """
|
||||
<div class="frame-line">
|
||||
<p class="frame-descriptor">
|
||||
File {0.filename}, line <i>{0.lineno}</i>,
|
||||
@@ -106,15 +107,15 @@ TRACEBACK_LINE_HTML = '''
|
||||
</p>
|
||||
<p class="frame-code"><code>{0.line}</code></p>
|
||||
</div>
|
||||
'''
|
||||
"""
|
||||
|
||||
INTERNAL_SERVER_ERROR_HTML = '''
|
||||
INTERNAL_SERVER_ERROR_HTML = """
|
||||
<h1>Internal Server Error</h1>
|
||||
<p>
|
||||
The server encountered an internal error and cannot complete
|
||||
your request.
|
||||
</p>
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
_sanic_exceptions = {}
|
||||
@@ -122,17 +123,18 @@ _sanic_exceptions = {}
|
||||
|
||||
def add_status_code(code):
|
||||
"""
|
||||
Decorator used for adding exceptions to _sanic_exceptions.
|
||||
Decorator used for adding exceptions to :class:`SanicException`.
|
||||
"""
|
||||
|
||||
def class_decorator(cls):
|
||||
cls.status_code = code
|
||||
_sanic_exceptions[code] = cls
|
||||
return cls
|
||||
|
||||
return class_decorator
|
||||
|
||||
|
||||
class SanicException(Exception):
|
||||
|
||||
def __init__(self, message, status_code=None):
|
||||
super().__init__(message)
|
||||
|
||||
@@ -156,8 +158,8 @@ class MethodNotSupported(SanicException):
|
||||
super().__init__(message)
|
||||
self.headers = dict()
|
||||
self.headers["Allow"] = ", ".join(allowed_methods)
|
||||
if method in ['HEAD', 'PATCH', 'PUT', 'DELETE']:
|
||||
self.headers['Content-Length'] = 0
|
||||
if method in ["HEAD", "PATCH", "PUT", "DELETE"]:
|
||||
self.headers["Content-Length"] = 0
|
||||
|
||||
|
||||
@add_status_code(500)
|
||||
@@ -169,6 +171,7 @@ class ServerError(SanicException):
|
||||
class ServiceUnavailable(SanicException):
|
||||
"""The server is currently unavailable (because it is overloaded or
|
||||
down for maintenance). Generally, this is a temporary state."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -192,6 +195,7 @@ class RequestTimeout(SanicException):
|
||||
the connection. The socket connection has actually been lost - the Web
|
||||
server has 'timed out' on that particular socket connection.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -209,11 +213,16 @@ class ContentRangeError(SanicException):
|
||||
def __init__(self, message, content_range):
|
||||
super().__init__(message)
|
||||
self.headers = {
|
||||
'Content-Type': 'text/plain',
|
||||
"Content-Range": "bytes */%s" % (content_range.total,)
|
||||
"Content-Type": "text/plain",
|
||||
"Content-Range": "bytes */%s" % (content_range.total,),
|
||||
}
|
||||
|
||||
|
||||
@add_status_code(417)
|
||||
class HeaderExpectationFailed(SanicException):
|
||||
pass
|
||||
|
||||
|
||||
@add_status_code(403)
|
||||
class Forbidden(SanicException):
|
||||
pass
|
||||
@@ -223,6 +232,11 @@ class InvalidRangeType(ContentRangeError):
|
||||
pass
|
||||
|
||||
|
||||
class PyFileError(Exception):
|
||||
def __init__(self, file):
|
||||
super().__init__("could not execute config file %s", file)
|
||||
|
||||
|
||||
@add_status_code(401)
|
||||
class Unauthorized(SanicException):
|
||||
"""
|
||||
@@ -258,13 +272,14 @@ class Unauthorized(SanicException):
|
||||
scheme="Bearer",
|
||||
realm="Restricted Area")
|
||||
"""
|
||||
|
||||
def __init__(self, message, status_code=None, scheme=None, **kwargs):
|
||||
super().__init__(message, status_code)
|
||||
|
||||
# if auth-scheme is specified, set "WWW-Authenticate" header
|
||||
if scheme is not None:
|
||||
values = ['{!s}="{!s}"'.format(k, v) for k, v in kwargs.items()]
|
||||
challenge = ', '.join(values)
|
||||
challenge = ", ".join(values)
|
||||
|
||||
self.headers = {
|
||||
"WWW-Authenticate": "{} {}".format(scheme, challenge).rstrip()
|
||||
@@ -283,6 +298,6 @@ def abort(status_code, message=None):
|
||||
if message is None:
|
||||
message = STATUS_CODES.get(status_code)
|
||||
# These are stored as bytes in the STATUS_CODES dict
|
||||
message = message.decode('utf8')
|
||||
message = message.decode("utf8")
|
||||
sanic_exception = _sanic_exceptions.get(status_code, SanicException)
|
||||
raise sanic_exception(message=message, status_code=status_code)
|
||||
|
||||
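For reference, a minimal sketch of `abort()` in a handler: the status code is mapped through `_sanic_exceptions` to the registered exception class (404 raises NotFound), and the default message is taken from `STATUS_CODES`. The lookup table below is a made-up placeholder.

# Illustrative use of sanic.exceptions.abort
from sanic import Sanic
from sanic.exceptions import abort
from sanic.response import json

app = Sanic("abort_example")

ITEMS = {1: {"name": "widget"}}    # toy data for illustration

@app.route("/items/<item_id:int>")
async def get_item(request, item_id):
    if item_id not in ITEMS:
        abort(404)                 # raises NotFound with the default "Not Found" message
    return json(ITEMS[item_id])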
@@ -1,22 +1,36 @@
|
||||
import sys
|
||||
from traceback import format_exc, extract_tb
|
||||
|
||||
from traceback import extract_tb, format_exc
|
||||
|
||||
from sanic.exceptions import (
|
||||
ContentRangeError,
|
||||
HeaderNotFound,
|
||||
INTERNAL_SERVER_ERROR_HTML,
|
||||
InvalidRangeType,
|
||||
SanicException,
|
||||
TRACEBACK_BORDER,
|
||||
TRACEBACK_LINE_HTML,
|
||||
TRACEBACK_STYLE,
|
||||
TRACEBACK_WRAPPER_HTML,
|
||||
TRACEBACK_WRAPPER_INNER_HTML,
|
||||
TRACEBACK_BORDER)
|
||||
ContentRangeError,
|
||||
HeaderNotFound,
|
||||
InvalidRangeType,
|
||||
SanicException,
|
||||
)
|
||||
from sanic.log import logger
|
||||
from sanic.response import text, html
|
||||
from sanic.response import html, text
|
||||
|
||||
|
||||
class ErrorHandler:
|
||||
"""
|
||||
Provide the :class:`sanic.app.Sanic` application with a mechanism to handle
|
||||
and process any and all uncaught exceptions in a way the application
|
||||
developer sees fit.
|
||||
|
||||
This error handling framework is built into the core and can be extended
by developers to perform a wide range of tasks, from recording error
stats to reporting them to an external service that can be used for
a real-time alerting system.
|
||||
|
||||
"""
|
||||
|
||||
handlers = None
|
||||
cached_handlers = None
|
||||
_missing = object()
|
||||
@@ -36,7 +50,8 @@ class ErrorHandler:
|
||||
return TRACEBACK_WRAPPER_INNER_HTML.format(
|
||||
exc_name=exception.__class__.__name__,
|
||||
exc_value=exception,
|
||||
frame_html=''.join(frame_html))
|
||||
frame_html="".join(frame_html),
|
||||
)
|
||||
|
||||
def _render_traceback_html(self, exception, request):
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
@@ -51,13 +66,39 @@ class ErrorHandler:
|
||||
exc_name=exception.__class__.__name__,
|
||||
exc_value=exception,
|
||||
inner_html=TRACEBACK_BORDER.join(reversed(exceptions)),
|
||||
path=request.path)
|
||||
path=request.path,
|
||||
)
|
||||
|
||||
def add(self, exception, handler):
|
||||
"""
|
||||
Add a new exception handler to an already existing handler object.
|
||||
|
||||
:param exception: Type of exception that need to be handled
|
||||
:param handler: Reference to the method that will handle the exception
|
||||
|
||||
:type exception: :class:`sanic.exceptions.SanicException` or
|
||||
:class:`Exception`
|
||||
:type handler: ``function``
|
||||
|
||||
:return: None
|
||||
"""
|
||||
self.handlers.append((exception, handler))
|
||||
|
||||
def lookup(self, exception):
|
||||
handler = self.cached_handlers.get(exception, self._missing)
|
||||
"""
|
||||
Look up the existing instance of :class:`ErrorHandler` and fetch the
|
||||
registered handler for a specific type of exception.
|
||||
|
||||
This method leverages a dict lookup to speed up the retrieval process.
|
||||
|
||||
:param exception: Type of exception
|
||||
|
||||
:type exception: :class:`sanic.exceptions.SanicException` or
|
||||
:class:`Exception`
|
||||
|
||||
:return: Registered function if found, ``None`` otherwise
|
||||
"""
|
||||
handler = self.cached_handlers.get(type(exception), self._missing)
|
||||
if handler is self._missing:
|
||||
for exception_class, handler in self.handlers:
|
||||
if isinstance(exception, exception_class):
|
||||
@@ -71,9 +112,15 @@ class ErrorHandler:
|
||||
"""Fetches and executes an exception handler and returns a response
|
||||
object
|
||||
|
||||
:param request: Request
|
||||
:param request: Instance of :class:`sanic.request.Request`
|
||||
:param exception: Exception to handle
|
||||
:return: Response object
|
||||
|
||||
:type request: :class:`sanic.request.Request`
|
||||
:type exception: :class:`sanic.exceptions.SanicException` or
|
||||
:class:`Exception`
|
||||
|
||||
:return: Wrapped return value obtained from :func:`default`
|
||||
or from the registered handler for that type of exception.
|
||||
"""
|
||||
handler = self.lookup(exception)
|
||||
response = None
|
||||
@@ -84,88 +131,130 @@ class ErrorHandler:
|
||||
response = self.default(request, exception)
|
||||
except Exception:
|
||||
self.log(format_exc())
|
||||
if self.debug:
|
||||
url = getattr(request, 'url', 'unknown')
|
||||
response_message = ('Exception raised in exception handler '
|
||||
'"%s" for uri: "%s"\n%s')
|
||||
logger.error(response_message,
|
||||
handler.__name__, url, format_exc())
|
||||
try:
|
||||
url = repr(request.url)
|
||||
except AttributeError:
|
||||
url = "unknown"
|
||||
response_message = (
|
||||
"Exception raised in exception handler " '"%s" for uri: %s'
|
||||
)
|
||||
logger.exception(response_message, handler.__name__, url)
|
||||
|
||||
return text(response_message % (
|
||||
handler.__name__, url, format_exc()), 500)
|
||||
if self.debug:
|
||||
return text(response_message % (handler.__name__, url), 500)
|
||||
else:
|
||||
return text('An error occurred while handling an error', 500)
|
||||
return text("An error occurred while handling an error", 500)
|
||||
return response
|
||||
|
||||
def log(self, message, level='error'):
|
||||
def log(self, message, level="error"):
|
||||
"""
|
||||
Override this method in an ErrorHandler subclass to prevent
|
||||
logging exceptions.
|
||||
Deprecated, do not use.
|
||||
"""
|
||||
getattr(logger, level)(message)
|
||||
|
||||
def default(self, request, exception):
|
||||
"""
|
||||
Provide a default behavior for the objects of :class:`ErrorHandler`.
|
||||
If a developer chooses to extend the :class:`ErrorHandler` they can
|
||||
provide a custom implementation for this method to behave in a way
|
||||
they see fit.
|
||||
|
||||
:param request: Incoming request
|
||||
:param exception: Exception object
|
||||
|
||||
:type request: :class:`sanic.request.Request`
|
||||
:type exception: :class:`sanic.exceptions.SanicException` or
|
||||
:class:`Exception`
|
||||
:return:
|
||||
"""
|
||||
self.log(format_exc())
|
||||
try:
|
||||
url = repr(request.url)
|
||||
except AttributeError:
|
||||
url = "unknown"
|
||||
|
||||
response_message = "Exception occurred while handling uri: %s"
|
||||
logger.exception(response_message, url)
|
||||
|
||||
if issubclass(type(exception), SanicException):
|
||||
return text(
|
||||
'Error: {}'.format(exception),
|
||||
status=getattr(exception, 'status_code', 500),
|
||||
headers=getattr(exception, 'headers', dict())
|
||||
"Error: {}".format(exception),
|
||||
status=getattr(exception, "status_code", 500),
|
||||
headers=getattr(exception, "headers", dict()),
|
||||
)
|
||||
elif self.debug:
|
||||
html_output = self._render_traceback_html(exception, request)
|
||||
|
||||
response_message = ('Exception occurred while handling uri: '
|
||||
'"%s"\n%s')
|
||||
logger.error(response_message, request.url, format_exc())
|
||||
return html(html_output, status=500)
|
||||
else:
|
||||
return html(INTERNAL_SERVER_ERROR_HTML, status=500)
|
||||
|
||||
|
||||
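A short, hedged sketch of extending and registering the ErrorHandler defined above; `report_to_alerting_service` is a hypothetical helper and the app name is illustrative.

from sanic import Sanic
from sanic.handlers import ErrorHandler
from sanic.response import text

class ReportingErrorHandler(ErrorHandler):
    def default(self, request, exception):
        # hook point for shipping uncaught errors to an external service
        report_to_alerting_service(exception)  # hypothetical helper
        return super().default(request, exception)

app = Sanic("example", error_handler=ReportingErrorHandler())

async def handle_key_error(request, exception):
    return text("missing key", status=400)

# same registration the @app.exception(KeyError) decorator performs
app.error_handler.add(KeyError, handle_key_error)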
class ContentRangeHandler:
|
||||
"""Class responsible for parsing request header"""
|
||||
__slots__ = ('start', 'end', 'size', 'total', 'headers')
|
||||
"""
|
||||
A mechanism to parse and process the incoming request headers to
|
||||
extract the content range information.
|
||||
|
||||
:param request: Incoming api request
|
||||
:param stats: Stats related to the content
|
||||
|
||||
:type request: :class:`sanic.request.Request`
|
||||
:type stats: :class:`posix.stat_result`
|
||||
|
||||
:ivar start: Content Range start
|
||||
:ivar end: Content Range end
|
||||
:ivar size: Length of the content
|
||||
:ivar total: Total size identified by the :class:`posix.stat_result`
|
||||
instance
|
||||
:ivar ContentRangeHandler.headers: Content range header ``dict``
|
||||
"""
|
||||
|
||||
__slots__ = ("start", "end", "size", "total", "headers")
|
||||
|
||||
def __init__(self, request, stats):
|
||||
self.total = stats.st_size
|
||||
_range = request.headers.get('Range')
|
||||
_range = request.headers.get("Range")
|
||||
if _range is None:
|
||||
raise HeaderNotFound('Range Header Not Found')
|
||||
unit, _, value = tuple(map(str.strip, _range.partition('=')))
|
||||
if unit != 'bytes':
|
||||
raise HeaderNotFound("Range Header Not Found")
|
||||
unit, _, value = tuple(map(str.strip, _range.partition("=")))
|
||||
if unit != "bytes":
|
||||
raise InvalidRangeType(
|
||||
'%s is not a valid Range Type' % (unit,), self)
|
||||
start_b, _, end_b = tuple(map(str.strip, value.partition('-')))
|
||||
"%s is not a valid Range Type" % (unit,), self
|
||||
)
|
||||
start_b, _, end_b = tuple(map(str.strip, value.partition("-")))
|
||||
try:
|
||||
self.start = int(start_b) if start_b else None
|
||||
except ValueError:
|
||||
raise ContentRangeError(
|
||||
'\'%s\' is invalid for Content Range' % (start_b,), self)
|
||||
"'%s' is invalid for Content Range" % (start_b,), self
|
||||
)
|
||||
try:
|
||||
self.end = int(end_b) if end_b else None
|
||||
except ValueError:
|
||||
raise ContentRangeError(
|
||||
'\'%s\' is invalid for Content Range' % (end_b,), self)
|
||||
"'%s' is invalid for Content Range" % (end_b,), self
|
||||
)
|
||||
if self.end is None:
|
||||
if self.start is None:
|
||||
raise ContentRangeError(
|
||||
'Invalid for Content Range parameters', self)
|
||||
"Invalid for Content Range parameters", self
|
||||
)
|
||||
else:
|
||||
# this case represents `Content-Range: bytes 5-`
|
||||
self.end = self.total
|
||||
self.end = self.total - 1
|
||||
else:
|
||||
if self.start is None:
|
||||
# this case represents `Content-Range: bytes -5`
|
||||
self.start = self.total - self.end
|
||||
self.end = self.total
|
||||
self.end = self.total - 1
|
||||
if self.start >= self.end:
|
||||
raise ContentRangeError(
|
||||
'Invalid for Content Range parameters', self)
|
||||
self.size = self.end - self.start
|
||||
"Invalid for Content Range parameters", self
|
||||
)
|
||||
self.size = self.end - self.start + 1
|
||||
self.headers = {
|
||||
'Content-Range': "bytes %s-%s/%s" % (
|
||||
self.start, self.end, self.total)}
|
||||
"Content-Range": "bytes %s-%s/%s"
|
||||
% (self.start, self.end, self.total)
|
||||
}
|
||||
|
||||
def __bool__(self):
|
||||
return self.size > 0
|
||||
|
||||
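A hedged sketch of how ContentRangeHandler is typically driven from a file-serving handler; the path is illustrative and `file_stream(..., _range=...)` is assumed to accept the parsed range, as the static file helpers do.

import os

from sanic.exceptions import HeaderNotFound
from sanic.handlers import ContentRangeHandler
from sanic.response import file_stream

async def serve_video(request):
    path = "/tmp/video.mp4"  # illustrative path
    stats = os.stat(path)
    try:
        _range = ContentRangeHandler(request, stats)
    except HeaderNotFound:
        _range = None  # no Range header: send the whole file
    # bool(_range) is False when the requested slice is empty
    return await file_stream(path, _range=_range)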
172
sanic/headers.py
Normal file

@@ -0,0 +1,172 @@
|
||||
import re
|
||||
|
||||
from typing import Dict, Iterable, List, Optional, Tuple, Union
|
||||
from urllib.parse import unquote
|
||||
|
||||
|
||||
Options = Dict[str, Union[int, str]] # key=value fields in various headers
|
||||
OptionsIterable = Iterable[Tuple[str, str]] # May contain duplicate keys
|
||||
|
||||
_token, _quoted = r"([\w!#$%&'*+\-.^_`|~]+)", r'"([^"]*)"'
|
||||
_param = re.compile(fr";\s*{_token}=(?:{_token}|{_quoted})", re.ASCII)
|
||||
_firefox_quote_escape = re.compile(r'\\"(?!; |\s*$)')
|
||||
_ipv6 = "(?:[0-9A-Fa-f]{0,4}:){2,7}[0-9A-Fa-f]{0,4}"
|
||||
_ipv6_re = re.compile(_ipv6)
|
||||
_host_re = re.compile(
|
||||
r"((?:\[" + _ipv6 + r"\])|[a-zA-Z0-9.\-]{1,253})(?::(\d{1,5}))?"
|
||||
)
|
||||
|
||||
# RFC's quoted-pair escapes are mostly ignored by browsers. Chrome, Firefox and
|
||||
# curl all have different escaping, that we try to handle as well as possible,
|
||||
# even though no client escapes in a way that would allow perfect handling.
|
||||
|
||||
# For more information, consult ../tests/test_requests.py
|
||||
|
||||
|
||||
def parse_content_header(value: str) -> Tuple[str, Options]:
|
||||
"""Parse content-type and content-disposition header values.
|
||||
|
||||
E.g. 'form-data; name=upload; filename=\"file.txt\"' to
|
||||
('form-data', {'name': 'upload', 'filename': 'file.txt'})
|
||||
|
||||
Mostly identical to cgi.parse_header and werkzeug.parse_options_header
|
||||
but runs faster and handles special characters better. Unescapes quotes.
|
||||
"""
|
||||
value = _firefox_quote_escape.sub("%22", value)
|
||||
pos = value.find(";")
|
||||
if pos == -1:
|
||||
options: Dict[str, Union[int, str]] = {}
|
||||
else:
|
||||
options = {
|
||||
m.group(1).lower(): m.group(2) or m.group(3).replace("%22", '"')
|
||||
for m in _param.finditer(value[pos:])
|
||||
}
|
||||
value = value[:pos]
|
||||
return value.strip().lower(), options
|
||||
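For reference, a quick sketch of what parse_content_header returns, matching the docstring above:

from sanic.headers import parse_content_header

ctype, opts = parse_content_header(
    'form-data; name="upload"; filename="file.txt"'
)
# ctype == "form-data"
# opts == {"name": "upload", "filename": "file.txt"}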
|
||||
|
||||
# https://tools.ietf.org/html/rfc7230#section-3.2.6 and
|
||||
# https://tools.ietf.org/html/rfc7239#section-4
|
||||
# This regex is for *reversed* strings because that works much faster for
|
||||
# right-to-left matching than the other way around. Be wary that all things are
|
||||
# a bit backwards! _rparam matches forwarded pairs alike ";key=value"
|
||||
_rparam = re.compile(f"(?:{_token}|{_quoted})={_token}\\s*($|[;,])", re.ASCII)
|
||||
|
||||
|
||||
def parse_forwarded(headers, config) -> Optional[Options]:
|
||||
"""Parse RFC 7239 Forwarded headers.
|
||||
The value of `by` or `secret` must match `config.FORWARDED_SECRET`
|
||||
:return: dict with keys and values, or None if nothing matched
|
||||
"""
|
||||
header = headers.getall("forwarded", None)
|
||||
secret = config.FORWARDED_SECRET
|
||||
if header is None or not secret:
|
||||
return None
|
||||
header = ",".join(header) # Join multiple header lines
|
||||
if secret not in header:
|
||||
return None
|
||||
# Loop over <separator><key>=<value> elements from right to left
|
||||
sep = pos = None
|
||||
options: List[Tuple[str, str]] = []
|
||||
found = False
|
||||
for m in _rparam.finditer(header[::-1]):
|
||||
# Start of new element? (on parser skips and non-semicolon right sep)
|
||||
if m.start() != pos or sep != ";":
|
||||
# Was the previous element (from right) what we wanted?
|
||||
if found:
|
||||
break
|
||||
# Clear values and parse as new element
|
||||
del options[:]
|
||||
pos = m.end()
|
||||
val_token, val_quoted, key, sep = m.groups()
|
||||
key = key.lower()[::-1]
|
||||
val = (val_token or val_quoted.replace('"\\', '"'))[::-1]
|
||||
options.append((key, val))
|
||||
if key in ("secret", "by") and val == secret:
|
||||
found = True
|
||||
# Check if we would return on next round, to avoid useless parse
|
||||
if found and sep != ";":
|
||||
break
|
||||
# If secret was found, return the matching options in left-to-right order
|
||||
return fwd_normalize(reversed(options)) if found else None
|
||||
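A minimal sketch of parse_forwarded in use. It only yields data when config.FORWARDED_SECRET matches a `by`/`secret` element, so the header value and secret below are illustrative; Sanic's real header object is a case-insensitive multidict, approximated here with CIMultiDict.

from types import SimpleNamespace

from multidict import CIMultiDict

from sanic.headers import parse_forwarded

headers = CIMultiDict(
    Forwarded="for=1.2.3.4;proto=https;host=example.com;secret=mySecret"
)
config = SimpleNamespace(FORWARDED_SECRET="mySecret")

# -> {'for': '1.2.3.4', 'proto': 'https', 'host': 'example.com',
#     'secret': 'mySecret'}
print(parse_forwarded(headers, config))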
|
||||
|
||||
def parse_xforwarded(headers, config) -> Optional[Options]:
|
||||
"""Parse traditional proxy headers."""
|
||||
real_ip_header = config.REAL_IP_HEADER
|
||||
proxies_count = config.PROXIES_COUNT
|
||||
addr = real_ip_header and headers.get(real_ip_header)
|
||||
if not addr and proxies_count:
|
||||
assert proxies_count > 0
|
||||
try:
|
||||
# Combine, split and filter multiple headers' entries
|
||||
forwarded_for = headers.getall(config.FORWARDED_FOR_HEADER)
|
||||
proxies = [
|
||||
p
|
||||
for p in (
|
||||
p.strip() for h in forwarded_for for p in h.split(",")
|
||||
)
|
||||
if p
|
||||
]
|
||||
addr = proxies[-proxies_count]
|
||||
except (KeyError, IndexError):
|
||||
pass
|
||||
# No processing of other headers if no address is found
|
||||
if not addr:
|
||||
return None
|
||||
|
||||
def options():
|
||||
yield "for", addr
|
||||
for key, header in (
|
||||
("proto", "x-scheme"),
|
||||
("proto", "x-forwarded-proto"), # Overrides X-Scheme if present
|
||||
("host", "x-forwarded-host"),
|
||||
("port", "x-forwarded-port"),
|
||||
("path", "x-forwarded-path"),
|
||||
):
|
||||
yield key, headers.get(header)
|
||||
|
||||
return fwd_normalize(options())
|
||||
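A comparable sketch for the traditional X-Forwarded-* path; the config values below are assumptions of this example, not necessarily Sanic's defaults.

from types import SimpleNamespace

from multidict import CIMultiDict

from sanic.headers import parse_xforwarded

headers = CIMultiDict(
    [
        ("X-Forwarded-For", "127.0.0.1, 10.0.0.1, 1.2.3.4"),
        ("X-Forwarded-Proto", "https"),
        ("X-Forwarded-Host", "example.com"),
    ]
)
config = SimpleNamespace(
    REAL_IP_HEADER=None,
    PROXIES_COUNT=2,
    FORWARDED_FOR_HEADER="X-Forwarded-For",
)

# With two trusted proxies, the client is the 2nd entry from the right:
# -> {'for': '10.0.0.1', 'proto': 'https', 'host': 'example.com'}
print(parse_xforwarded(headers, config))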
|
||||
|
||||
def fwd_normalize(fwd: OptionsIterable) -> Options:
|
||||
"""Normalize and convert values extracted from forwarded headers."""
|
||||
ret: Dict[str, Union[int, str]] = {}
|
||||
for key, val in fwd:
|
||||
if val is not None:
|
||||
try:
|
||||
if key in ("by", "for"):
|
||||
ret[key] = fwd_normalize_address(val)
|
||||
elif key in ("host", "proto"):
|
||||
ret[key] = val.lower()
|
||||
elif key == "port":
|
||||
ret[key] = int(val)
|
||||
elif key == "path":
|
||||
ret[key] = unquote(val)
|
||||
else:
|
||||
ret[key] = val
|
||||
except ValueError:
|
||||
pass
|
||||
return ret
|
||||
|
||||
|
||||
def fwd_normalize_address(addr: str) -> str:
|
||||
"""Normalize address fields of proxy headers."""
|
||||
if addr == "unknown":
|
||||
raise ValueError() # omit unknown value identifiers
|
||||
if addr.startswith("_"):
|
||||
return addr # do not lower-case obfuscated strings
|
||||
if _ipv6_re.fullmatch(addr):
|
||||
addr = f"[{addr}]" # bracket IPv6
|
||||
return addr.lower()
|
||||
|
||||
|
||||
def parse_host(host: str) -> Tuple[Optional[str], Optional[int]]:
|
||||
"""Split host:port into hostname and port.
|
||||
:return: None in place of missing elements
|
||||
"""
|
||||
m = _host_re.fullmatch(host)
|
||||
if not m:
|
||||
return None, None
|
||||
host, port = m.groups()
|
||||
return host.lower(), int(port) if port is not None else None
|
||||
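parse_host splits a host:port value, including bracketed IPv6 literals; a few illustrative calls:

from sanic.headers import parse_host

parse_host("example.com:8080")  # -> ("example.com", 8080)
parse_host("example.com")       # -> ("example.com", None)
parse_host("[::1]:8000")        # -> ("[::1]", 8000)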
156
sanic/helpers.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""Defines basics of HTTP standard."""
|
||||
|
||||
from importlib import import_module
|
||||
from inspect import ismodule
|
||||
|
||||
|
||||
STATUS_CODES = {
|
||||
100: b"Continue",
|
||||
101: b"Switching Protocols",
|
||||
102: b"Processing",
|
||||
103: b"Early Hints",
|
||||
200: b"OK",
|
||||
201: b"Created",
|
||||
202: b"Accepted",
|
||||
203: b"Non-Authoritative Information",
|
||||
204: b"No Content",
|
||||
205: b"Reset Content",
|
||||
206: b"Partial Content",
|
||||
207: b"Multi-Status",
|
||||
208: b"Already Reported",
|
||||
226: b"IM Used",
|
||||
300: b"Multiple Choices",
|
||||
301: b"Moved Permanently",
|
||||
302: b"Found",
|
||||
303: b"See Other",
|
||||
304: b"Not Modified",
|
||||
305: b"Use Proxy",
|
||||
307: b"Temporary Redirect",
|
||||
308: b"Permanent Redirect",
|
||||
400: b"Bad Request",
|
||||
401: b"Unauthorized",
|
||||
402: b"Payment Required",
|
||||
403: b"Forbidden",
|
||||
404: b"Not Found",
|
||||
405: b"Method Not Allowed",
|
||||
406: b"Not Acceptable",
|
||||
407: b"Proxy Authentication Required",
|
||||
408: b"Request Timeout",
|
||||
409: b"Conflict",
|
||||
410: b"Gone",
|
||||
411: b"Length Required",
|
||||
412: b"Precondition Failed",
|
||||
413: b"Request Entity Too Large",
|
||||
414: b"Request-URI Too Long",
|
||||
415: b"Unsupported Media Type",
|
||||
416: b"Requested Range Not Satisfiable",
|
||||
417: b"Expectation Failed",
|
||||
418: b"I'm a teapot",
|
||||
422: b"Unprocessable Entity",
|
||||
423: b"Locked",
|
||||
424: b"Failed Dependency",
|
||||
426: b"Upgrade Required",
|
||||
428: b"Precondition Required",
|
||||
429: b"Too Many Requests",
|
||||
431: b"Request Header Fields Too Large",
|
||||
451: b"Unavailable For Legal Reasons",
|
||||
500: b"Internal Server Error",
|
||||
501: b"Not Implemented",
|
||||
502: b"Bad Gateway",
|
||||
503: b"Service Unavailable",
|
||||
504: b"Gateway Timeout",
|
||||
505: b"HTTP Version Not Supported",
|
||||
506: b"Variant Also Negotiates",
|
||||
507: b"Insufficient Storage",
|
||||
508: b"Loop Detected",
|
||||
510: b"Not Extended",
|
||||
511: b"Network Authentication Required",
|
||||
}
|
||||
|
||||
# According to https://tools.ietf.org/html/rfc2616#section-7.1
|
||||
_ENTITY_HEADERS = frozenset(
|
||||
[
|
||||
"allow",
|
||||
"content-encoding",
|
||||
"content-language",
|
||||
"content-length",
|
||||
"content-location",
|
||||
"content-md5",
|
||||
"content-range",
|
||||
"content-type",
|
||||
"expires",
|
||||
"last-modified",
|
||||
"extension-header",
|
||||
]
|
||||
)
|
||||
|
||||
# According to https://tools.ietf.org/html/rfc2616#section-13.5.1
|
||||
_HOP_BY_HOP_HEADERS = frozenset(
|
||||
[
|
||||
"connection",
|
||||
"keep-alive",
|
||||
"proxy-authenticate",
|
||||
"proxy-authorization",
|
||||
"te",
|
||||
"trailers",
|
||||
"transfer-encoding",
|
||||
"upgrade",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def has_message_body(status):
|
||||
"""
|
||||
According to the following RFCs, a message body and length SHOULD NOT
|
||||
be included in responses with status 1XX, 204 and 304.
|
||||
https://tools.ietf.org/html/rfc2616#section-4.4
|
||||
https://tools.ietf.org/html/rfc2616#section-4.3
|
||||
"""
|
||||
return status not in (204, 304) and not (100 <= status < 200)
|
||||
|
||||
|
||||
def is_entity_header(header):
|
||||
"""Checks if the given header is an Entity Header"""
|
||||
return header.lower() in _ENTITY_HEADERS
|
||||
|
||||
|
||||
def is_hop_by_hop_header(header):
|
||||
"""Checks if the given header is a Hop By Hop header"""
|
||||
return header.lower() in _HOP_BY_HOP_HEADERS
|
||||
|
||||
|
||||
def remove_entity_headers(headers, allowed=("content-location", "expires")):
|
||||
"""
|
||||
Removes all the entity headers present in the headers given.
|
||||
According to RFC 2616 Section 10.3.5,
|
||||
Content-Location and Expires are allowed, as they act as a
|
||||
"strong cache validator".
|
||||
https://tools.ietf.org/html/rfc2616#section-10.3.5
|
||||
|
||||
returns the headers without the entity headers
|
||||
"""
|
||||
allowed = set([h.lower() for h in allowed])
|
||||
headers = {
|
||||
header: value
|
||||
for header, value in headers.items()
|
||||
if not is_entity_header(header) or header.lower() in allowed
|
||||
}
|
||||
return headers
|
||||
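A quick sketch of the header helpers above; the header dict is illustrative.

from sanic.helpers import (
    has_message_body,
    is_entity_header,
    remove_entity_headers,
)

has_message_body(204)             # False: 1xx, 204 and 304 carry no body
is_entity_header("Content-Type")  # True

headers = {"Content-Type": "text/html", "Expires": "0", "X-Custom": "1"}
# Content-Type is dropped; Expires survives because it is in `allowed`
remove_entity_headers(headers)    # {'Expires': '0', 'X-Custom': '1'}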
|
||||
|
||||
def import_string(module_name, package=None):
|
||||
"""
|
||||
import a module or class by string path.
|
||||
|
||||
:module_name: str with path of module or path to import and
|
||||
instantiate a class
|
||||
:returns: a module object or one instance from class if
|
||||
module_name is a valid path to class
|
||||
|
||||
"""
|
||||
module, klass = module_name.rsplit(".", 1)
|
||||
module = import_module(module, package=package)
|
||||
obj = getattr(module, klass)
|
||||
if ismodule(obj):
|
||||
return obj
|
||||
return obj()
|
||||
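import_string either returns a module or instantiates the class it points at; a sketch using stdlib paths for illustration:

from sanic.helpers import import_string

# a module path returns the module object itself
json_decoder_module = import_string("json.decoder")

# a class path returns an *instance* of that class
decoder = import_string("json.decoder.JSONDecoder")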
128
sanic/http.py
@@ -1,128 +0,0 @@
|
||||
"""Defines basics of HTTP standard."""
|
||||
|
||||
STATUS_CODES = {
|
||||
100: b'Continue',
|
||||
101: b'Switching Protocols',
|
||||
102: b'Processing',
|
||||
200: b'OK',
|
||||
201: b'Created',
|
||||
202: b'Accepted',
|
||||
203: b'Non-Authoritative Information',
|
||||
204: b'No Content',
|
||||
205: b'Reset Content',
|
||||
206: b'Partial Content',
|
||||
207: b'Multi-Status',
|
||||
208: b'Already Reported',
|
||||
226: b'IM Used',
|
||||
300: b'Multiple Choices',
|
||||
301: b'Moved Permanently',
|
||||
302: b'Found',
|
||||
303: b'See Other',
|
||||
304: b'Not Modified',
|
||||
305: b'Use Proxy',
|
||||
307: b'Temporary Redirect',
|
||||
308: b'Permanent Redirect',
|
||||
400: b'Bad Request',
|
||||
401: b'Unauthorized',
|
||||
402: b'Payment Required',
|
||||
403: b'Forbidden',
|
||||
404: b'Not Found',
|
||||
405: b'Method Not Allowed',
|
||||
406: b'Not Acceptable',
|
||||
407: b'Proxy Authentication Required',
|
||||
408: b'Request Timeout',
|
||||
409: b'Conflict',
|
||||
410: b'Gone',
|
||||
411: b'Length Required',
|
||||
412: b'Precondition Failed',
|
||||
413: b'Request Entity Too Large',
|
||||
414: b'Request-URI Too Long',
|
||||
415: b'Unsupported Media Type',
|
||||
416: b'Requested Range Not Satisfiable',
|
||||
417: b'Expectation Failed',
|
||||
418: b'I\'m a teapot',
|
||||
422: b'Unprocessable Entity',
|
||||
423: b'Locked',
|
||||
424: b'Failed Dependency',
|
||||
426: b'Upgrade Required',
|
||||
428: b'Precondition Required',
|
||||
429: b'Too Many Requests',
|
||||
431: b'Request Header Fields Too Large',
|
||||
451: b'Unavailable For Legal Reasons',
|
||||
500: b'Internal Server Error',
|
||||
501: b'Not Implemented',
|
||||
502: b'Bad Gateway',
|
||||
503: b'Service Unavailable',
|
||||
504: b'Gateway Timeout',
|
||||
505: b'HTTP Version Not Supported',
|
||||
506: b'Variant Also Negotiates',
|
||||
507: b'Insufficient Storage',
|
||||
508: b'Loop Detected',
|
||||
510: b'Not Extended',
|
||||
511: b'Network Authentication Required'
|
||||
}
|
||||
|
||||
# According to https://tools.ietf.org/html/rfc2616#section-7.1
|
||||
_ENTITY_HEADERS = frozenset([
|
||||
'allow',
|
||||
'content-encoding',
|
||||
'content-language',
|
||||
'content-length',
|
||||
'content-location',
|
||||
'content-md5',
|
||||
'content-range',
|
||||
'content-type',
|
||||
'expires',
|
||||
'last-modified',
|
||||
'extension-header'
|
||||
])
|
||||
|
||||
# According to https://tools.ietf.org/html/rfc2616#section-13.5.1
|
||||
_HOP_BY_HOP_HEADERS = frozenset([
|
||||
'connection',
|
||||
'keep-alive',
|
||||
'proxy-authenticate',
|
||||
'proxy-authorization',
|
||||
'te',
|
||||
'trailers',
|
||||
'transfer-encoding',
|
||||
'upgrade'
|
||||
])
|
||||
|
||||
|
||||
def has_message_body(status):
|
||||
"""
|
||||
According to the following RFC message body and length SHOULD NOT
|
||||
be included in responses status 1XX, 204 and 304.
|
||||
https://tools.ietf.org/html/rfc2616#section-4.4
|
||||
https://tools.ietf.org/html/rfc2616#section-4.3
|
||||
"""
|
||||
return status not in (204, 304) and not (100 <= status < 200)
|
||||
|
||||
|
||||
def is_entity_header(header):
|
||||
"""Checks if the given header is an Entity Header"""
|
||||
return header.lower() in _ENTITY_HEADERS
|
||||
|
||||
|
||||
def is_hop_by_hop_header(header):
|
||||
"""Checks if the given header is a Hop By Hop header"""
|
||||
return header.lower() in _HOP_BY_HOP_HEADERS
|
||||
|
||||
|
||||
def remove_entity_headers(headers,
|
||||
allowed=('content-location', 'expires')):
|
||||
"""
|
||||
Removes all the entity headers present in the headers given.
|
||||
According to RFC 2616 Section 10.3.5,
|
||||
Content-Location and Expires are allowed as for the
|
||||
"strong cache validator".
|
||||
https://tools.ietf.org/html/rfc2616#section-10.3.5
|
||||
|
||||
returns the headers without the entity headers
|
||||
"""
|
||||
allowed = set([h.lower() for h in allowed])
|
||||
headers = {header: value for header, value in headers.items()
|
||||
if not is_entity_header(header)
|
||||
and header.lower() not in allowed}
|
||||
return headers
|
||||
35
sanic/log.py
@@ -5,59 +5,54 @@ import sys
|
||||
LOGGING_CONFIG_DEFAULTS = dict(
|
||||
version=1,
|
||||
disable_existing_loggers=False,
|
||||
|
||||
loggers={
|
||||
"root": {
|
||||
"level": "INFO",
|
||||
"handlers": ["console"]
|
||||
},
|
||||
"sanic.root": {"level": "INFO", "handlers": ["console"]},
|
||||
"sanic.error": {
|
||||
"level": "INFO",
|
||||
"handlers": ["error_console"],
|
||||
"propagate": True,
|
||||
"qualname": "sanic.error"
|
||||
"qualname": "sanic.error",
|
||||
},
|
||||
|
||||
"sanic.access": {
|
||||
"level": "INFO",
|
||||
"handlers": ["access_console"],
|
||||
"propagate": True,
|
||||
"qualname": "sanic.access"
|
||||
}
|
||||
"qualname": "sanic.access",
|
||||
},
|
||||
},
|
||||
handlers={
|
||||
"console": {
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "generic",
|
||||
"stream": sys.stdout
|
||||
"stream": sys.stdout,
|
||||
},
|
||||
"error_console": {
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "generic",
|
||||
"stream": sys.stderr
|
||||
"stream": sys.stderr,
|
||||
},
|
||||
"access_console": {
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "access",
|
||||
"stream": sys.stdout
|
||||
"stream": sys.stdout,
|
||||
},
|
||||
},
|
||||
formatters={
|
||||
"generic": {
|
||||
"format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s",
|
||||
"datefmt": "[%Y-%m-%d %H:%M:%S %z]",
|
||||
"class": "logging.Formatter"
|
||||
"class": "logging.Formatter",
|
||||
},
|
||||
"access": {
|
||||
"format": "%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: " +
|
||||
"%(request)s %(message)s %(status)d %(byte)d",
|
||||
"format": "%(asctime)s - (%(name)s)[%(levelname)s][%(host)s]: "
|
||||
+ "%(request)s %(message)s %(status)d %(byte)d",
|
||||
"datefmt": "[%Y-%m-%d %H:%M:%S %z]",
|
||||
"class": "logging.Formatter"
|
||||
"class": "logging.Formatter",
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger('root')
|
||||
error_logger = logging.getLogger('sanic.error')
|
||||
access_logger = logging.getLogger('sanic.access')
|
||||
logger = logging.getLogger("sanic.root")
|
||||
error_logger = logging.getLogger("sanic.error")
|
||||
access_logger = logging.getLogger("sanic.access")
|
||||
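Because the logger names changed ("root" is now "sanic.root"), code that tunes them by name must follow suit; a minimal sketch:

import logging

from sanic.log import logger

# address the renamed logger directly...
logging.getLogger("sanic.root").setLevel(logging.DEBUG)

# ...and this call now flows through "sanic.root" rather than "root"
logger.debug("verbose application logging enabled")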
|
||||
@@ -1,9 +1,10 @@
|
||||
import os
|
||||
import sys
|
||||
import signal
|
||||
import subprocess
|
||||
from time import sleep
|
||||
import sys
|
||||
|
||||
from multiprocessing import Process
|
||||
from time import sleep
|
||||
|
||||
|
||||
def _iter_module_files():
|
||||
@@ -18,7 +19,7 @@ def _iter_module_files():
|
||||
for module in list(sys.modules.values()):
|
||||
if module is None:
|
||||
continue
|
||||
filename = getattr(module, '__file__', None)
|
||||
filename = getattr(module, "__file__", None)
|
||||
if filename:
|
||||
old = None
|
||||
while not os.path.isfile(filename):
|
||||
@@ -27,7 +28,7 @@ def _iter_module_files():
|
||||
if filename == old:
|
||||
break
|
||||
else:
|
||||
if filename[-4:] in ('.pyc', '.pyo'):
|
||||
if filename[-4:] in (".pyc", ".pyo"):
|
||||
filename = filename[:-1]
|
||||
yield filename
|
||||
|
||||
@@ -35,7 +36,15 @@ def _iter_module_files():
|
||||
def _get_args_for_reloading():
|
||||
"""Returns the executable."""
|
||||
rv = [sys.executable]
|
||||
rv.extend(sys.argv)
|
||||
main_module = sys.modules["__main__"]
|
||||
mod_spec = getattr(main_module, "__spec__", None)
|
||||
if mod_spec:
|
||||
# Parent exe was launched as a module rather than a script
|
||||
rv.extend(["-m", mod_spec.name])
|
||||
if len(sys.argv) > 1:
|
||||
rv.extend(sys.argv[1:])
|
||||
else:
|
||||
rv.extend(sys.argv)
|
||||
return rv
|
||||
|
||||
|
||||
@@ -43,13 +52,16 @@ def restart_with_reloader():
|
||||
"""Create a new process and a subprocess in it with the same arguments as
|
||||
this one.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
args = _get_args_for_reloading()
|
||||
new_environ = os.environ.copy()
|
||||
new_environ['SANIC_SERVER_RUNNING'] = 'true'
|
||||
cmd = ' '.join(args)
|
||||
new_environ["SANIC_SERVER_RUNNING"] = "true"
|
||||
cmd = " ".join(args)
|
||||
worker_process = Process(
|
||||
target=subprocess.call, args=(cmd,),
|
||||
kwargs=dict(shell=True, env=new_environ))
|
||||
target=subprocess.call,
|
||||
args=(cmd,),
|
||||
kwargs={"cwd": cwd, "shell": True, "env": new_environ},
|
||||
)
|
||||
worker_process.start()
|
||||
return worker_process
|
||||
|
||||
@@ -67,8 +79,10 @@ def kill_process_children_unix(pid):
|
||||
children_list_pid = children_list_file.read().split()
|
||||
|
||||
for child_pid in children_list_pid:
|
||||
children_proc_path = "/proc/%s/task/%s/children" % \
|
||||
(child_pid, child_pid)
|
||||
children_proc_path = "/proc/%s/task/%s/children" % (
|
||||
child_pid,
|
||||
child_pid,
|
||||
)
|
||||
if not os.path.isfile(children_proc_path):
|
||||
continue
|
||||
with open(children_proc_path) as children_list_file_2:
|
||||
@@ -90,7 +104,7 @@ def kill_process_children_osx(pid):
|
||||
:param pid: PID of parent process (process ID)
|
||||
:return: Nothing
|
||||
"""
|
||||
subprocess.run(['pkill', '-P', str(pid)])
|
||||
subprocess.run(["pkill", "-P", str(pid)])
|
||||
|
||||
|
||||
def kill_process_children(pid):
|
||||
@@ -99,12 +113,12 @@ def kill_process_children(pid):
|
||||
:param pid: PID of parent process (process ID)
|
||||
:return: Nothing
|
||||
"""
|
||||
if sys.platform == 'darwin':
|
||||
if sys.platform == "darwin":
|
||||
kill_process_children_osx(pid)
|
||||
elif sys.platform == 'linux':
|
||||
elif sys.platform == "linux":
|
||||
kill_process_children_unix(pid)
|
||||
else:
|
||||
pass # should signal error here
|
||||
pass # should signal error here
|
||||
|
||||
|
||||
def kill_program_completly(proc):
|
||||
@@ -127,9 +141,11 @@ def watchdog(sleep_interval):
|
||||
mtimes = {}
|
||||
worker_process = restart_with_reloader()
|
||||
signal.signal(
|
||||
signal.SIGTERM, lambda *args: kill_program_completly(worker_process))
|
||||
signal.SIGTERM, lambda *args: kill_program_completly(worker_process)
|
||||
)
|
||||
signal.signal(
|
||||
signal.SIGINT, lambda *args: kill_program_completly(worker_process))
|
||||
signal.SIGINT, lambda *args: kill_program_completly(worker_process)
|
||||
)
|
||||
while True:
|
||||
for filename in _iter_module_files():
|
||||
try:
|
||||
|
||||
532
sanic/request.py
@@ -1,27 +1,31 @@
|
||||
import sys
|
||||
import json
|
||||
import socket
|
||||
from cgi import parse_header
|
||||
from collections import namedtuple
|
||||
from http.cookies import SimpleCookie
|
||||
from httptools import parse_url
|
||||
from urllib.parse import parse_qs, urlunparse
|
||||
import asyncio
|
||||
import email.utils
|
||||
import warnings
|
||||
|
||||
try:
|
||||
from ujson import loads as json_loads
|
||||
except ImportError:
|
||||
if sys.version_info[:2] == (3, 5):
|
||||
def json_loads(data):
|
||||
# on Python 3.5 json.loads only supports str not bytes
|
||||
return json.loads(data.decode())
|
||||
else:
|
||||
json_loads = json.loads
|
||||
from collections import defaultdict, namedtuple
|
||||
from http.cookies import SimpleCookie
|
||||
from types import SimpleNamespace
|
||||
from urllib.parse import parse_qs, parse_qsl, unquote, urlunparse
|
||||
|
||||
from httptools import parse_url # type: ignore
|
||||
|
||||
from sanic.exceptions import InvalidUsage
|
||||
from sanic.headers import (
|
||||
parse_content_header,
|
||||
parse_forwarded,
|
||||
parse_host,
|
||||
parse_xforwarded,
|
||||
)
|
||||
from sanic.log import error_logger, logger
|
||||
|
||||
DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"
|
||||
|
||||
try:
|
||||
from ujson import loads as json_loads # type: ignore
|
||||
except ImportError:
|
||||
from json import loads as json_loads # type: ignore
|
||||
|
||||
DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"
|
||||
EXPECT_HEADER = "EXPECT"
|
||||
|
||||
# HTTP/1.1: https://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.2.1
|
||||
# > If the media type remains unknown, the recipient SHOULD treat it
|
||||
@@ -42,20 +46,58 @@ class RequestParameters(dict):
|
||||
return super().get(name, default)
|
||||
|
||||
|
||||
class Request(dict):
|
||||
class StreamBuffer:
|
||||
def __init__(self, buffer_size=100):
|
||||
self._queue = asyncio.Queue(buffer_size)
|
||||
|
||||
async def read(self):
|
||||
""" Stop reading when gets None """
|
||||
payload = await self._queue.get()
|
||||
self._queue.task_done()
|
||||
return payload
|
||||
|
||||
async def put(self, payload):
|
||||
await self._queue.put(payload)
|
||||
|
||||
def is_full(self):
|
||||
return self._queue.full()
|
||||
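StreamBuffer backs request.stream for streaming handlers; a minimal consumer sketch, assuming the stream=True route flag enables it, with an illustrative route name.

from sanic import Sanic
from sanic.response import text

app = Sanic("example")

@app.post("/upload", stream=True)
async def upload(request):
    total = 0
    while True:
        chunk = await request.stream.read()  # returns None when the body ends
        if chunk is None:
            break
        total += len(chunk)
    return text("received %d bytes" % total)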
|
||||
|
||||
class Request:
|
||||
"""Properties of an HTTP request such as URL, headers, etc."""
|
||||
|
||||
__slots__ = (
|
||||
'app', 'headers', 'version', 'method', '_cookies', 'transport',
|
||||
'body', 'parsed_json', 'parsed_args', 'parsed_form', 'parsed_files',
|
||||
'_ip', '_parsed_url', 'uri_template', 'stream', '_remote_addr',
|
||||
'_socket', '_port', '__weakref__', 'raw_url'
|
||||
"__weakref__",
|
||||
"_cookies",
|
||||
"_ip",
|
||||
"_parsed_url",
|
||||
"_port",
|
||||
"_remote_addr",
|
||||
"_socket",
|
||||
"app",
|
||||
"body",
|
||||
"ctx",
|
||||
"endpoint",
|
||||
"headers",
|
||||
"method",
|
||||
"parsed_args",
|
||||
"parsed_not_grouped_args",
|
||||
"parsed_files",
|
||||
"parsed_form",
|
||||
"parsed_json",
|
||||
"parsed_forwarded",
|
||||
"raw_url",
|
||||
"stream",
|
||||
"transport",
|
||||
"uri_template",
|
||||
"version",
|
||||
)
|
||||
|
||||
def __init__(self, url_bytes, headers, version, method, transport):
|
||||
def __init__(self, url_bytes, headers, version, method, transport, app):
|
||||
self.raw_url = url_bytes
|
||||
# TODO: Content-Encoding detection
|
||||
self._parsed_url = parse_url(url_bytes)
|
||||
self.app = None
|
||||
self.app = app
|
||||
|
||||
self.headers = headers
|
||||
self.version = version
|
||||
@@ -63,26 +105,57 @@ class Request(dict):
|
||||
self.transport = transport
|
||||
|
||||
# Init but do not inhale
|
||||
self.body = []
|
||||
self.body_init()
|
||||
self.ctx = SimpleNamespace()
|
||||
self.parsed_forwarded = None
|
||||
self.parsed_json = None
|
||||
self.parsed_form = None
|
||||
self.parsed_files = None
|
||||
self.parsed_args = None
|
||||
self.parsed_args = defaultdict(RequestParameters)
|
||||
self.parsed_not_grouped_args = defaultdict(list)
|
||||
self.uri_template = None
|
||||
self._cookies = None
|
||||
self.stream = None
|
||||
self.endpoint = None
|
||||
|
||||
def __repr__(self):
|
||||
if self.method is None or not self.path:
|
||||
return '<{0}>'.format(self.__class__.__name__)
|
||||
return '<{0}: {1} {2}>'.format(self.__class__.__name__,
|
||||
self.method,
|
||||
self.path)
|
||||
return "<{0}: {1} {2}>".format(
|
||||
self.__class__.__name__, self.method, self.path
|
||||
)
|
||||
|
||||
def __bool__(self):
|
||||
if self.transport:
|
||||
return True
|
||||
return False
|
||||
def get(self, key, default=None):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
return self.ctx.__dict__.get(key, default)
|
||||
|
||||
def __contains__(self, key):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
return key in self.ctx.__dict__
|
||||
|
||||
def __getitem__(self, key):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
return self.ctx.__dict__[key]
|
||||
|
||||
def __delitem__(self, key):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
del self.ctx.__dict__[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
""".. deprecated:: 19.9
|
||||
Custom context is now stored in `request.custom_context.yourkey`"""
|
||||
setattr(self.ctx, key, value)
|
||||
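The dict-style access above is kept only as a deprecation shim; new code should use the ctx namespace directly. A small sketch, assuming an existing `app` and a hypothetical `fetch_user` coroutine:

@app.middleware("request")
async def attach_user(request):
    # per-request state lives on the ctx namespace now
    request.ctx.user = await fetch_user(request)  # fetch_user is hypothetical

# the deprecated equivalents still resolve through the shims above:
#   request["user"], "user" in request, request.get("user")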
|
||||
def body_init(self):
|
||||
self.body = []
|
||||
|
||||
def body_push(self, data):
|
||||
self.body.append(data)
|
||||
|
||||
def body_finish(self):
|
||||
self.body = b"".join(self.body)
|
||||
|
||||
@property
|
||||
def json(self):
|
||||
@@ -107,8 +180,8 @@ class Request(dict):
|
||||
|
||||
:return: token related to request
|
||||
"""
|
||||
prefixes = ('Bearer', 'Token')
|
||||
auth_header = self.headers.get('Authorization')
|
||||
prefixes = ("Bearer", "Token")
|
||||
auth_header = self.headers.get("Authorization")
|
||||
|
||||
if auth_header is not None:
|
||||
for prefix in prefixes:
|
||||
@@ -123,17 +196,20 @@ class Request(dict):
|
||||
self.parsed_form = RequestParameters()
|
||||
self.parsed_files = RequestParameters()
|
||||
content_type = self.headers.get(
|
||||
'Content-Type', DEFAULT_HTTP_CONTENT_TYPE)
|
||||
content_type, parameters = parse_header(content_type)
|
||||
"Content-Type", DEFAULT_HTTP_CONTENT_TYPE
|
||||
)
|
||||
content_type, parameters = parse_content_header(content_type)
|
||||
try:
|
||||
if content_type == 'application/x-www-form-urlencoded':
|
||||
if content_type == "application/x-www-form-urlencoded":
|
||||
self.parsed_form = RequestParameters(
|
||||
parse_qs(self.body.decode('utf-8')))
|
||||
elif content_type == 'multipart/form-data':
|
||||
parse_qs(self.body.decode("utf-8"))
|
||||
)
|
||||
elif content_type == "multipart/form-data":
|
||||
# TODO: Stream this instead of reading to/from memory
|
||||
boundary = parameters['boundary'].encode('utf-8')
|
||||
self.parsed_form, self.parsed_files = (
|
||||
parse_multipart_form(self.body, boundary))
|
||||
boundary = parameters["boundary"].encode("utf-8")
|
||||
self.parsed_form, self.parsed_files = parse_multipart_form(
|
||||
self.body, boundary
|
||||
)
|
||||
except Exception:
|
||||
error_logger.exception("Failed when parsing form")
|
||||
|
||||
@@ -146,106 +222,269 @@ class Request(dict):
|
||||
|
||||
return self.parsed_files
|
||||
|
||||
@property
|
||||
def args(self):
|
||||
if self.parsed_args is None:
|
||||
def get_args(
|
||||
self,
|
||||
keep_blank_values: bool = False,
|
||||
strict_parsing: bool = False,
|
||||
encoding: str = "utf-8",
|
||||
errors: str = "replace",
|
||||
) -> RequestParameters:
|
||||
"""
|
||||
Method to parse `query_string` using `urllib.parse.parse_qs`.
|
||||
This method is used by the `args` property.
|
||||
Can be used directly if you need to change default parameters.
|
||||
|
||||
:param keep_blank_values:
|
||||
flag indicating whether blank values in
|
||||
percent-encoded queries should be treated as blank strings.
|
||||
A true value indicates that blanks should be retained as blank
|
||||
strings. The default false value indicates that blank values
|
||||
are to be ignored and treated as if they were not included.
|
||||
:type keep_blank_values: bool
|
||||
:param strict_parsing:
|
||||
flag indicating what to do with parsing errors.
|
||||
If false (the default), errors are silently ignored. If true,
|
||||
errors raise a ValueError exception.
|
||||
:type strict_parsing: bool
|
||||
:param encoding:
|
||||
specify how to decode percent-encoded sequences
|
||||
into Unicode characters, as accepted by the bytes.decode() method.
|
||||
:type encoding: str
|
||||
:param errors:
|
||||
specify how to decode percent-encoded sequences
|
||||
into Unicode characters, as accepted by the bytes.decode() method.
|
||||
:type errors: str
|
||||
:return: RequestParameters
|
||||
"""
|
||||
if not self.parsed_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
]:
|
||||
if self.query_string:
|
||||
self.parsed_args = RequestParameters(
|
||||
parse_qs(self.query_string))
|
||||
else:
|
||||
self.parsed_args = RequestParameters()
|
||||
return self.parsed_args
|
||||
self.parsed_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
] = RequestParameters(
|
||||
parse_qs(
|
||||
qs=self.query_string,
|
||||
keep_blank_values=keep_blank_values,
|
||||
strict_parsing=strict_parsing,
|
||||
encoding=encoding,
|
||||
errors=errors,
|
||||
)
|
||||
)
|
||||
|
||||
return self.parsed_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
]
|
||||
|
||||
args = property(get_args)
|
||||
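get_args caches one RequestParameters object per parameter combination, so request.args and explicit calls coexist; an illustrative sketch for a query string of ?q=sanic&tags=web&tags=async&empty= :

request.args                  # {'q': ['sanic'], 'tags': ['web', 'async']}
request.args.get("q")         # 'sanic' (first value only)
request.args.getlist("tags")  # ['web', 'async']

# blank values are dropped by default; opt back in explicitly
request.get_args(keep_blank_values=True)
# {'q': ['sanic'], 'tags': ['web', 'async'], 'empty': ['']}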
|
||||
@property
|
||||
def raw_args(self):
|
||||
def raw_args(self) -> dict:
|
||||
if self.app.debug: # pragma: no cover
|
||||
warnings.simplefilter("default")
|
||||
warnings.warn(
|
||||
"Use of raw_args will be deprecated in "
|
||||
"the future versions. Please use args or query_args "
|
||||
"properties instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
return {k: v[0] for k, v in self.args.items()}
|
||||
|
||||
def get_query_args(
|
||||
self,
|
||||
keep_blank_values: bool = False,
|
||||
strict_parsing: bool = False,
|
||||
encoding: str = "utf-8",
|
||||
errors: str = "replace",
|
||||
) -> list:
|
||||
"""
|
||||
Method to parse `query_string` using `urllib.parse.parse_qsl`.
|
||||
This method is used by the `query_args` property.
|
||||
Can be used directly if you need to change default parameters.
|
||||
|
||||
:param keep_blank_values:
|
||||
flag indicating whether blank values in
|
||||
percent-encoded queries should be treated as blank strings.
|
||||
A true value indicates that blanks should be retained as blank
|
||||
strings. The default false value indicates that blank values
|
||||
are to be ignored and treated as if they were not included.
|
||||
:type keep_blank_values: bool
|
||||
:param strict_parsing:
|
||||
flag indicating what to do with parsing errors.
|
||||
If false (the default), errors are silently ignored. If true,
|
||||
errors raise a ValueError exception.
|
||||
:type strict_parsing: bool
|
||||
:param encoding:
|
||||
specify how to decode percent-encoded sequences
|
||||
into Unicode characters, as accepted by the bytes.decode() method.
|
||||
:type encoding: str
|
||||
:param errors:
|
||||
specify how to decode percent-encoded sequences
|
||||
into Unicode characters, as accepted by the bytes.decode() method.
|
||||
:type errors: str
|
||||
:return: list
|
||||
"""
|
||||
if not self.parsed_not_grouped_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
]:
|
||||
if self.query_string:
|
||||
self.parsed_not_grouped_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
] = parse_qsl(
|
||||
qs=self.query_string,
|
||||
keep_blank_values=keep_blank_values,
|
||||
strict_parsing=strict_parsing,
|
||||
encoding=encoding,
|
||||
errors=errors,
|
||||
)
|
||||
return self.parsed_not_grouped_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
]
|
||||
|
||||
query_args = property(get_query_args)
|
||||
|
||||
@property
|
||||
def cookies(self):
|
||||
if self._cookies is None:
|
||||
cookie = self.headers.get('Cookie')
|
||||
cookie = self.headers.get("Cookie")
|
||||
if cookie is not None:
|
||||
cookies = SimpleCookie()
|
||||
cookies.load(cookie)
|
||||
self._cookies = {name: cookie.value
|
||||
for name, cookie in cookies.items()}
|
||||
self._cookies = {
|
||||
name: cookie.value for name, cookie in cookies.items()
|
||||
}
|
||||
else:
|
||||
self._cookies = {}
|
||||
return self._cookies
|
||||
|
||||
@property
|
||||
def ip(self):
|
||||
if not hasattr(self, '_socket'):
|
||||
"""
|
||||
:return: peer ip of the socket
|
||||
"""
|
||||
if not hasattr(self, "_socket"):
|
||||
self._get_address()
|
||||
return self._ip
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
if not hasattr(self, '_socket'):
|
||||
"""
|
||||
:return: peer port of the socket
|
||||
"""
|
||||
if not hasattr(self, "_socket"):
|
||||
self._get_address()
|
||||
return self._port
|
||||
|
||||
@property
|
||||
def socket(self):
|
||||
if not hasattr(self, '_socket'):
|
||||
if not hasattr(self, "_socket"):
|
||||
self._get_address()
|
||||
return self._socket
|
||||
|
||||
def _get_address(self):
|
||||
sock = self.transport.get_extra_info('socket')
|
||||
self._socket = self.transport.get_extra_info("peername") or (
|
||||
None,
|
||||
None,
|
||||
)
|
||||
self._ip = self._socket[0]
|
||||
self._port = self._socket[1]
|
||||
|
||||
if sock.family == socket.AF_INET:
|
||||
self._socket = (self.transport.get_extra_info('peername') or
|
||||
(None, None))
|
||||
self._ip, self._port = self._socket
|
||||
elif sock.family == socket.AF_INET6:
|
||||
self._socket = (self.transport.get_extra_info('peername') or
|
||||
(None, None, None, None))
|
||||
self._ip, self._port, *_ = self._socket
|
||||
else:
|
||||
self._ip, self._port = (None, None)
|
||||
@property
|
||||
def server_name(self):
|
||||
"""
|
||||
Attempt to get the server's external hostname in this order:
|
||||
`config.SERVER_NAME`, then proxied or direct Host headers
|
||||
via :func:`Request.host`
|
||||
|
||||
:return: the server name without port number
|
||||
:rtype: str
|
||||
"""
|
||||
server_name = self.app.config.get("SERVER_NAME")
|
||||
if server_name:
|
||||
host = server_name.split("//", 1)[-1].split("/", 1)[0]
|
||||
return parse_host(host)[0]
|
||||
return parse_host(self.host)[0]
|
||||
|
||||
@property
|
||||
def forwarded(self):
|
||||
if self.parsed_forwarded is None:
|
||||
self.parsed_forwarded = (
|
||||
parse_forwarded(self.headers, self.app.config)
|
||||
or parse_xforwarded(self.headers, self.app.config)
|
||||
or {}
|
||||
)
|
||||
return self.parsed_forwarded
|
||||
|
||||
@property
|
||||
def server_port(self):
|
||||
"""
|
||||
Attempt to get the server's external port number in this order:
|
||||
`config.SERVER_NAME`, then proxied or direct Host headers
|
||||
via :func:`Request.host`,
|
||||
actual port used by the transport layer socket.
|
||||
:return: server port
|
||||
:rtype: int
|
||||
"""
|
||||
if self.forwarded:
|
||||
return self.forwarded.get("port") or (
|
||||
80 if self.scheme in ("http", "ws") else 443
|
||||
)
|
||||
return (
|
||||
parse_host(self.host)[1]
|
||||
or self.transport.get_extra_info("sockname")[1]
|
||||
)
|
||||
|
||||
@property
|
||||
def remote_addr(self):
|
||||
"""Attempt to return the original client ip based on X-Forwarded-For.
|
||||
"""Attempt to return the original client ip based on `forwarded`,
|
||||
`x-forwarded-for` or `x-real-ip`. If HTTP headers are unavailable or
|
||||
untrusted, returns an empty string.
|
||||
|
||||
:return: original client ip.
|
||||
"""
|
||||
if not hasattr(self, '_remote_addr'):
|
||||
forwarded_for = self.headers.get('X-Forwarded-For', '').split(',')
|
||||
remote_addrs = [
|
||||
addr for addr in [
|
||||
addr.strip() for addr in forwarded_for
|
||||
] if addr
|
||||
]
|
||||
if len(remote_addrs) > 0:
|
||||
self._remote_addr = remote_addrs[0]
|
||||
else:
|
||||
self._remote_addr = ''
|
||||
if not hasattr(self, "_remote_addr"):
|
||||
self._remote_addr = self.forwarded.get("for", "")
|
||||
return self._remote_addr
|
||||
|
||||
@property
|
||||
def scheme(self):
|
||||
if self.app.websocket_enabled \
|
||||
and self.headers.get('upgrade') == 'websocket':
|
||||
scheme = 'ws'
|
||||
else:
|
||||
scheme = 'http'
|
||||
"""
|
||||
Attempt to get the request scheme.
|
||||
Seeking the value in this order:
|
||||
`forwarded` header, `x-forwarded-proto` header,
|
||||
`x-scheme` header, the sanic app itself.
|
||||
|
||||
if self.transport.get_extra_info('sslcontext'):
|
||||
scheme += 's'
|
||||
:return: http|https|ws|wss or arbitrary value given by the headers.
|
||||
:rtype: str
|
||||
"""
|
||||
forwarded_proto = self.forwarded.get("proto")
|
||||
if forwarded_proto:
|
||||
return forwarded_proto
|
||||
|
||||
if (
|
||||
self.app.websocket_enabled
|
||||
and self.headers.get("upgrade") == "websocket"
|
||||
):
|
||||
scheme = "ws"
|
||||
else:
|
||||
scheme = "http"
|
||||
|
||||
if self.transport.get_extra_info("sslcontext"):
|
||||
scheme += "s"
|
||||
|
||||
return scheme
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
# it appears that httptools doesn't return the host
|
||||
# so pull it from the headers
|
||||
return self.headers.get('Host', '')
|
||||
"""
|
||||
:return: proxied or direct Host header. Hostname and port number may be
|
||||
separated by sanic.headers.parse_host(request.host).
|
||||
"""
|
||||
return self.forwarded.get("host", self.headers.get("Host", ""))
|
||||
|
||||
@property
|
||||
def content_type(self):
|
||||
return self.headers.get('Content-Type', DEFAULT_HTTP_CONTENT_TYPE)
|
||||
return self.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)
|
||||
|
||||
@property
|
||||
def match_info(self):
|
||||
@@ -254,27 +493,52 @@ class Request(dict):
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return self._parsed_url.path.decode('utf-8')
|
||||
return self._parsed_url.path.decode("utf-8")
|
||||
|
||||
@property
|
||||
def query_string(self):
|
||||
if self._parsed_url.query:
|
||||
return self._parsed_url.query.decode('utf-8')
|
||||
return self._parsed_url.query.decode("utf-8")
|
||||
else:
|
||||
return ''
|
||||
return ""
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return urlunparse((
|
||||
self.scheme,
|
||||
self.host,
|
||||
self.path,
|
||||
None,
|
||||
self.query_string,
|
||||
None))
|
||||
return urlunparse(
|
||||
(self.scheme, self.host, self.path, None, self.query_string, None)
|
||||
)
|
||||
|
||||
def url_for(self, view_name, **kwargs):
|
||||
"""
|
||||
Same as :func:`sanic.Sanic.url_for`, but automatically determine
|
||||
`scheme` and `netloc` based on the request. Since this method aims
|
||||
to generate a correct scheme & netloc, `_external` is implied.
|
||||
|
||||
:param kwargs: takes same parameters as in :func:`sanic.Sanic.url_for`
|
||||
:return: an absolute url to the given view
|
||||
:rtype: str
|
||||
"""
|
||||
# Full URL SERVER_NAME can only be handled in app.url_for
|
||||
if "//" in self.app.config.SERVER_NAME:
|
||||
return self.app.url_for(view_name, _external=True, **kwargs)
|
||||
|
||||
scheme = self.scheme
|
||||
host = self.server_name
|
||||
port = self.server_port
|
||||
|
||||
if (scheme.lower() in ("http", "ws") and port == 80) or (
|
||||
scheme.lower() in ("https", "wss") and port == 443
|
||||
):
|
||||
netloc = host
|
||||
else:
|
||||
netloc = "{}:{}".format(host, port)
|
||||
|
||||
return self.app.url_for(
|
||||
view_name, _external=True, _scheme=scheme, _server=netloc, **kwargs
|
||||
)
|
||||
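A short sketch of request.url_for; the route, handler and hostname are illustrative and `app` is assumed to exist.

from sanic.response import text

@app.route("/posts/<post_id>", name="post_detail")
async def post_detail(request, post_id):
    return text(post_id)

# inside another handler: absolute URL built from the request's
# scheme/host/port, proxy headers included via `forwarded`
url = request.url_for("post_detail", post_id=5)
# e.g. "https://example.com/posts/5" behind an HTTPS proxy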
|
||||
|
||||
File = namedtuple('File', ['type', 'body', 'name'])
|
||||
File = namedtuple("File", ["type", "body", "name"])
|
||||
|
||||
|
||||
def parse_multipart_form(body, boundary):
|
||||
@@ -290,49 +554,59 @@ def parse_multipart_form(body, boundary):
|
||||
form_parts = body.split(boundary)
|
||||
for form_part in form_parts[1:-1]:
|
||||
file_name = None
|
||||
content_type = 'text/plain'
|
||||
content_charset = 'utf-8'
|
||||
content_type = "text/plain"
|
||||
content_charset = "utf-8"
|
||||
field_name = None
|
||||
line_index = 2
|
||||
line_end_index = 0
|
||||
while not line_end_index == -1:
|
||||
line_end_index = form_part.find(b'\r\n', line_index)
|
||||
form_line = form_part[line_index:line_end_index].decode('utf-8')
|
||||
line_end_index = form_part.find(b"\r\n", line_index)
|
||||
form_line = form_part[line_index:line_end_index].decode("utf-8")
|
||||
line_index = line_end_index + 2
|
||||
|
||||
if not form_line:
|
||||
break
|
||||
|
||||
colon_index = form_line.index(':')
|
||||
colon_index = form_line.index(":")
|
||||
form_header_field = form_line[0:colon_index].lower()
|
||||
form_header_value, form_parameters = parse_header(
|
||||
form_line[colon_index + 2:])
|
||||
form_header_value, form_parameters = parse_content_header(
|
||||
form_line[colon_index + 2 :]
|
||||
)
|
||||
|
||||
if form_header_field == 'content-disposition':
|
||||
file_name = form_parameters.get('filename')
|
||||
field_name = form_parameters.get('name')
|
||||
elif form_header_field == 'content-type':
|
||||
if form_header_field == "content-disposition":
|
||||
field_name = form_parameters.get("name")
|
||||
file_name = form_parameters.get("filename")
|
||||
|
||||
# non-ASCII filenames in RFC2231, "filename*" format
|
||||
if file_name is None and form_parameters.get("filename*"):
|
||||
encoding, _, value = email.utils.decode_rfc2231(
|
||||
form_parameters["filename*"]
|
||||
)
|
||||
file_name = unquote(value, encoding=encoding)
|
||||
elif form_header_field == "content-type":
|
||||
content_type = form_header_value
|
||||
content_charset = form_parameters.get('charset', 'utf-8')
|
||||
content_charset = form_parameters.get("charset", "utf-8")
|
||||
|
||||
if field_name:
|
||||
post_data = form_part[line_index:-4]
|
||||
if file_name:
|
||||
form_file = File(type=content_type,
|
||||
name=file_name,
|
||||
body=post_data)
|
||||
if field_name in files:
|
||||
files[field_name].append(form_file)
|
||||
else:
|
||||
files[field_name] = [form_file]
|
||||
else:
|
||||
if file_name is None:
|
||||
value = post_data.decode(content_charset)
|
||||
if field_name in fields:
|
||||
fields[field_name].append(value)
|
||||
else:
|
||||
fields[field_name] = [value]
|
||||
else:
|
||||
form_file = File(
|
||||
type=content_type, name=file_name, body=post_data
|
||||
)
|
||||
if field_name in files:
|
||||
files[field_name].append(form_file)
|
||||
else:
|
||||
files[field_name] = [form_file]
|
||||
else:
|
||||
logger.debug('Form-data field does not have a \'name\' parameter \
|
||||
in the Content-Disposition header')
|
||||
logger.debug(
|
||||
"Form-data field does not have a 'name' parameter "
|
||||
"in the Content-Disposition header"
|
||||
)
|
||||
|
||||
return fields, files
|
||||
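A small sketch of the parser above on a hand-built multipart body; the boundary is arbitrary and, as in Request.form, it is passed without the leading dashes.

from sanic.request import parse_multipart_form

body = (
    b"--simple-boundary\r\n"
    b'Content-Disposition: form-data; name="title"\r\n\r\n'
    b"hello\r\n"
    b"--simple-boundary\r\n"
    b'Content-Disposition: form-data; name="upload"; filename="a.txt"\r\n'
    b"Content-Type: text/plain\r\n\r\n"
    b"file body\r\n"
    b"--simple-boundary--\r\n"
)

fields, files = parse_multipart_form(body, b"simple-boundary")
# fields["title"] == ["hello"]
# files["upload"][0] == File(type="text/plain", body=b"file body", name="a.txt")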
|
||||
@@ -1,17 +1,23 @@
|
||||
from functools import partial
|
||||
from mimetypes import guess_type
|
||||
from os import path
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from aiofiles import open as open_async # type: ignore
|
||||
|
||||
from sanic.compat import Header
|
||||
from sanic.cookies import CookieJar
|
||||
from sanic.helpers import STATUS_CODES, has_message_body, remove_entity_headers
|
||||
|
||||
|
||||
try:
|
||||
from ujson import dumps as json_dumps
|
||||
except BaseException:
|
||||
from json import dumps as json_dumps
|
||||
except ImportError:
|
||||
from json import dumps
|
||||
|
||||
from aiofiles import open as open_async
|
||||
from multidict import CIMultiDict
|
||||
|
||||
from sanic import http
|
||||
from sanic.cookies import CookieJar
|
||||
# This is done in order to ensure that the JSON response is
|
||||
# kept consistent across both ujson and inbuilt json usage.
|
||||
json_dumps = partial(dumps, separators=(",", ":"))
|
||||
|
||||
|
||||
class BaseHTTPResponse:
|
||||
@@ -24,16 +30,18 @@ class BaseHTTPResponse:
|
||||
return str(data).encode()
|
||||
|
||||
def _parse_headers(self):
|
||||
headers = b''
|
||||
headers = b""
|
||||
for name, value in self.headers.items():
|
||||
try:
|
||||
headers += (
|
||||
b'%b: %b\r\n' % (
|
||||
name.encode(), value.encode('utf-8')))
|
||||
headers += b"%b: %b\r\n" % (
|
||||
name.encode(),
|
||||
value.encode("utf-8"),
|
||||
)
|
||||
except AttributeError:
|
||||
headers += (
|
||||
b'%b: %b\r\n' % (
|
||||
str(name).encode(), str(value).encode('utf-8')))
|
||||
headers += b"%b: %b\r\n" % (
|
||||
str(name).encode(),
|
||||
str(value).encode("utf-8"),
|
||||
)
|
||||
|
||||
return headers
|
||||
|
||||
@@ -46,19 +54,31 @@ class BaseHTTPResponse:
|
||||
|
||||
class StreamingHTTPResponse(BaseHTTPResponse):
|
||||
__slots__ = (
|
||||
'transport', 'streaming_fn', 'status',
|
||||
'content_type', 'headers', '_cookies'
|
||||
"protocol",
|
||||
"streaming_fn",
|
||||
"status",
|
||||
"content_type",
|
||||
"headers",
|
||||
"chunked",
|
||||
"_cookies",
|
||||
)
|
||||
|
||||
def __init__(self, streaming_fn, status=200, headers=None,
|
||||
content_type='text/plain'):
|
||||
def __init__(
|
||||
self,
|
||||
streaming_fn,
|
||||
status=200,
|
||||
headers=None,
|
||||
content_type="text/plain",
|
||||
chunked=True,
|
||||
):
|
||||
self.content_type = content_type
|
||||
self.streaming_fn = streaming_fn
|
||||
self.status = status
|
||||
self.headers = CIMultiDict(headers or {})
|
||||
self.headers = Header(headers or {})
|
||||
self.chunked = chunked
|
||||
self._cookies = None
|
||||
|
||||
def write(self, data):
|
||||
async def write(self, data):
|
||||
"""Writes a chunk of data to the streaming response.
|
||||
|
||||
:param data: bytes-ish data to be written.
|
||||
@@ -66,58 +86,76 @@ class StreamingHTTPResponse(BaseHTTPResponse):
|
||||
if type(data) != bytes:
|
||||
data = self._encode_body(data)
|
||||
|
||||
self.transport.write(
|
||||
b"%x\r\n%b\r\n" % (len(data), data))
|
||||
if self.chunked:
|
||||
await self.protocol.push_data(b"%x\r\n%b\r\n" % (len(data), data))
|
||||
else:
|
||||
await self.protocol.push_data(data)
|
||||
await self.protocol.drain()
|
||||
|
||||
async def stream(
|
||||
self, version="1.1", keep_alive=False, keep_alive_timeout=None):
|
||||
self, version="1.1", keep_alive=False, keep_alive_timeout=None
|
||||
):
|
||||
"""Streams headers, runs the `streaming_fn` callback that writes
|
||||
content to the response body, then finalizes the response body.
|
||||
"""
|
||||
if version != "1.1":
|
||||
self.chunked = False
|
||||
headers = self.get_headers(
|
||||
version, keep_alive=keep_alive,
|
||||
keep_alive_timeout=keep_alive_timeout)
|
||||
self.transport.write(headers)
|
||||
|
||||
version,
|
||||
keep_alive=keep_alive,
|
||||
keep_alive_timeout=keep_alive_timeout,
|
||||
)
|
||||
await self.protocol.push_data(headers)
|
||||
await self.protocol.drain()
|
||||
await self.streaming_fn(self)
|
||||
self.transport.write(b'0\r\n\r\n')
|
||||
if self.chunked:
|
||||
await self.protocol.push_data(b"0\r\n\r\n")
|
||||
# no need to await drain here after this write, because it is the
|
||||
# very last thing we write and nothing needs to wait for it.
|
||||
|
||||
def get_headers(
|
||||
self, version="1.1", keep_alive=False, keep_alive_timeout=None):
|
||||
self, version="1.1", keep_alive=False, keep_alive_timeout=None
|
||||
):
|
||||
# This is all returned in a kind-of funky way
|
||||
# We tried to make this as fast as possible in pure python
|
||||
timeout_header = b''
|
||||
timeout_header = b""
|
||||
if keep_alive and keep_alive_timeout is not None:
|
||||
timeout_header = b'Keep-Alive: %d\r\n' % keep_alive_timeout
|
||||
timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout
|
||||
|
||||
self.headers['Transfer-Encoding'] = 'chunked'
|
||||
self.headers.pop('Content-Length', None)
|
||||
self.headers['Content-Type'] = self.headers.get(
|
||||
'Content-Type', self.content_type)
|
||||
if self.chunked and version == "1.1":
|
||||
self.headers["Transfer-Encoding"] = "chunked"
|
||||
self.headers.pop("Content-Length", None)
|
||||
self.headers["Content-Type"] = self.headers.get(
|
||||
"Content-Type", self.content_type
|
||||
)
|
||||
|
||||
headers = self._parse_headers()
|
||||
|
||||
if self.status is 200:
|
||||
status = b'OK'
|
||||
if self.status == 200:
|
||||
status = b"OK"
|
||||
else:
|
||||
status = http.STATUS_CODES.get(self.status)
|
||||
status = STATUS_CODES.get(self.status)
|
||||
|
||||
return (b'HTTP/%b %d %b\r\n'
|
||||
b'%b'
|
||||
b'%b\r\n') % (
|
||||
version.encode(),
|
||||
self.status,
|
||||
status,
|
||||
timeout_header,
|
||||
headers
|
||||
)
|
||||
return (b"HTTP/%b %d %b\r\n" b"%b" b"%b\r\n") % (
|
||||
version.encode(),
|
||||
self.status,
|
||||
status,
|
||||
timeout_header,
|
||||
headers,
|
||||
)
|
||||
|
||||
|
||||
class HTTPResponse(BaseHTTPResponse):
|
||||
__slots__ = ('body', 'status', 'content_type', 'headers', '_cookies')
|
||||
__slots__ = ("body", "status", "content_type", "headers", "_cookies")
|
||||
|
||||
def __init__(self, body=None, status=200, headers=None,
|
||||
content_type='text/plain', body_bytes=b''):
|
||||
def __init__(
|
||||
self,
|
||||
body=None,
|
||||
status=200,
|
||||
headers=None,
|
||||
content_type="text/plain",
|
||||
body_bytes=b"",
|
||||
):
|
||||
self.content_type = content_type
|
||||
|
||||
if body is not None:
|
||||
@@ -126,49 +164,48 @@ class HTTPResponse(BaseHTTPResponse):
|
||||
self.body = body_bytes
|
||||
|
||||
self.status = status
|
||||
self.headers = CIMultiDict(headers or {})
|
||||
self.headers = Header(headers or {})
|
||||
self._cookies = None
|
||||
|
||||
def output(
|
||||
self, version="1.1", keep_alive=False, keep_alive_timeout=None):
|
||||
def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
|
||||
# This is all returned in a kind-of funky way
|
||||
# We tried to make this as fast as possible in pure python
|
||||
timeout_header = b''
|
||||
timeout_header = b""
|
||||
if keep_alive and keep_alive_timeout is not None:
|
||||
timeout_header = b'Keep-Alive: %d\r\n' % keep_alive_timeout
|
||||
timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout
|
||||
|
||||
body = b''
|
||||
if http.has_message_body(self.status):
|
||||
body = b""
|
||||
if has_message_body(self.status):
|
||||
body = self.body
|
||||
self.headers['Content-Length'] = self.headers.get(
|
||||
'Content-Length', len(self.body))
|
||||
self.headers["Content-Length"] = self.headers.get(
|
||||
"Content-Length", len(self.body)
|
||||
)
|
||||
|
||||
self.headers['Content-Type'] = self.headers.get(
|
||||
'Content-Type', self.content_type)
|
||||
self.headers["Content-Type"] = self.headers.get(
|
||||
"Content-Type", self.content_type
|
||||
)
|
||||
|
||||
if self.status in (304, 412):
|
||||
self.headers = http.remove_entity_headers(self.headers)
|
||||
self.headers = remove_entity_headers(self.headers)
|
||||
|
||||
headers = self._parse_headers()
|
||||
|
||||
if self.status is 200:
|
||||
status = b'OK'
|
||||
if self.status == 200:
|
||||
status = b"OK"
|
||||
else:
|
||||
status = http.STATUS_CODES.get(self.status, b'UNKNOWN RESPONSE')
|
||||
status = STATUS_CODES.get(self.status, b"UNKNOWN RESPONSE")
|
||||
|
||||
return (b'HTTP/%b %d %b\r\n'
|
||||
b'Connection: %b\r\n'
|
||||
b'%b'
|
||||
b'%b\r\n'
|
||||
b'%b') % (
|
||||
version.encode(),
|
||||
self.status,
|
||||
status,
|
||||
b'keep-alive' if keep_alive else b'close',
|
||||
timeout_header,
|
||||
headers,
|
||||
body
|
||||
)
|
||||
return (
|
||||
b"HTTP/%b %d %b\r\n" b"Connection: %b\r\n" b"%b" b"%b\r\n" b"%b"
|
||||
) % (
|
||||
version.encode(),
|
||||
self.status,
|
||||
status,
|
||||
b"keep-alive" if keep_alive else b"close",
|
||||
timeout_header,
|
||||
headers,
|
||||
body,
|
||||
)
|
||||
|
||||
@property
|
||||
def cookies(self):
|
||||
@@ -177,9 +214,14 @@ class HTTPResponse(BaseHTTPResponse):
|
||||
return self._cookies
|
||||
|
||||
|
||||
def json(body, status=200, headers=None,
|
||||
content_type="application/json", dumps=json_dumps,
|
||||
**kwargs):
|
||||
def json(
|
||||
body,
|
||||
status=200,
|
||||
headers=None,
|
||||
content_type="application/json",
|
||||
dumps=json_dumps,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Returns response object with body in json format.
|
||||
|
||||
@@ -188,12 +230,17 @@ def json(body, status=200, headers=None,
|
||||
:param headers: Custom Headers.
|
||||
:param kwargs: Remaining arguments that are passed to the json encoder.
|
||||
"""
|
||||
return HTTPResponse(dumps(body, **kwargs), headers=headers,
|
||||
status=status, content_type=content_type)
|
||||
return HTTPResponse(
|
||||
dumps(body, **kwargs),
|
||||
headers=headers,
|
||||
status=status,
|
||||
content_type=content_type,
|
||||
)
|
||||
|
||||
|
||||
def text(body, status=200, headers=None,
|
||||
content_type="text/plain; charset=utf-8"):
|
||||
def text(
|
||||
body, status=200, headers=None, content_type="text/plain; charset=utf-8"
|
||||
):
|
||||
"""
|
||||
Returns response object with body in text format.
|
||||
|
||||
@@ -203,12 +250,13 @@ def text(body, status=200, headers=None,
|
||||
:param content_type: the content type (string) of the response
|
||||
"""
|
||||
return HTTPResponse(
|
||||
body, status=status, headers=headers,
|
||||
content_type=content_type)
|
||||
body, status=status, headers=headers, content_type=content_type
|
||||
)
|
||||
|
||||
|
||||
def raw(body, status=200, headers=None,
|
||||
content_type="application/octet-stream"):
|
||||
def raw(
|
||||
body, status=200, headers=None, content_type="application/octet-stream"
|
||||
):
|
||||
"""
|
||||
Returns response object without encoding the body.
|
||||
|
||||
@@ -217,8 +265,12 @@ def raw(body, status=200, headers=None,
|
||||
:param headers: Custom Headers.
|
||||
:param content_type: the content type (string) of the response.
|
||||
"""
|
||||
return HTTPResponse(body_bytes=body, status=status, headers=headers,
|
||||
content_type=content_type)
|
||||
return HTTPResponse(
|
||||
body_bytes=body,
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=content_type,
|
||||
)
|
||||
|
||||
|
||||
def html(body, status=200, headers=None):
|
||||
@@ -229,12 +281,22 @@ def html(body, status=200, headers=None):
|
||||
:param status: Response code.
|
||||
:param headers: Custom Headers.
|
||||
"""
|
||||
return HTTPResponse(body, status=status, headers=headers,
|
||||
content_type="text/html; charset=utf-8")
|
||||
return HTTPResponse(
|
||||
body,
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type="text/html; charset=utf-8",
|
||||
)
|
||||
|
||||
|
||||
async def file(location, status=200, mime_type=None, headers=None,
|
||||
filename=None, _range=None):
|
||||
async def file(
|
||||
location,
|
||||
status=200,
|
||||
mime_type=None,
|
||||
headers=None,
|
||||
filename=None,
|
||||
_range=None,
|
||||
):
|
||||
"""Return a response object with file data.
|
||||
|
||||
:param location: Location of file on system.
|
||||
@@ -246,28 +308,42 @@ async def file(location, status=200, mime_type=None, headers=None,
|
||||
headers = headers or {}
|
||||
if filename:
|
||||
headers.setdefault(
|
||||
'Content-Disposition',
|
||||
'attachment; filename="{}"'.format(filename))
|
||||
"Content-Disposition", 'attachment; filename="{}"'.format(filename)
|
||||
)
|
||||
filename = filename or path.split(location)[-1]
|
||||
|
||||
async with open_async(location, mode='rb') as _file:
|
||||
async with open_async(location, mode="rb") as _file:
|
||||
if _range:
|
||||
await _file.seek(_range.start)
|
||||
out_stream = await _file.read(_range.size)
|
||||
headers['Content-Range'] = 'bytes %s-%s/%s' % (
|
||||
_range.start, _range.end, _range.total)
|
||||
headers["Content-Range"] = "bytes %s-%s/%s" % (
|
||||
_range.start,
|
||||
_range.end,
|
||||
_range.total,
|
||||
)
|
||||
status = 206
|
||||
else:
|
||||
out_stream = await _file.read()
|
||||
|
||||
mime_type = mime_type or guess_type(filename)[0] or 'text/plain'
|
||||
return HTTPResponse(status=status,
|
||||
headers=headers,
|
||||
content_type=mime_type,
|
||||
body_bytes=out_stream)
|
||||
mime_type = mime_type or guess_type(filename)[0] or "text/plain"
|
||||
return HTTPResponse(
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=mime_type,
|
||||
body_bytes=out_stream,
|
||||
)
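
A hedged usage sketch of the file() helper above; the app instance, route and file path are assumptions for illustration only, not something taken from this changeset.

from sanic.response import file

@app.route("/report")
async def report(request):
    return await file(
        "./static/report.pdf",
        filename="report.pdf",          # adds Content-Disposition: attachment
        mime_type="application/pdf",
    )
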
|
||||
|
||||
|
||||
async def file_stream(location, status=200, chunk_size=4096, mime_type=None,
|
||||
headers=None, filename=None, _range=None):
|
||||
async def file_stream(
|
||||
location,
|
||||
status=200,
|
||||
chunk_size=4096,
|
||||
mime_type=None,
|
||||
headers=None,
|
||||
filename=None,
|
||||
chunked=True,
|
||||
_range=None,
|
||||
):
|
||||
"""Return a streaming response object with file data.
|
||||
|
||||
:param location: Location of file on system.
|
||||
@@ -275,16 +351,17 @@ async def file_stream(location, status=200, chunk_size=4096, mime_type=None,
|
||||
:param mime_type: Specific mime_type.
|
||||
:param headers: Custom Headers.
|
||||
:param filename: Override filename.
|
||||
:param chunked: Enable or disable chunked transfer-encoding
|
||||
:param _range:
|
||||
"""
|
||||
headers = headers or {}
|
||||
if filename:
|
||||
headers.setdefault(
|
||||
'Content-Disposition',
|
||||
'attachment; filename="{}"'.format(filename))
|
||||
"Content-Disposition", 'attachment; filename="{}"'.format(filename)
|
||||
)
|
||||
filename = filename or path.split(location)[-1]
|
||||
|
||||
_file = await open_async(location, mode='rb')
|
||||
_file = await open_async(location, mode="rb")
|
||||
|
||||
async def _streaming_fn(response):
|
||||
nonlocal _file, chunk_size
|
||||
@@ -298,30 +375,41 @@ async def file_stream(location, status=200, chunk_size=4096, mime_type=None,
|
||||
if len(content) < 1:
|
||||
break
|
||||
to_send -= len(content)
|
||||
response.write(content)
|
||||
await response.write(content)
|
||||
else:
|
||||
while True:
|
||||
content = await _file.read(chunk_size)
|
||||
if len(content) < 1:
|
||||
break
|
||||
response.write(content)
|
||||
await response.write(content)
|
||||
finally:
|
||||
await _file.close()
|
||||
return # Returning from this fn closes the stream
|
||||
|
||||
mime_type = mime_type or guess_type(filename)[0] or 'text/plain'
|
||||
mime_type = mime_type or guess_type(filename)[0] or "text/plain"
|
||||
if _range:
|
||||
headers['Content-Range'] = 'bytes %s-%s/%s' % (
|
||||
_range.start, _range.end, _range.total)
|
||||
return StreamingHTTPResponse(streaming_fn=_streaming_fn,
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=mime_type)
|
||||
headers["Content-Range"] = "bytes %s-%s/%s" % (
|
||||
_range.start,
|
||||
_range.end,
|
||||
_range.total,
|
||||
)
|
||||
status = 206
|
||||
return StreamingHTTPResponse(
|
||||
streaming_fn=_streaming_fn,
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=mime_type,
|
||||
chunked=chunked,
|
||||
)
|
||||
|
||||
|
||||
def stream(
|
||||
streaming_fn, status=200, headers=None,
|
||||
content_type="text/plain; charset=utf-8"):
|
||||
streaming_fn,
|
||||
status=200,
|
||||
headers=None,
|
||||
content_type="text/plain; charset=utf-8",
|
||||
chunked=True,
|
||||
):
|
||||
"""Accepts an coroutine `streaming_fn` which can be used to
|
||||
write chunks to a streaming response. Returns a `StreamingHTTPResponse`.
|
||||
|
||||
@@ -339,17 +427,20 @@ def stream(
|
||||
writes content to that response.
|
||||
:param mime_type: Specific mime_type.
|
||||
:param headers: Custom Headers.
|
||||
:param chunked: Enable or disable chunked transfer-encoding
|
||||
"""
|
||||
return StreamingHTTPResponse(
|
||||
streaming_fn,
|
||||
headers=headers,
|
||||
content_type=content_type,
|
||||
status=status
|
||||
status=status,
|
||||
chunked=chunked,
|
||||
)
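
A hedged usage sketch of the stream() helper as it behaves after this change, where response.write() must now be awaited; "app" is an assumed Sanic instance and the payload is illustrative.

from sanic.response import stream

@app.route("/feed")
async def feed(request):
    async def streaming_fn(response):
        # each write() becomes one chunk on the wire (see the framing note above)
        await response.write("part 1\n")
        await response.write("part 2\n")

    return stream(streaming_fn, content_type="text/plain; charset=utf-8")
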
|
||||
|
||||
|
||||
def redirect(to, headers=None, status=302,
|
||||
content_type="text/html; charset=utf-8"):
|
||||
def redirect(
|
||||
to, headers=None, status=302, content_type="text/html; charset=utf-8"
|
||||
):
|
||||
"""Abort execution and cause a 302 redirect (by default).
|
||||
|
||||
:param to: path or fully qualified URL to redirect to
|
||||
@@ -361,12 +452,11 @@ def redirect(to, headers=None, status=302,
|
||||
headers = headers or {}
|
||||
|
||||
# URL Quote the URL before redirecting
|
||||
safe_to = quote_plus(to, safe=":/#?&=@[]!$&'()*+,;")
|
||||
safe_to = quote_plus(to, safe=":/%#?&=@[]!$&'()*+,;")
|
||||
|
||||
# According to RFC 7231, a relative URI is now permitted.
|
||||
headers['Location'] = safe_to
|
||||
headers["Location"] = safe_to
|
||||
|
||||
return HTTPResponse(
|
||||
status=status,
|
||||
headers=headers,
|
||||
content_type=content_type)
|
||||
status=status, headers=headers, content_type=content_type
|
||||
)
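
The notable change above is adding "%" to the safe set, so a Location value that is already percent-encoded passes through instead of being encoded a second time. A small self-contained check (the URL is made up):

from urllib.parse import quote_plus

to = "/search?q=caf%C3%A9"
assert quote_plus(to, safe=":/%#?&=@[]!$&'()*+,;") == to   # "%" kept, no double-encoding
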
|
||||
|
||||
209
sanic/router.py
@@ -1,33 +1,38 @@
|
||||
import re
|
||||
import uuid
|
||||
|
||||
from collections import defaultdict, namedtuple
|
||||
from collections.abc import Iterable
|
||||
from functools import lru_cache
|
||||
from urllib.parse import unquote
|
||||
|
||||
from sanic.exceptions import NotFound, MethodNotSupported
|
||||
from sanic.exceptions import MethodNotSupported, NotFound
|
||||
from sanic.views import CompositionView
|
||||
|
||||
|
||||
Route = namedtuple(
|
||||
'Route',
|
||||
['handler', 'methods', 'pattern', 'parameters', 'name', 'uri'])
|
||||
Parameter = namedtuple('Parameter', ['name', 'cast'])
|
||||
"Route", ["handler", "methods", "pattern", "parameters", "name", "uri"]
|
||||
)
|
||||
Parameter = namedtuple("Parameter", ["name", "cast"])
|
||||
|
||||
REGEX_TYPES = {
|
||||
'string': (str, r'[^/]+'),
|
||||
'int': (int, r'\d+'),
|
||||
'number': (float, r'[0-9\\.]+'),
|
||||
'alpha': (str, r'[A-Za-z]+'),
|
||||
'path': (str, r'[^/].*?'),
|
||||
'uuid': (uuid.UUID, r'[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-'
|
||||
r'[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}')
|
||||
"string": (str, r"[^/]+"),
|
||||
"int": (int, r"-?\d+"),
|
||||
"number": (float, r"-?(?:\d+(?:\.\d*)?|\.\d+)"),
|
||||
"alpha": (str, r"[A-Za-z]+"),
|
||||
"path": (str, r"[^/].*?"),
|
||||
"uuid": (
|
||||
uuid.UUID,
|
||||
r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-"
|
||||
r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}",
|
||||
),
|
||||
}
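
The tightened patterns above are worth a sanity check: "int" now accepts a leading minus, and "number" accepts signed and decimal forms while rejecting strings such as "1.2.3" that the old character-class pattern let through. A quick, self-contained check of the new "number" pattern:

import re

number = re.compile(r"^(-?(?:\d+(?:\.\d*)?|\.\d+))$")
assert all(number.match(s) for s in ("3", "-2.5", ".75", "1."))
assert not number.match("1.2.3")   # the old r"[0-9\\.]+" pattern matched this
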
|
||||
|
||||
ROUTER_CACHE_SIZE = 1024
|
||||
|
||||
|
||||
def url_hash(url):
|
||||
return url.count('/')
|
||||
return url.count("/")
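
A one-line illustration (the URLs are made up) of why the slash count works as a bucket key for routes_dynamic: a request can only match a pattern with the same number of path segments.

assert "/v1/users/<user_id:int>".count("/") == "/v1/users/42".count("/") == 3
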
|
||||
|
||||
|
||||
class RouteExists(Exception):
|
||||
@@ -38,6 +43,10 @@ class RouteDoesNotExist(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ParameterNameConflicts(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Router:
|
||||
"""Router supports basic routing with parameters and method checks
|
||||
|
||||
@@ -64,10 +73,11 @@ class Router:
|
||||
also be passed in as the type. The argument given to the function will
|
||||
always be a string, independent of the type.
|
||||
"""
|
||||
|
||||
routes_static = None
|
||||
routes_dynamic = None
|
||||
routes_always_check = None
|
||||
parameter_pattern = re.compile(r'<(.+?)>')
|
||||
parameter_pattern = re.compile(r"<(.+?)>")
|
||||
|
||||
def __init__(self):
|
||||
self.routes_all = {}
|
||||
@@ -94,9 +104,9 @@ class Router:
|
||||
"""
|
||||
# We could receive NAME or NAME:PATTERN
|
||||
name = parameter_string
|
||||
pattern = 'string'
|
||||
if ':' in parameter_string:
|
||||
name, pattern = parameter_string.split(':', 1)
|
||||
pattern = "string"
|
||||
if ":" in parameter_string:
|
||||
name, pattern = parameter_string.split(":", 1)
|
||||
if not name:
|
||||
raise ValueError(
|
||||
"Invalid parameter syntax: {}".format(parameter_string)
|
||||
@@ -108,8 +118,16 @@ class Router:
|
||||
|
||||
return name, _type, pattern
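
For readers skimming the hunk above, a stripped-down sketch of the NAME / NAME:PATTERN convention it implements; the real method additionally maps the pattern through REGEX_TYPES and rejects empty names.

def parse(parameter_string):
    # "string" is the default parameter type when no ":" is present
    name, pattern = parameter_string, "string"
    if ":" in parameter_string:
        name, pattern = parameter_string.split(":", 1)
    return name, pattern

assert parse("user_id") == ("user_id", "string")
assert parse("user_id:int") == ("user_id", "int")
assert parse("rest:[a-z/]+") == ("rest", "[a-z/]+")   # custom regex also allowed
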
|
||||
|
||||
def add(self, uri, methods, handler, host=None, strict_slashes=False,
|
||||
version=None, name=None):
|
||||
def add(
|
||||
self,
|
||||
uri,
|
||||
methods,
|
||||
handler,
|
||||
host=None,
|
||||
strict_slashes=False,
|
||||
version=None,
|
||||
name=None,
|
||||
):
|
||||
"""Add a handler to the route list
|
||||
|
||||
:param uri: path to match
|
||||
@@ -123,8 +141,8 @@ class Router:
|
||||
:return: Nothing
|
||||
"""
|
||||
if version is not None:
|
||||
version = re.escape(str(version).strip('/').lstrip('v'))
|
||||
uri = "/".join(["/v{}".format(version), uri.lstrip('/')])
|
||||
version = re.escape(str(version).strip("/").lstrip("v"))
|
||||
uri = "/".join(["/v{}".format(version), uri.lstrip("/")])
|
||||
# add regular version
|
||||
self._add(uri, methods, handler, host, name)
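
What the version handling above produces for a concrete, illustrative input: a route registered with version=1 is mounted under a "/v1" prefix.

import re

version, uri = 1, "/users"
version = re.escape(str(version).strip("/").lstrip("v"))
uri = "/".join(["/v{}".format(version), uri.lstrip("/")])
assert uri == "/v1/users"
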
|
||||
|
||||
@@ -139,28 +157,26 @@ class Router:
|
||||
return
|
||||
|
||||
# Add versions with and without trailing /
|
||||
slashed_methods = self.routes_all.get(uri + '/', frozenset({}))
|
||||
slashed_methods = self.routes_all.get(uri + "/", frozenset({}))
|
||||
unslashed_methods = self.routes_all.get(uri[:-1], frozenset({}))
|
||||
if isinstance(methods, Iterable):
|
||||
_slash_is_missing = all(method in slashed_methods for
|
||||
method in methods)
|
||||
_without_slash_is_missing = all(method in unslashed_methods for
|
||||
method in methods)
|
||||
_slash_is_missing = all(
|
||||
method in slashed_methods for method in methods
|
||||
)
|
||||
_without_slash_is_missing = all(
|
||||
method in unslashed_methods for method in methods
|
||||
)
|
||||
else:
|
||||
_slash_is_missing = methods in slashed_methods
|
||||
_without_slash_is_missing = methods in unslashed_methods
|
||||
|
||||
slash_is_missing = (
|
||||
not uri[-1] == '/' and not _slash_is_missing
|
||||
)
|
||||
slash_is_missing = not uri[-1] == "/" and not _slash_is_missing
|
||||
without_slash_is_missing = (
|
||||
uri[-1] == '/' and not
|
||||
_without_slash_is_missing and not
|
||||
uri == '/'
|
||||
uri[-1] == "/" and not _without_slash_is_missing and not uri == "/"
|
||||
)
|
||||
# add version with trailing slash
|
||||
if slash_is_missing:
|
||||
self._add(uri + '/', methods, handler, host, name)
|
||||
self._add(uri + "/", methods, handler, host, name)
|
||||
# add version without trailing slash
|
||||
elif without_slash_is_missing:
|
||||
self._add(uri[:-1], methods, handler, host, name)
|
||||
@@ -183,8 +199,10 @@ class Router:
|
||||
|
||||
else:
|
||||
if not isinstance(host, Iterable):
|
||||
raise ValueError("Expected either string or Iterable of "
|
||||
"host strings, not {!r}".format(host))
|
||||
raise ValueError(
|
||||
"Expected either string or Iterable of "
|
||||
"host strings, not {!r}".format(host)
|
||||
)
|
||||
|
||||
for host_ in host:
|
||||
self.add(uri, methods, handler, host_, name)
|
||||
@@ -195,40 +213,48 @@ class Router:
|
||||
methods = frozenset(methods)
|
||||
|
||||
parameters = []
|
||||
parameter_names = set()
|
||||
properties = {"unhashable": None}
|
||||
|
||||
def add_parameter(match):
|
||||
name = match.group(1)
|
||||
name, _type, pattern = self.parse_parameter_string(name)
|
||||
|
||||
parameter = Parameter(
|
||||
name=name, cast=_type)
|
||||
if name in parameter_names:
|
||||
raise ParameterNameConflicts(
|
||||
"Multiple parameter named <{name}> "
|
||||
"in route uri {uri}".format(name=name, uri=uri)
|
||||
)
|
||||
parameter_names.add(name)
|
||||
|
||||
parameter = Parameter(name=name, cast=_type)
|
||||
parameters.append(parameter)
|
||||
|
||||
# Mark the whole route as unhashable if it has the hash key in it
|
||||
if re.search(r'(^|[^^]){1}/', pattern):
|
||||
properties['unhashable'] = True
|
||||
if re.search(r"(^|[^^]){1}/", pattern):
|
||||
properties["unhashable"] = True
|
||||
# Mark the route as unhashable if it matches the hash key
|
||||
elif re.search(r'/', pattern):
|
||||
properties['unhashable'] = True
|
||||
elif re.search(r"/", pattern):
|
||||
properties["unhashable"] = True
|
||||
|
||||
return '({})'.format(pattern)
|
||||
return "({})".format(pattern)
|
||||
|
||||
pattern_string = re.sub(self.parameter_pattern, add_parameter, uri)
|
||||
pattern = re.compile(r'^{}$'.format(pattern_string))
|
||||
pattern = re.compile(r"^{}$".format(pattern_string))
|
||||
|
||||
def merge_route(route, methods, handler):
|
||||
# merge to the existing route when possible.
|
||||
if not route.methods or not methods:
|
||||
# method-unspecified routes are not mergeable.
|
||||
raise RouteExists(
|
||||
"Route already registered: {}".format(uri))
|
||||
raise RouteExists("Route already registered: {}".format(uri))
|
||||
elif route.methods.intersection(methods):
|
||||
# already existing method is not overloadable.
|
||||
duplicated = methods.intersection(route.methods)
|
||||
raise RouteExists(
|
||||
"Route already registered: {} [{}]".format(
|
||||
uri, ','.join(list(duplicated))))
|
||||
uri, ",".join(list(duplicated))
|
||||
)
|
||||
)
|
||||
if isinstance(route.handler, CompositionView):
|
||||
view = route.handler
|
||||
else:
|
||||
@@ -236,19 +262,22 @@ class Router:
|
||||
view.add(route.methods, route.handler)
|
||||
view.add(methods, handler)
|
||||
route = route._replace(
|
||||
handler=view, methods=methods.union(route.methods))
|
||||
handler=view, methods=methods.union(route.methods)
|
||||
)
|
||||
return route
|
||||
|
||||
if parameters:
|
||||
# TODO: This is too complex, we need to reduce the complexity
|
||||
if properties['unhashable']:
|
||||
if properties["unhashable"]:
|
||||
routes_to_check = self.routes_always_check
|
||||
ndx, route = self.check_dynamic_route_exists(
|
||||
pattern, routes_to_check, parameters)
|
||||
pattern, routes_to_check, parameters
|
||||
)
|
||||
else:
|
||||
routes_to_check = self.routes_dynamic[url_hash(uri)]
|
||||
ndx, route = self.check_dynamic_route_exists(
|
||||
pattern, routes_to_check, parameters)
|
||||
pattern, routes_to_check, parameters
|
||||
)
|
||||
if ndx != -1:
|
||||
# Pop the ndx of the route, no dups of the same route
|
||||
routes_to_check.pop(ndx)
|
||||
@@ -259,35 +288,41 @@ class Router:
|
||||
# if available
|
||||
# special prefix for static files
|
||||
is_static = False
|
||||
if name and name.startswith('_static_'):
|
||||
if name and name.startswith("_static_"):
|
||||
is_static = True
|
||||
name = name.split('_static_', 1)[-1]
|
||||
name = name.split("_static_", 1)[-1]
|
||||
|
||||
if hasattr(handler, '__blueprintname__'):
|
||||
handler_name = '{}.{}'.format(
|
||||
handler.__blueprintname__, name or handler.__name__)
|
||||
if hasattr(handler, "__blueprintname__"):
|
||||
handler_name = "{}.{}".format(
|
||||
handler.__blueprintname__, name or handler.__name__
|
||||
)
|
||||
else:
|
||||
handler_name = name or getattr(handler, '__name__', None)
|
||||
handler_name = name or getattr(handler, "__name__", None)
|
||||
|
||||
if route:
|
||||
route = merge_route(route, methods, handler)
|
||||
else:
|
||||
route = Route(
|
||||
handler=handler, methods=methods, pattern=pattern,
|
||||
parameters=parameters, name=handler_name, uri=uri)
|
||||
handler=handler,
|
||||
methods=methods,
|
||||
pattern=pattern,
|
||||
parameters=parameters,
|
||||
name=handler_name,
|
||||
uri=uri,
|
||||
)
|
||||
|
||||
self.routes_all[uri] = route
|
||||
if is_static:
|
||||
pair = self.routes_static_files.get(handler_name)
|
||||
if not (pair and (pair[0] + '/' == uri or uri + '/' == pair[0])):
|
||||
if not (pair and (pair[0] + "/" == uri or uri + "/" == pair[0])):
|
||||
self.routes_static_files[handler_name] = (uri, route)
|
||||
|
||||
else:
|
||||
pair = self.routes_names.get(handler_name)
|
||||
if not (pair and (pair[0] + '/' == uri or uri + '/' == pair[0])):
|
||||
if not (pair and (pair[0] + "/" == uri or uri + "/" == pair[0])):
|
||||
self.routes_names[handler_name] = (uri, route)
|
||||
|
||||
if properties['unhashable']:
|
||||
if properties["unhashable"]:
|
||||
self.routes_always_check.append(route)
|
||||
elif parameters:
|
||||
self.routes_dynamic[url_hash(uri)].append(route)
|
||||
@@ -296,6 +331,17 @@ class Router:
|
||||
|
||||
@staticmethod
|
||||
def check_dynamic_route_exists(pattern, routes_to_check, parameters):
|
||||
"""
|
||||
Check if a URL pattern exists in a list of routes provided based on
|
||||
the comparison of URL pattern and the parameters.
|
||||
|
||||
:param pattern: URL parameter pattern
|
||||
:param routes_to_check: list of dynamic routes either hashable or
|
||||
unhashable routes.
|
||||
:param parameters: List of :class:`Parameter` items
|
||||
:return: Tuple of index and route if matching route exists else
|
||||
-1 for index and None for route
|
||||
"""
|
||||
for ndx, route in enumerate(routes_to_check):
|
||||
if route.pattern == pattern and route.parameters == parameters:
|
||||
return ndx, route
|
||||
@@ -322,8 +368,10 @@ class Router:
|
||||
|
||||
if route in self.routes_always_check:
|
||||
self.routes_always_check.remove(route)
|
||||
elif url_hash(uri) in self.routes_dynamic \
|
||||
and route in self.routes_dynamic[url_hash(uri)]:
|
||||
elif (
|
||||
url_hash(uri) in self.routes_dynamic
|
||||
and route in self.routes_dynamic[url_hash(uri)]
|
||||
):
|
||||
self.routes_dynamic[url_hash(uri)].remove(route)
|
||||
else:
|
||||
self.routes_static.pop(uri)
|
||||
@@ -342,7 +390,7 @@ class Router:
|
||||
if not view_name:
|
||||
return (None, None)
|
||||
|
||||
if view_name == 'static' or view_name.endswith('.static'):
|
||||
if view_name == "static" or view_name.endswith(".static"):
|
||||
return self.routes_static_files.get(name, (None, None))
|
||||
|
||||
return self.routes_names.get(view_name, (None, None))
|
||||
@@ -356,14 +404,16 @@ class Router:
|
||||
"""
|
||||
# No virtual hosts specified; default behavior
|
||||
if not self.hosts:
|
||||
return self._get(request.path, request.method, '')
|
||||
return self._get(request.path, request.method, "")
|
||||
# virtual hosts specified; try to match route to the host header
|
||||
|
||||
try:
|
||||
return self._get(request.path, request.method,
|
||||
request.headers.get("Host", ''))
|
||||
return self._get(
|
||||
request.path, request.method, request.headers.get("Host", "")
|
||||
)
|
||||
# try default hosts
|
||||
except NotFound:
|
||||
return self._get(request.path, request.method, '')
|
||||
return self._get(request.path, request.method, "")
|
||||
|
||||
def get_supported_methods(self, url):
|
||||
"""Get a list of supported methods for a url and optional host.
|
||||
@@ -373,7 +423,7 @@ class Router:
|
||||
"""
|
||||
route = self.routes_all.get(url)
|
||||
# if methods are None then this logic will prevent an error
|
||||
return getattr(route, 'methods', None) or frozenset()
|
||||
return getattr(route, "methods", None) or frozenset()
|
||||
|
||||
@lru_cache(maxsize=ROUTER_CACHE_SIZE)
|
||||
def _get(self, url, method, host):
|
||||
@@ -388,9 +438,10 @@ class Router:
|
||||
# Check against known static routes
|
||||
route = self.routes_static.get(url)
|
||||
method_not_supported = MethodNotSupported(
|
||||
'Method {} not allowed for URL {}'.format(method, url),
|
||||
"Method {} not allowed for URL {}".format(method, url),
|
||||
method=method,
|
||||
allowed_methods=self.get_supported_methods(url))
|
||||
allowed_methods=self.get_supported_methods(url),
|
||||
)
|
||||
if route:
|
||||
if route.methods and method not in route.methods:
|
||||
raise method_not_supported
|
||||
@@ -416,13 +467,14 @@ class Router:
|
||||
# Route was found but the methods didn't match
|
||||
if route_found:
|
||||
raise method_not_supported
|
||||
raise NotFound('Requested URL {} not found'.format(url))
|
||||
raise NotFound("Requested URL {} not found".format(url))
|
||||
|
||||
kwargs = {p.name: p.cast(value)
|
||||
for value, p
|
||||
in zip(match.groups(1), route.parameters)}
|
||||
kwargs = {
|
||||
p.name: p.cast(value)
|
||||
for value, p in zip(match.groups(1), route.parameters)
|
||||
}
|
||||
route_handler = route.handler
|
||||
if hasattr(route_handler, 'handlers'):
|
||||
if hasattr(route_handler, "handlers"):
|
||||
route_handler = route_handler.handlers[method]
|
||||
return route_handler, [], kwargs, route.uri
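
The kwargs comprehension above is where captured URL fragments get cast through each Parameter's type. A self-contained sketch of the same mechanism with made-up values; the real pattern and parameter list are built by _add().

import re
import uuid

pattern = re.compile(r"^/users/(-?\d+)/items/([A-Fa-f0-9-]{36})$")
casts = [("user_id", int), ("item_id", uuid.UUID)]

match = pattern.match("/users/42/items/3f0c6f12-8a7e-4f4e-9b5a-0d9f0c2f1a2b")
kwargs = {
    name: cast(value)
    for value, (name, cast) in zip(match.groups(1), casts)
}
assert kwargs["user_id"] == 42
assert isinstance(kwargs["item_id"], uuid.UUID)
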
|
||||
|
||||
@@ -435,7 +487,8 @@ class Router:
|
||||
handler = self.get(request)[0]
|
||||
except (NotFound, MethodNotSupported):
|
||||
return False
|
||||
if (hasattr(handler, 'view_class') and
|
||||
hasattr(handler.view_class, request.method.lower())):
|
||||
if hasattr(handler, "view_class") and hasattr(
|
||||
handler.view_class, request.method.lower()
|
||||
):
|
||||
handler = getattr(handler.view_class, request.method.lower())
|
||||
return hasattr(handler, 'is_stream')
|
||||
return hasattr(handler, "is_stream")
|
||||
|
||||
674
sanic/server.py
@@ -1,69 +1,118 @@
|
||||
import asyncio
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from functools import partial
|
||||
from inspect import isawaitable
|
||||
from multiprocessing import Process
|
||||
from signal import (
|
||||
SIGTERM, SIGINT, SIG_IGN,
|
||||
signal as signal_func,
|
||||
Signals
|
||||
)
|
||||
from socket import (
|
||||
socket,
|
||||
SOL_SOCKET,
|
||||
SO_REUSEADDR,
|
||||
)
|
||||
from signal import SIG_IGN, SIGINT, SIGTERM, Signals
|
||||
from signal import signal as signal_func
|
||||
from socket import SO_REUSEADDR, SOL_SOCKET, socket
|
||||
from time import time
|
||||
|
||||
from httptools import HttpRequestParser
|
||||
from httptools.parser.errors import HttpParserError
|
||||
from multidict import CIMultiDict
|
||||
from httptools import HttpRequestParser # type: ignore
|
||||
from httptools.parser.errors import HttpParserError # type: ignore
|
||||
|
||||
from sanic.compat import Header
|
||||
from sanic.exceptions import (
|
||||
HeaderExpectationFailed,
|
||||
InvalidUsage,
|
||||
PayloadTooLarge,
|
||||
RequestTimeout,
|
||||
ServerError,
|
||||
ServiceUnavailable,
|
||||
)
|
||||
from sanic.log import access_logger, logger
|
||||
from sanic.request import EXPECT_HEADER, Request, StreamBuffer
|
||||
from sanic.response import HTTPResponse
|
||||
|
||||
|
||||
try:
|
||||
import uvloop
|
||||
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
import uvloop # type: ignore
|
||||
|
||||
if not isinstance(asyncio.get_event_loop_policy(), uvloop.EventLoopPolicy):
|
||||
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from sanic.log import logger, access_logger
|
||||
from sanic.response import HTTPResponse
|
||||
from sanic.request import Request
|
||||
from sanic.exceptions import (
|
||||
RequestTimeout, PayloadTooLarge, InvalidUsage, ServerError,
|
||||
ServiceUnavailable)
|
||||
|
||||
current_time = None
|
||||
|
||||
|
||||
class Signal:
|
||||
stopped = False
|
||||
|
||||
|
||||
class HttpProtocol(asyncio.Protocol):
|
||||
__slots__ = (
|
||||
# event loop, connection
|
||||
'loop', 'transport', 'connections', 'signal',
|
||||
# request params
|
||||
'parser', 'request', 'url', 'headers',
|
||||
# request config
|
||||
'request_handler', 'request_timeout', 'response_timeout',
|
||||
'keep_alive_timeout', 'request_max_size', 'request_class',
|
||||
'is_request_stream', 'router',
|
||||
# enable or disable access log purpose
|
||||
'access_log',
|
||||
# connection management
|
||||
'_total_request_size', '_request_timeout_handler',
|
||||
'_response_timeout_handler', '_keep_alive_timeout_handler',
|
||||
'_last_request_time', '_last_response_time', '_is_stream_handler')
|
||||
"""
|
||||
This class provides a basic HTTP implementation of the sanic framework.
|
||||
"""
|
||||
|
||||
def __init__(self, *, loop, request_handler, error_handler,
|
||||
signal=Signal(), connections=set(), request_timeout=60,
|
||||
response_timeout=60, keep_alive_timeout=5,
|
||||
request_max_size=None, request_class=None, access_log=True,
|
||||
keep_alive=True, is_request_stream=False, router=None,
|
||||
state=None, debug=False, **kwargs):
|
||||
__slots__ = (
|
||||
# app
|
||||
"app",
|
||||
# event loop, connection
|
||||
"loop",
|
||||
"transport",
|
||||
"connections",
|
||||
"signal",
|
||||
# request params
|
||||
"parser",
|
||||
"request",
|
||||
"url",
|
||||
"headers",
|
||||
# request config
|
||||
"request_handler",
|
||||
"request_timeout",
|
||||
"response_timeout",
|
||||
"keep_alive_timeout",
|
||||
"request_max_size",
|
||||
"request_buffer_queue_size",
|
||||
"request_class",
|
||||
"is_request_stream",
|
||||
"router",
|
||||
"error_handler",
|
||||
# enable or disable access log purpose
|
||||
"access_log",
|
||||
# connection management
|
||||
"_total_request_size",
|
||||
"_request_timeout_handler",
|
||||
"_response_timeout_handler",
|
||||
"_keep_alive_timeout_handler",
|
||||
"_last_request_time",
|
||||
"_last_response_time",
|
||||
"_is_stream_handler",
|
||||
"_not_paused",
|
||||
"_request_handler_task",
|
||||
"_request_stream_task",
|
||||
"_keep_alive",
|
||||
"_header_fragment",
|
||||
"state",
|
||||
"_debug",
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
loop,
|
||||
app,
|
||||
request_handler,
|
||||
error_handler,
|
||||
signal=Signal(),
|
||||
connections=None,
|
||||
request_timeout=60,
|
||||
response_timeout=60,
|
||||
keep_alive_timeout=5,
|
||||
request_max_size=None,
|
||||
request_buffer_queue_size=100,
|
||||
request_class=None,
|
||||
access_log=True,
|
||||
keep_alive=True,
|
||||
is_request_stream=False,
|
||||
router=None,
|
||||
state=None,
|
||||
debug=False,
|
||||
**kwargs
|
||||
):
|
||||
self.loop = loop
|
||||
self.app = app
|
||||
self.transport = None
|
||||
self.request = None
|
||||
self.parser = None
|
||||
@@ -72,16 +121,18 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.router = router
|
||||
self.signal = signal
|
||||
self.access_log = access_log
|
||||
self.connections = connections
|
||||
self.connections = connections if connections is not None else set()
|
||||
self.request_handler = request_handler
|
||||
self.error_handler = error_handler
|
||||
self.request_timeout = request_timeout
|
||||
self.request_buffer_queue_size = request_buffer_queue_size
|
||||
self.response_timeout = response_timeout
|
||||
self.keep_alive_timeout = keep_alive_timeout
|
||||
self.request_max_size = request_max_size
|
||||
self.request_class = request_class or Request
|
||||
self.is_request_stream = is_request_stream
|
||||
self._is_stream_handler = False
|
||||
self._not_paused = asyncio.Event(loop=loop)
|
||||
self._total_request_size = 0
|
||||
self._request_timeout_handler = None
|
||||
self._response_timeout_handler = None
|
||||
@@ -91,18 +142,27 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self._request_handler_task = None
|
||||
self._request_stream_task = None
|
||||
self._keep_alive = keep_alive
|
||||
self._header_fragment = b''
|
||||
self._header_fragment = b""
|
||||
self.state = state if state else {}
|
||||
if 'requests_count' not in self.state:
|
||||
self.state['requests_count'] = 0
|
||||
if "requests_count" not in self.state:
|
||||
self.state["requests_count"] = 0
|
||||
self._debug = debug
|
||||
self._not_paused.set()
|
||||
|
||||
@property
|
||||
def keep_alive(self):
|
||||
"""
|
||||
Check if the connection needs to be kept alive based on the params
|
||||
attached to the `_keep_alive` attribute, :attr:`Signal.stopped`
|
||||
and :func:`HttpProtocol.parser.should_keep_alive`
|
||||
|
||||
:return: ``True`` if connection is to be kept alive ``False`` else
|
||||
"""
|
||||
return (
|
||||
self._keep_alive and
|
||||
not self.signal.stopped and
|
||||
self.parser.should_keep_alive())
|
||||
self._keep_alive
|
||||
and not self.signal.stopped
|
||||
and self.parser.should_keep_alive()
|
||||
)
|
||||
|
||||
# -------------------------------------------- #
|
||||
# Connection
|
||||
@@ -111,12 +171,17 @@ class HttpProtocol(asyncio.Protocol):
|
||||
def connection_made(self, transport):
|
||||
self.connections.add(self)
|
||||
self._request_timeout_handler = self.loop.call_later(
|
||||
self.request_timeout, self.request_timeout_callback)
|
||||
self.request_timeout, self.request_timeout_callback
|
||||
)
|
||||
self.transport = transport
|
||||
self._last_request_time = current_time
|
||||
self._last_request_time = time()
|
||||
|
||||
def connection_lost(self, exc):
|
||||
self.connections.discard(self)
|
||||
if self._request_handler_task:
|
||||
self._request_handler_task.cancel()
|
||||
if self._request_stream_task:
|
||||
self._request_stream_task.cancel()
|
||||
if self._request_timeout_handler:
|
||||
self._request_timeout_handler.cancel()
|
||||
if self._response_timeout_handler:
|
||||
@@ -124,60 +189,62 @@ class HttpProtocol(asyncio.Protocol):
|
||||
if self._keep_alive_timeout_handler:
|
||||
self._keep_alive_timeout_handler.cancel()
|
||||
|
||||
def pause_writing(self):
|
||||
self._not_paused.clear()
|
||||
|
||||
def resume_writing(self):
|
||||
self._not_paused.set()
|
||||
|
||||
def request_timeout_callback(self):
|
||||
# See the docstring in the RequestTimeout exception, to see
|
||||
# exactly what this timeout is checking for.
|
||||
# Check if elapsed time since request initiated exceeds our
|
||||
# configured maximum request timeout value
|
||||
time_elapsed = current_time - self._last_request_time
|
||||
time_elapsed = time() - self._last_request_time
|
||||
if time_elapsed < self.request_timeout:
|
||||
time_left = self.request_timeout - time_elapsed
|
||||
self._request_timeout_handler = (
|
||||
self.loop.call_later(time_left,
|
||||
self.request_timeout_callback)
|
||||
self._request_timeout_handler = self.loop.call_later(
|
||||
time_left, self.request_timeout_callback
|
||||
)
|
||||
else:
|
||||
if self._request_stream_task:
|
||||
self._request_stream_task.cancel()
|
||||
if self._request_handler_task:
|
||||
self._request_handler_task.cancel()
|
||||
try:
|
||||
raise RequestTimeout('Request Timeout')
|
||||
except RequestTimeout as exception:
|
||||
self.write_error(exception)
|
||||
self.write_error(RequestTimeout("Request Timeout"))
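
The same re-arming timer shape repeats in request_timeout_callback, response_timeout_callback and keep_alive_timeout_callback: if recent activity leaves time on the clock, reschedule with the remainder, otherwise fire the timeout action. A compact sketch of that shape (names here are illustrative, not Sanic API):

from time import time

def make_watchdog(loop, timeout, last_activity, on_timeout):
    def callback():
        elapsed = time() - last_activity()
        if elapsed < timeout:
            # still within budget: re-arm for the remaining time only
            loop.call_later(timeout - elapsed, callback)
        else:
            on_timeout()
    return callback
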
|
||||
|
||||
def response_timeout_callback(self):
|
||||
# Check if elapsed time since response was initiated exceeds our
|
||||
# configured maximum request timeout value
|
||||
time_elapsed = current_time - self._last_request_time
|
||||
time_elapsed = time() - self._last_request_time
|
||||
if time_elapsed < self.response_timeout:
|
||||
time_left = self.response_timeout - time_elapsed
|
||||
self._response_timeout_handler = (
|
||||
self.loop.call_later(time_left,
|
||||
self.response_timeout_callback)
|
||||
self._response_timeout_handler = self.loop.call_later(
|
||||
time_left, self.response_timeout_callback
|
||||
)
|
||||
else:
|
||||
if self._request_stream_task:
|
||||
self._request_stream_task.cancel()
|
||||
if self._request_handler_task:
|
||||
self._request_handler_task.cancel()
|
||||
try:
|
||||
raise ServiceUnavailable('Response Timeout')
|
||||
except ServiceUnavailable as exception:
|
||||
self.write_error(exception)
|
||||
self.write_error(ServiceUnavailable("Response Timeout"))
|
||||
|
||||
def keep_alive_timeout_callback(self):
|
||||
# Check if elapsed time since last response exceeds our configured
|
||||
# maximum keep alive timeout value
|
||||
time_elapsed = current_time - self._last_response_time
|
||||
"""
|
||||
Check if elapsed time since last response exceeds our configured
|
||||
maximum keep alive timeout value and if so, close the transport
|
||||
pipe and let the response writer handle the error.
|
||||
|
||||
:return: None
|
||||
"""
|
||||
time_elapsed = time() - self._last_response_time
|
||||
if time_elapsed < self.keep_alive_timeout:
|
||||
time_left = self.keep_alive_timeout - time_elapsed
|
||||
self._keep_alive_timeout_handler = (
|
||||
self.loop.call_later(time_left,
|
||||
self.keep_alive_timeout_callback)
|
||||
self._keep_alive_timeout_handler = self.loop.call_later(
|
||||
time_left, self.keep_alive_timeout_callback
|
||||
)
|
||||
else:
|
||||
logger.debug('KeepAlive Timeout. Closing connection.')
|
||||
logger.debug("KeepAlive Timeout. Closing connection.")
|
||||
self.transport.close()
|
||||
self.transport = None
|
||||
|
||||
@@ -190,8 +257,7 @@ class HttpProtocol(asyncio.Protocol):
|
||||
# memory limits
|
||||
self._total_request_size += len(data)
|
||||
if self._total_request_size > self.request_max_size:
|
||||
exception = PayloadTooLarge('Payload Too Large')
|
||||
self.write_error(exception)
|
||||
self.write_error(PayloadTooLarge("Payload Too Large"))
|
||||
|
||||
# Create parser if this is the first time we're receiving data
|
||||
if self.parser is None:
|
||||
@@ -200,17 +266,16 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.parser = HttpRequestParser(self)
|
||||
|
||||
# requests count
|
||||
self.state['requests_count'] = self.state['requests_count'] + 1
|
||||
self.state["requests_count"] = self.state["requests_count"] + 1
|
||||
|
||||
# Parse request chunk or close connection
|
||||
try:
|
||||
self.parser.feed_data(data)
|
||||
except HttpParserError:
|
||||
message = 'Bad Request'
|
||||
message = "Bad Request"
|
||||
if self._debug:
|
||||
message += '\n' + traceback.format_exc()
|
||||
exception = InvalidUsage(message)
|
||||
self.write_error(exception)
|
||||
message += "\n" + traceback.format_exc()
|
||||
self.write_error(InvalidUsage(message))
|
||||
|
||||
def on_url(self, url):
|
||||
if not self.url:
|
||||
@@ -222,45 +287,79 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self._header_fragment += name
|
||||
|
||||
if value is not None:
|
||||
if self._header_fragment == b'Content-Length' \
|
||||
and int(value) > self.request_max_size:
|
||||
exception = PayloadTooLarge('Payload Too Large')
|
||||
self.write_error(exception)
|
||||
if (
|
||||
self._header_fragment == b"Content-Length"
|
||||
and int(value) > self.request_max_size
|
||||
):
|
||||
self.write_error(PayloadTooLarge("Payload Too Large"))
|
||||
try:
|
||||
value = value.decode()
|
||||
except UnicodeDecodeError:
|
||||
value = value.decode('latin_1')
|
||||
value = value.decode("latin_1")
|
||||
self.headers.append(
|
||||
(self._header_fragment.decode().casefold(), value))
|
||||
(self._header_fragment.decode().casefold(), value)
|
||||
)
|
||||
|
||||
self._header_fragment = b''
|
||||
self._header_fragment = b""
|
||||
|
||||
def on_headers_complete(self):
|
||||
self.request = self.request_class(
|
||||
url_bytes=self.url,
|
||||
headers=CIMultiDict(self.headers),
|
||||
headers=Header(self.headers),
|
||||
version=self.parser.get_http_version(),
|
||||
method=self.parser.get_method().decode(),
|
||||
transport=self.transport
|
||||
transport=self.transport,
|
||||
app=self.app,
|
||||
)
|
||||
# Remove any existing KeepAlive handler here,
|
||||
# It will be recreated if required on the new request.
|
||||
if self._keep_alive_timeout_handler:
|
||||
self._keep_alive_timeout_handler.cancel()
|
||||
self._keep_alive_timeout_handler = None
|
||||
|
||||
if self.request.headers.get(EXPECT_HEADER):
|
||||
self.expect_handler()
|
||||
|
||||
if self.is_request_stream:
|
||||
self._is_stream_handler = self.router.is_stream_handler(
|
||||
self.request)
|
||||
self.request
|
||||
)
|
||||
if self._is_stream_handler:
|
||||
self.request.stream = asyncio.Queue()
|
||||
self.request.stream = StreamBuffer(
|
||||
self.request_buffer_queue_size
|
||||
)
|
||||
self.execute_request_handler()
|
||||
|
||||
def expect_handler(self):
|
||||
"""
|
||||
Handler for Expect Header.
|
||||
"""
|
||||
expect = self.request.headers.get(EXPECT_HEADER)
|
||||
if self.request.version == "1.1":
|
||||
if expect.lower() == "100-continue":
|
||||
self.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
else:
|
||||
self.write_error(
|
||||
HeaderExpectationFailed(
|
||||
"Unknown Expect: {expect}".format(expect=expect)
|
||||
)
|
||||
)
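
On the wire, the handler above implements the standard Expect handshake: a client that sends "Expect: 100-continue" waits before transmitting the request body, the server answers with the interim status written above, and only then does the body follow. Roughly (the trace is illustrative; path and final status are made up):

    > POST /upload HTTP/1.1
    > Expect: 100-continue
    < HTTP/1.1 100 Continue          (the transport.write above)
    > ...request body...
    < HTTP/1.1 200 OK

Any other Expect value is rejected up front with 417 Expectation Failed via HeaderExpectationFailed.
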
|
||||
|
||||
def on_body(self, body):
|
||||
if self.is_request_stream and self._is_stream_handler:
|
||||
self._request_stream_task = self.loop.create_task(
|
||||
self.request.stream.put(body))
|
||||
return
|
||||
self.request.body.append(body)
|
||||
self.body_append(body)
|
||||
)
|
||||
else:
|
||||
self.request.body_push(body)
|
||||
|
||||
async def body_append(self, body):
|
||||
if self.request.stream.is_full():
|
||||
self.transport.pause_reading()
|
||||
await self.request.stream.put(body)
|
||||
self.transport.resume_reading()
|
||||
else:
|
||||
await self.request.stream.put(body)
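
body_append() above is where the new StreamBuffer earns its keep: a full buffer pauses the socket instead of letting the body pile up in memory. The same idea in a stand-alone form; the names are illustrative and "transport" is any object exposing pause_reading()/resume_reading().

import asyncio

async def feed(transport, queue: asyncio.Queue, chunk: bytes):
    if queue.full():
        transport.pause_reading()
        await queue.put(chunk)      # waits until the consumer frees a slot
        transport.resume_reading()
    else:
        await queue.put(chunk)
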
|
||||
|
||||
def on_message_complete(self):
|
||||
# Entire request (headers and whole body) is received.
|
||||
@@ -270,47 +369,66 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self._request_timeout_handler = None
|
||||
if self.is_request_stream and self._is_stream_handler:
|
||||
self._request_stream_task = self.loop.create_task(
|
||||
self.request.stream.put(None))
|
||||
self.request.stream.put(None)
|
||||
)
|
||||
return
|
||||
self.request.body = b''.join(self.request.body)
|
||||
self.request.body_finish()
|
||||
self.execute_request_handler()
|
||||
|
||||
def execute_request_handler(self):
|
||||
"""
|
||||
Invoke the request handler defined by the
|
||||
:func:`sanic.app.Sanic.handle_request` method
|
||||
|
||||
:return: None
|
||||
"""
|
||||
self._response_timeout_handler = self.loop.call_later(
|
||||
self.response_timeout, self.response_timeout_callback)
|
||||
self._last_request_time = current_time
|
||||
self.response_timeout, self.response_timeout_callback
|
||||
)
|
||||
self._last_request_time = time()
|
||||
self._request_handler_task = self.loop.create_task(
|
||||
self.request_handler(
|
||||
self.request,
|
||||
self.write_response,
|
||||
self.stream_response))
|
||||
self.request, self.write_response, self.stream_response
|
||||
)
|
||||
)
|
||||
|
||||
# -------------------------------------------- #
|
||||
# Responding
|
||||
# -------------------------------------------- #
|
||||
def log_response(self, response):
|
||||
"""
|
||||
Helper method provided to enable the logging of responses in case if
|
||||
the :attr:`HttpProtocol.access_log` is enabled.
|
||||
|
||||
:param response: Response generated for the current request
|
||||
|
||||
:type response: :class:`sanic.response.HTTPResponse` or
|
||||
:class:`sanic.response.StreamingHTTPResponse`
|
||||
|
||||
:return: None
|
||||
"""
|
||||
if self.access_log:
|
||||
extra = {
|
||||
'status': getattr(response, 'status', 0),
|
||||
}
|
||||
extra = {"status": getattr(response, "status", 0)}
|
||||
|
||||
if isinstance(response, HTTPResponse):
|
||||
extra['byte'] = len(response.body)
|
||||
extra["byte"] = len(response.body)
|
||||
else:
|
||||
extra['byte'] = -1
|
||||
extra["byte"] = -1
|
||||
|
||||
extra['host'] = 'UNKNOWN'
|
||||
extra["host"] = "UNKNOWN"
|
||||
if self.request is not None:
|
||||
if self.request.ip:
|
||||
extra['host'] = '{0}:{1}'.format(self.request.ip,
|
||||
self.request.port)
|
||||
extra["host"] = "{0}:{1}".format(
|
||||
self.request.ip, self.request.port
|
||||
)
|
||||
|
||||
extra['request'] = '{0} {1}'.format(self.request.method,
|
||||
self.request.url)
|
||||
extra["request"] = "{0} {1}".format(
|
||||
self.request.method, self.request.url
|
||||
)
|
||||
else:
|
||||
extra['request'] = 'nil'
|
||||
extra["request"] = "nil"
|
||||
|
||||
access_logger.info('', extra=extra)
|
||||
access_logger.info("", extra=extra)
|
||||
|
||||
def write_response(self, response):
|
||||
"""
|
||||
@@ -323,34 +441,46 @@ class HttpProtocol(asyncio.Protocol):
|
||||
keep_alive = self.keep_alive
|
||||
self.transport.write(
|
||||
response.output(
|
||||
self.request.version, keep_alive,
|
||||
self.keep_alive_timeout))
|
||||
self.request.version, keep_alive, self.keep_alive_timeout
|
||||
)
|
||||
)
|
||||
self.log_response(response)
|
||||
except AttributeError:
|
||||
logger.error('Invalid response object for url %s, '
|
||||
'Expected Type: HTTPResponse, Actual Type: %s',
|
||||
self.url, type(response))
|
||||
self.write_error(ServerError('Invalid response type'))
|
||||
logger.error(
|
||||
"Invalid response object for url %s, "
|
||||
"Expected Type: HTTPResponse, Actual Type: %s",
|
||||
self.url,
|
||||
type(response),
|
||||
)
|
||||
self.write_error(ServerError("Invalid response type"))
|
||||
except RuntimeError:
|
||||
if self._debug:
|
||||
logger.error('Connection lost before response written @ %s',
|
||||
self.request.ip)
|
||||
logger.error(
|
||||
"Connection lost before response written @ %s",
|
||||
self.request.ip,
|
||||
)
|
||||
keep_alive = False
|
||||
except Exception as e:
|
||||
self.bail_out(
|
||||
"Writing response failed, connection closed {}".format(
|
||||
repr(e)))
|
||||
"Writing response failed, connection closed {}".format(repr(e))
|
||||
)
|
||||
finally:
|
||||
if not keep_alive:
|
||||
self.transport.close()
|
||||
self.transport = None
|
||||
else:
|
||||
self._keep_alive_timeout_handler = self.loop.call_later(
|
||||
self.keep_alive_timeout,
|
||||
self.keep_alive_timeout_callback)
|
||||
self._last_response_time = current_time
|
||||
self.keep_alive_timeout, self.keep_alive_timeout_callback
|
||||
)
|
||||
self._last_response_time = time()
|
||||
self.cleanup()
|
||||
|
||||
async def drain(self):
|
||||
await self._not_paused.wait()
|
||||
|
||||
async def push_data(self, data):
|
||||
self.transport.write(data)
|
||||
|
||||
async def stream_response(self, response):
|
||||
"""
|
||||
Streams a response to the client asynchronously. Attaches
|
||||
@@ -360,35 +490,42 @@ class HttpProtocol(asyncio.Protocol):
|
||||
if self._response_timeout_handler:
|
||||
self._response_timeout_handler.cancel()
|
||||
self._response_timeout_handler = None
|
||||
|
||||
try:
|
||||
keep_alive = self.keep_alive
|
||||
response.transport = self.transport
|
||||
response.protocol = self
|
||||
await response.stream(
|
||||
self.request.version, keep_alive, self.keep_alive_timeout)
|
||||
self.request.version, keep_alive, self.keep_alive_timeout
|
||||
)
|
||||
self.log_response(response)
|
||||
except AttributeError:
|
||||
logger.error('Invalid response object for url %s, '
|
||||
'Expected Type: HTTPResponse, Actual Type: %s',
|
||||
self.url, type(response))
|
||||
self.write_error(ServerError('Invalid response type'))
|
||||
logger.error(
|
||||
"Invalid response object for url %s, "
|
||||
"Expected Type: HTTPResponse, Actual Type: %s",
|
||||
self.url,
|
||||
type(response),
|
||||
)
|
||||
self.write_error(ServerError("Invalid response type"))
|
||||
except RuntimeError:
|
||||
if self._debug:
|
||||
logger.error('Connection lost before response written @ %s',
|
||||
self.request.ip)
|
||||
logger.error(
|
||||
"Connection lost before response written @ %s",
|
||||
self.request.ip,
|
||||
)
|
||||
keep_alive = False
|
||||
except Exception as e:
|
||||
self.bail_out(
|
||||
"Writing response failed, connection closed {}".format(
|
||||
repr(e)))
|
||||
"Writing response failed, connection closed {}".format(repr(e))
|
||||
)
|
||||
finally:
|
||||
if not keep_alive:
|
||||
self.transport.close()
|
||||
self.transport = None
|
||||
else:
|
||||
self._keep_alive_timeout_handler = self.loop.call_later(
|
||||
self.keep_alive_timeout,
|
||||
self.keep_alive_timeout_callback)
|
||||
self._last_response_time = current_time
|
||||
self.keep_alive_timeout, self.keep_alive_timeout_callback
|
||||
)
|
||||
self._last_response_time = time()
|
||||
self.cleanup()
|
||||
|
||||
def write_error(self, exception):
|
||||
@@ -400,35 +537,57 @@ class HttpProtocol(asyncio.Protocol):
|
||||
response = None
|
||||
try:
|
||||
response = self.error_handler.response(self.request, exception)
|
||||
version = self.request.version if self.request else '1.1'
|
||||
version = self.request.version if self.request else "1.1"
|
||||
self.transport.write(response.output(version))
|
||||
except RuntimeError:
|
||||
if self._debug:
|
||||
logger.error('Connection lost before error written @ %s',
|
||||
self.request.ip if self.request else 'Unknown')
|
||||
logger.error(
|
||||
"Connection lost before error written @ %s",
|
||||
self.request.ip if self.request else "Unknown",
|
||||
)
|
||||
except Exception as e:
|
||||
self.bail_out(
|
||||
"Writing error failed, connection closed {}".format(
|
||||
repr(e)), from_error=True
|
||||
"Writing error failed, connection closed {}".format(repr(e)),
|
||||
from_error=True,
|
||||
)
|
||||
finally:
|
||||
if self.parser and (self.keep_alive
|
||||
or getattr(response, 'status', 0) == 408):
|
||||
if self.parser and (
|
||||
self.keep_alive or getattr(response, "status", 0) == 408
|
||||
):
|
||||
self.log_response(response)
|
||||
try:
|
||||
self.transport.close()
|
||||
except AttributeError as e:
|
||||
logger.debug('Connection lost before server could close it.')
|
||||
except AttributeError:
|
||||
logger.debug("Connection lost before server could close it.")
|
||||
|
||||
def bail_out(self, message, from_error=False):
|
||||
if from_error or self.transport.is_closing():
|
||||
logger.error("Transport closed @ %s and exception "
|
||||
"experienced during error handling",
|
||||
self.transport.get_extra_info('peername'))
|
||||
logger.debug('Exception:\n%s', traceback.format_exc())
|
||||
"""
|
||||
In case if the transport pipes are closed and the sanic app encounters
|
||||
an error while writing data to the transport pipe, we log the error
|
||||
with proper details.
|
||||
|
||||
:param message: Error message to display
|
||||
:param from_error: If the bail out was invoked while handling an
|
||||
exception scenario.
|
||||
|
||||
:type message: str
|
||||
:type from_error: bool
|
||||
|
||||
:return: None
|
||||
"""
|
||||
if from_error or self.transport is None or self.transport.is_closing():
|
||||
logger.error(
|
||||
"Transport closed @ %s and exception "
|
||||
"experienced during error handling",
|
||||
(
|
||||
self.transport.get_extra_info("peername")
|
||||
if self.transport is not None
|
||||
else "N/A"
|
||||
),
|
||||
)
|
||||
logger.debug("Exception:", exc_info=True)
|
||||
else:
|
||||
exception = ServerError(message)
|
||||
self.write_error(exception)
|
||||
self.write_error(ServerError(message))
|
||||
logger.error(message)
|
||||
|
||||
def cleanup(self):
|
||||
@@ -463,18 +622,6 @@ class HttpProtocol(asyncio.Protocol):
|
||||
self.transport = None
|
||||
|
||||
|
||||
def update_current_time(loop):
|
||||
"""Cache the current time, since it is needed at the end of every
|
||||
keep-alive request to update the request timeout time
|
||||
|
||||
:param loop:
|
||||
:return:
|
||||
"""
|
||||
global current_time
|
||||
current_time = time()
|
||||
loop.call_later(1, partial(update_current_time, loop))
|
||||
|
||||
|
||||
def trigger_events(events, loop):
|
||||
"""Trigger event callbacks (functions or async)
|
||||
|
||||
@@ -487,17 +634,118 @@ def trigger_events(events, loop):
|
||||
loop.run_until_complete(result)
|
||||
|
||||
|
||||
def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
after_start=None, before_stop=None, after_stop=None, debug=False,
|
||||
request_timeout=60, response_timeout=60, keep_alive_timeout=5,
|
||||
ssl=None, sock=None, request_max_size=None, reuse_port=False,
|
||||
loop=None, protocol=HttpProtocol, backlog=100,
|
||||
register_sys_signals=True, run_multiple=False, run_async=False,
|
||||
connections=None, signal=Signal(), request_class=None,
|
||||
access_log=True, keep_alive=True, is_request_stream=False,
|
||||
router=None, websocket_max_size=None, websocket_max_queue=None,
|
||||
websocket_read_limit=2 ** 16, websocket_write_limit=2 ** 16,
|
||||
state=None, graceful_shutdown_timeout=15.0):
|
||||
class AsyncioServer:
|
||||
"""
|
||||
Wraps an asyncio server with functionality that might be useful to
|
||||
a user who needs to manage the server lifecycle manually.
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
"loop",
|
||||
"serve_coro",
|
||||
"_after_start",
|
||||
"_before_stop",
|
||||
"_after_stop",
|
||||
"server",
|
||||
"connections",
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
loop,
|
||||
serve_coro,
|
||||
connections,
|
||||
after_start,
|
||||
before_stop,
|
||||
after_stop,
|
||||
):
|
||||
# Note, Sanic already called "before_server_start" events
|
||||
# before this helper was even created. So we don't need it here.
|
||||
self.loop = loop
|
||||
self.serve_coro = serve_coro
|
||||
self._after_start = after_start
|
||||
self._before_stop = before_stop
|
||||
self._after_stop = after_stop
|
||||
self.server = None
|
||||
self.connections = connections
|
||||
|
||||
def after_start(self):
|
||||
"""Trigger "after_server_start" events"""
|
||||
trigger_events(self._after_start, self.loop)
|
||||
|
||||
def before_stop(self):
|
||||
"""Trigger "before_server_stop" events"""
|
||||
trigger_events(self._before_stop, self.loop)
|
||||
|
||||
def after_stop(self):
|
||||
"""Trigger "after_server_stop" events"""
|
||||
trigger_events(self._after_stop, self.loop)
|
||||
|
||||
def is_serving(self):
|
||||
if self.server:
|
||||
return self.server.is_serving()
|
||||
return False
|
||||
|
||||
def wait_closed(self):
|
||||
if self.server:
|
||||
return self.server.wait_closed()
|
||||
|
||||
def close(self):
|
||||
if self.server:
|
||||
self.server.close()
|
||||
coro = self.wait_closed()
|
||||
task = asyncio.ensure_future(coro, loop=self.loop)
|
||||
return task
|
||||
|
||||
def __await__(self):
|
||||
"""Starts the asyncio server, returns AsyncServerCoro"""
|
||||
task = asyncio.ensure_future(self.serve_coro)
|
||||
while not task.done():
|
||||
yield
|
||||
self.server = task.result()
|
||||
return self
|
||||
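For reference, a minimal lifecycle sketch built only on the methods defined above; it assumes srv is the not-yet-started AsyncioServer that serve(..., run_async=True) returns (how the application obtains it is outside this hunk):

async def run_manually(srv):
    await srv                    # __await__ above starts the wrapped server
    srv.after_start()            # fire "after_server_start" listeners
    try:
        await srv.wait_closed()  # serve until the underlying server closes
    finally:
        srv.before_stop()
        await srv.close()        # close() returns a task wrapping wait_closed()
        srv.after_stop()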
|
||||
|
||||
def serve(
|
||||
host,
|
||||
port,
|
||||
app,
|
||||
request_handler,
|
||||
error_handler,
|
||||
before_start=None,
|
||||
after_start=None,
|
||||
before_stop=None,
|
||||
after_stop=None,
|
||||
debug=False,
|
||||
request_timeout=60,
|
||||
response_timeout=60,
|
||||
keep_alive_timeout=5,
|
||||
ssl=None,
|
||||
sock=None,
|
||||
request_max_size=None,
|
||||
request_buffer_queue_size=100,
|
||||
reuse_port=False,
|
||||
loop=None,
|
||||
protocol=HttpProtocol,
|
||||
backlog=100,
|
||||
register_sys_signals=True,
|
||||
run_multiple=False,
|
||||
run_async=False,
|
||||
connections=None,
|
||||
signal=Signal(),
|
||||
request_class=None,
|
||||
access_log=True,
|
||||
keep_alive=True,
|
||||
is_request_stream=False,
|
||||
router=None,
|
||||
websocket_max_size=None,
|
||||
websocket_max_queue=None,
|
||||
websocket_read_limit=2 ** 16,
|
||||
websocket_write_limit=2 ** 16,
|
||||
state=None,
|
||||
graceful_shutdown_timeout=15.0,
|
||||
asyncio_server_kwargs=None,
|
||||
):
|
||||
"""Start asynchronous HTTP Server on an individual process.
|
||||
|
||||
:param host: Address to host on
|
||||
@@ -524,6 +772,8 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
:param reuse_port: `True` for multiple workers
|
||||
:param loop: asyncio compatible event loop
|
||||
:param protocol: subclass of asyncio protocol class
|
||||
:param run_async: bool: Do not create a new event loop for the server,
and return an AsyncioServer object rather than running it
|
||||
:param request_class: Request class to use
|
||||
:param access_log: disable/enable access log
|
||||
:param websocket_max_size: enforces the maximum size for
|
||||
@@ -537,7 +787,12 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
outgoing bytes, the low-water limit is a
|
||||
quarter of the high-water limit.
|
||||
:param is_request_stream: disable/enable Request.stream
|
||||
:param request_buffer_queue_size: streaming request buffer queue size
|
||||
:param router: Router object
|
||||
:param graceful_shutdown_timeout: How long to wait before force-closing
non-idle connections
|
||||
:param asyncio_server_kwargs: key-value args for asyncio/uvloop
|
||||
create_server method
|
||||
:return: Nothing
|
||||
"""
|
||||
if not run_async:
|
||||
@@ -548,12 +803,15 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
if debug:
|
||||
loop.set_debug(debug)
|
||||
|
||||
app.asgi = False
|
||||
|
||||
connections = connections if connections is not None else set()
|
||||
server = partial(
|
||||
protocol,
|
||||
loop=loop,
|
||||
connections=connections,
|
||||
signal=signal,
|
||||
app=app,
|
||||
request_handler=request_handler,
|
||||
error_handler=error_handler,
|
||||
request_timeout=request_timeout,
|
||||
@@ -572,7 +830,9 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
state=state,
|
||||
debug=debug,
|
||||
)
|
||||
|
||||
asyncio_server_kwargs = (
|
||||
asyncio_server_kwargs if asyncio_server_kwargs else {}
|
||||
)
|
||||
server_coroutine = loop.create_server(
|
||||
server,
|
||||
host,
|
||||
@@ -580,15 +840,19 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
ssl=ssl,
|
||||
reuse_port=reuse_port,
|
||||
sock=sock,
|
||||
backlog=backlog
|
||||
backlog=backlog,
|
||||
**asyncio_server_kwargs
|
||||
)
|
||||
|
||||
# Instead of pulling time at the end of every request,
|
||||
# pull it once per second (see update_current_time above)
|
||||
loop.call_soon(partial(update_current_time, loop))
|
||||
|
||||
if run_async:
|
||||
return server_coroutine
|
||||
return AsyncioServer(
|
||||
loop,
|
||||
server_coroutine,
|
||||
connections,
|
||||
after_start,
|
||||
before_stop,
|
||||
after_stop,
|
||||
)
|
||||
|
||||
trigger_events(before_start, loop)
|
||||
|
||||
@@ -611,11 +875,13 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
try:
|
||||
loop.add_signal_handler(_signal, loop.stop)
|
||||
except NotImplementedError:
|
||||
logger.warning('Sanic tried to use loop.add_signal_handler '
|
||||
'but it is not implemented on this platform.')
|
||||
logger.warning(
|
||||
"Sanic tried to use loop.add_signal_handler "
|
||||
"but it is not implemented on this platform."
|
||||
)
|
||||
pid = os.getpid()
|
||||
try:
|
||||
logger.info('Starting worker [%s]', pid)
|
||||
logger.info("Starting worker [%s]", pid)
|
||||
loop.run_forever()
|
||||
finally:
|
||||
logger.info("Stopping worker [%s]", pid)
|
||||
@@ -646,9 +912,7 @@ def serve(host, port, request_handler, error_handler, before_start=None,
|
||||
coros = []
|
||||
for conn in connections:
|
||||
if hasattr(conn, "websocket") and conn.websocket:
|
||||
coros.append(
|
||||
conn.websocket.close_connection()
|
||||
)
|
||||
coros.append(conn.websocket.close_connection())
|
||||
else:
|
||||
conn.close()
|
||||
|
||||
@@ -669,18 +933,20 @@ def serve_multiple(server_settings, workers):
|
||||
:param stop_event: if provided, is used as a stop signal
|
||||
:return:
|
||||
"""
|
||||
server_settings['reuse_port'] = True
|
||||
server_settings['run_multiple'] = True
|
||||
server_settings["reuse_port"] = True
|
||||
server_settings["run_multiple"] = True
|
||||
|
||||
# Handling when custom socket is not provided.
|
||||
if server_settings.get('sock') is None:
|
||||
if server_settings.get("sock") is None:
|
||||
sock = socket()
|
||||
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
|
||||
sock.bind((server_settings['host'], server_settings['port']))
|
||||
sock.bind((server_settings["host"], server_settings["port"]))
|
||||
sock.set_inheritable(True)
|
||||
server_settings['sock'] = sock
|
||||
server_settings['host'] = None
|
||||
server_settings['port'] = None
|
||||
server_settings["sock"] = sock
|
||||
server_settings["host"] = None
|
||||
server_settings["port"] = None
|
||||
|
||||
processes = []
|
||||
|
||||
def sig_handler(signal, frame):
|
||||
logger.info("Received signal %s. Shutting down.", Signals(signal).name)
|
||||
@@ -690,8 +956,6 @@ def serve_multiple(server_settings, workers):
|
||||
signal_func(SIGINT, lambda s, f: sig_handler(s, f))
|
||||
signal_func(SIGTERM, lambda s, f: sig_handler(s, f))
|
||||
|
||||
processes = []
|
||||
|
||||
for _ in range(workers):
|
||||
process = Process(target=serve, kwargs=server_settings)
|
||||
process.daemon = True
|
||||
@@ -704,4 +968,4 @@ def serve_multiple(server_settings, workers):
|
||||
# the above processes will block this until they're stopped
|
||||
for process in processes:
|
||||
process.terminate()
|
||||
server_settings.get('sock').close()
|
||||
server_settings.get("sock").close()
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from mimetypes import guess_type
|
||||
from os import path
|
||||
from re import sub
|
||||
from time import strftime, gmtime
|
||||
from time import gmtime, strftime
|
||||
from urllib.parse import unquote
|
||||
|
||||
from aiofiles.os import stat
|
||||
from aiofiles.os import stat # type: ignore
|
||||
|
||||
from sanic.exceptions import (
|
||||
ContentRangeError,
|
||||
@@ -13,13 +13,22 @@ from sanic.exceptions import (
|
||||
InvalidUsage,
|
||||
)
|
||||
from sanic.handlers import ContentRangeHandler
|
||||
from sanic.response import file, file_stream, HTTPResponse
|
||||
from sanic.response import HTTPResponse, file, file_stream
|
||||
|
||||
|
||||
def register(app, uri, file_or_directory, pattern,
|
||||
use_modified_since, use_content_range,
|
||||
stream_large_files, name='static', host=None,
|
||||
strict_slashes=None, content_type=None):
|
||||
def register(
|
||||
app,
|
||||
uri,
|
||||
file_or_directory,
|
||||
pattern,
|
||||
use_modified_since,
|
||||
use_content_range,
|
||||
stream_large_files,
|
||||
name="static",
|
||||
host=None,
|
||||
strict_slashes=None,
|
||||
content_type=None,
|
||||
):
|
||||
# TODO: Though sanic is not a file server, I feel like we should at least
|
||||
# make a good effort here. Modified-since is nice, but we could
|
||||
# also look into etags, expires, and caching
|
||||
@@ -46,12 +55,12 @@ def register(app, uri, file_or_directory, pattern,
|
||||
# If we're not trying to match a file directly,
|
||||
# serve from the folder
|
||||
if not path.isfile(file_or_directory):
|
||||
uri += '<file_uri:' + pattern + '>'
|
||||
uri += "<file_uri:" + pattern + ">"
|
||||
|
||||
async def _handler(request, file_uri=None):
|
||||
# Using this to determine if the URL is trying to break out of the path
|
||||
# served. os.path.realpath seems to be very slow
|
||||
if file_uri and '../' in file_uri:
|
||||
if file_uri and "../" in file_uri:
|
||||
raise InvalidUsage("Invalid URL")
|
||||
# Merge served directory and requested file if provided
|
||||
# Strip all / that in the beginning of the URL to help prevent python
|
||||
@@ -59,15 +68,16 @@ def register(app, uri, file_or_directory, pattern,
|
||||
root_path = file_path = file_or_directory
|
||||
if file_uri:
|
||||
file_path = path.join(
|
||||
file_or_directory, sub('^[/]*', '', file_uri))
|
||||
file_or_directory, sub("^[/]*", "", file_uri)
|
||||
)
|
||||
|
||||
# URL decode the path sent by the browser otherwise we won't be able to
|
||||
# match filenames which got encoded (filenames with spaces etc)
|
||||
file_path = path.abspath(unquote(file_path))
|
||||
if not file_path.startswith(path.abspath(unquote(root_path))):
|
||||
raise FileNotFound('File not found',
|
||||
path=file_or_directory,
|
||||
relative_url=file_uri)
|
||||
raise FileNotFound(
|
||||
"File not found", path=file_or_directory, relative_url=file_uri
|
||||
)
|
||||
try:
|
||||
headers = {}
|
||||
# Check if the client has been sent this file before
|
||||
@@ -76,33 +86,35 @@ def register(app, uri, file_or_directory, pattern,
|
||||
if use_modified_since:
|
||||
stats = await stat(file_path)
|
||||
modified_since = strftime(
|
||||
'%a, %d %b %Y %H:%M:%S GMT', gmtime(stats.st_mtime))
|
||||
if request.headers.get('If-Modified-Since') == modified_since:
|
||||
"%a, %d %b %Y %H:%M:%S GMT", gmtime(stats.st_mtime)
|
||||
)
|
||||
if request.headers.get("If-Modified-Since") == modified_since:
|
||||
return HTTPResponse(status=304)
|
||||
headers['Last-Modified'] = modified_since
|
||||
headers["Last-Modified"] = modified_since
|
||||
_range = None
|
||||
if use_content_range:
|
||||
_range = None
|
||||
if not stats:
|
||||
stats = await stat(file_path)
|
||||
headers['Accept-Ranges'] = 'bytes'
|
||||
headers['Content-Length'] = str(stats.st_size)
|
||||
if request.method != 'HEAD':
|
||||
headers["Accept-Ranges"] = "bytes"
|
||||
headers["Content-Length"] = str(stats.st_size)
|
||||
if request.method != "HEAD":
|
||||
try:
|
||||
_range = ContentRangeHandler(request, stats)
|
||||
except HeaderNotFound:
|
||||
pass
|
||||
else:
|
||||
del headers['Content-Length']
|
||||
del headers["Content-Length"]
|
||||
for key, value in _range.headers.items():
|
||||
headers[key] = value
|
||||
headers['Content-Type'] = content_type \
|
||||
or guess_type(file_path)[0] or 'text/plain'
|
||||
if request.method == 'HEAD':
|
||||
headers["Content-Type"] = (
|
||||
content_type or guess_type(file_path)[0] or "text/plain"
|
||||
)
|
||||
if request.method == "HEAD":
|
||||
return HTTPResponse(headers=headers)
|
||||
else:
|
||||
if stream_large_files:
|
||||
if isinstance(stream_large_files, int):
|
||||
if type(stream_large_files) == int:
|
||||
threshold = stream_large_files
|
||||
else:
|
||||
threshold = 1024 * 1024
|
||||
@@ -110,19 +122,25 @@ def register(app, uri, file_or_directory, pattern,
|
||||
if not stats:
|
||||
stats = await stat(file_path)
|
||||
if stats.st_size >= threshold:
|
||||
return await file_stream(file_path, headers=headers,
|
||||
_range=_range)
|
||||
return await file_stream(
|
||||
file_path, headers=headers, _range=_range
|
||||
)
|
||||
return await file(file_path, headers=headers, _range=_range)
|
||||
except ContentRangeError:
|
||||
raise
|
||||
except Exception:
|
||||
raise FileNotFound('File not found',
|
||||
path=file_or_directory,
|
||||
relative_url=file_uri)
|
||||
raise FileNotFound(
|
||||
"File not found", path=file_or_directory, relative_url=file_uri
|
||||
)
|
||||
|
||||
# special prefix for static files
|
||||
if not name.startswith('_static_'):
|
||||
name = '_static_{}'.format(name)
|
||||
if not name.startswith("_static_"):
|
||||
name = "_static_{}".format(name)
|
||||
|
||||
app.route(uri, methods=['GET', 'HEAD'], name=name, host=host,
|
||||
strict_slashes=strict_slashes)(_handler)
|
||||
app.route(
|
||||
uri,
|
||||
methods=["GET", "HEAD"],
|
||||
name=name,
|
||||
host=host,
|
||||
strict_slashes=strict_slashes,
|
||||
)(_handler)
|
||||
|
||||
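A usage sketch for the registration above; app.static is assumed to be the public wrapper that forwards these arguments to register():

from sanic import Sanic

app = Sanic("static_demo")

# Serve ./public under /assets, answer If-Modified-Since and Range requests,
# and stream (rather than buffer) any file of 1 MiB or more.
app.static(
    "/assets",
    "./public",
    use_modified_since=True,
    use_content_range=True,
    stream_large_files=1024 * 1024,
)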
394
sanic/testing.py
@@ -1,85 +1,133 @@
|
||||
import traceback
|
||||
import asyncio
|
||||
import types
|
||||
import typing
|
||||
|
||||
from json import JSONDecodeError
|
||||
from sanic.log import logger
|
||||
from socket import socket
|
||||
from urllib.parse import unquote, urlsplit
|
||||
|
||||
import httpcore # type: ignore
|
||||
import requests_async as requests # type: ignore
|
||||
import websockets # type: ignore
|
||||
|
||||
from sanic.asgi import ASGIApp
|
||||
from sanic.exceptions import MethodNotSupported
|
||||
from sanic.log import logger
|
||||
from sanic.response import text
|
||||
|
||||
|
||||
HOST = '127.0.0.1'
|
||||
ASGI_HOST = "mockserver"
|
||||
HOST = "127.0.0.1"
|
||||
PORT = 42101
|
||||
|
||||
|
||||
class SanicTestClient:
|
||||
def __init__(self, app, port=PORT):
|
||||
"""Use port=None to bind to a random port"""
|
||||
self.app = app
|
||||
self.port = port
|
||||
|
||||
async def _local_request(self, method, uri, cookies=None, *args, **kwargs):
|
||||
import aiohttp
|
||||
if uri.startswith(('http:', 'https:', 'ftp:', 'ftps://' '//')):
|
||||
url = uri
|
||||
else:
|
||||
url = 'http://{host}:{port}{uri}'.format(
|
||||
host=HOST, port=self.port, uri=uri)
|
||||
def get_new_session(self):
|
||||
return requests.Session()
|
||||
|
||||
async def _local_request(self, method, url, *args, **kwargs):
|
||||
logger.info(url)
|
||||
conn = aiohttp.TCPConnector(verify_ssl=False)
|
||||
async with aiohttp.ClientSession(
|
||||
cookies=cookies, connector=conn) as session:
|
||||
async with getattr(
|
||||
session, method.lower())(url, *args, **kwargs) as response:
|
||||
try:
|
||||
response.text = await response.text()
|
||||
except UnicodeDecodeError as e:
|
||||
response.text = None
|
||||
raw_cookies = kwargs.pop("raw_cookies", None)
|
||||
|
||||
if method == "websocket":
|
||||
async with websockets.connect(url, *args, **kwargs) as websocket:
|
||||
websocket.opened = websocket.open
|
||||
return websocket
|
||||
else:
|
||||
async with self.get_new_session() as session:
|
||||
|
||||
try:
|
||||
response.json = await response.json()
|
||||
except (JSONDecodeError,
|
||||
UnicodeDecodeError,
|
||||
aiohttp.ClientResponseError):
|
||||
response = await getattr(session, method.lower())(
|
||||
url, verify=False, *args, **kwargs
|
||||
)
|
||||
except NameError:
|
||||
raise Exception(response.status_code)
|
||||
|
||||
try:
|
||||
response.json = response.json()
|
||||
except (JSONDecodeError, UnicodeDecodeError):
|
||||
response.json = None
|
||||
|
||||
response.body = await response.read()
|
||||
response.status = response.status_code
|
||||
response.content_type = response.headers.get("content-type")
|
||||
|
||||
if raw_cookies:
|
||||
response.raw_cookies = {}
|
||||
for cookie in response.cookies:
|
||||
response.raw_cookies[cookie.name] = cookie
|
||||
|
||||
return response
|
||||
|
||||
def _sanic_endpoint_test(
|
||||
self, method='get', uri='/', gather_request=True,
|
||||
debug=False, server_kwargs={"auto_reload": False},
|
||||
*request_args, **request_kwargs):
|
||||
self,
|
||||
method="get",
|
||||
uri="/",
|
||||
gather_request=True,
|
||||
debug=False,
|
||||
server_kwargs={"auto_reload": False},
|
||||
*request_args,
|
||||
**request_kwargs,
|
||||
):
|
||||
results = [None, None]
|
||||
exceptions = []
|
||||
|
||||
if gather_request:
|
||||
|
||||
def _collect_request(request):
|
||||
if results[0] is None:
|
||||
results[0] = request
|
||||
|
||||
self.app.request_middleware.appendleft(_collect_request)
|
||||
|
||||
@self.app.exception(MethodNotSupported)
|
||||
async def error_handler(request, exception):
|
||||
if request.method in ['HEAD', 'PATCH', 'PUT', 'DELETE']:
|
||||
if request.method in ["HEAD", "PATCH", "PUT", "DELETE"]:
|
||||
return text(
|
||||
'', exception.status_code, headers=exception.headers
|
||||
"", exception.status_code, headers=exception.headers
|
||||
)
|
||||
else:
|
||||
return self.app.error_handler.default(request, exception)
|
||||
|
||||
@self.app.listener('after_server_start')
|
||||
if self.port:
|
||||
server_kwargs = dict(host=HOST, port=self.port, **server_kwargs)
|
||||
host, port = HOST, self.port
|
||||
else:
|
||||
sock = socket()
|
||||
sock.bind((HOST, 0))
|
||||
server_kwargs = dict(sock=sock, **server_kwargs)
|
||||
host, port = sock.getsockname()
|
||||
|
||||
if uri.startswith(
|
||||
("http:", "https:", "ftp:", "ftps://", "//", "ws:", "wss:")
|
||||
):
|
||||
url = uri
|
||||
else:
|
||||
uri = uri if uri.startswith("/") else "/{uri}".format(uri=uri)
|
||||
scheme = "ws" if method == "websocket" else "http"
|
||||
url = "{scheme}://{host}:{port}{uri}".format(
|
||||
scheme=scheme, host=host, port=port, uri=uri
|
||||
)
|
||||
|
||||
@self.app.listener("after_server_start")
|
||||
async def _collect_response(sanic, loop):
|
||||
try:
|
||||
response = await self._local_request(
|
||||
method, uri, *request_args,
|
||||
**request_kwargs)
|
||||
method, url, *request_args, **request_kwargs
|
||||
)
|
||||
results[-1] = response
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
'Exception:\n{}'.format(traceback.format_exc()))
|
||||
logger.exception("Exception")
|
||||
exceptions.append(e)
|
||||
self.app.stop()
|
||||
|
||||
self.app.run(host=HOST, debug=debug, port=self.port, **server_kwargs)
|
||||
self.app.listeners['after_server_start'].pop()
|
||||
self.app.run(debug=debug, **server_kwargs)
|
||||
self.app.listeners["after_server_start"].pop()
|
||||
|
||||
if exceptions:
|
||||
raise ValueError("Exception during request: {}".format(exceptions))
|
||||
@@ -88,34 +136,288 @@ class SanicTestClient:
|
||||
try:
|
||||
request, response = results
|
||||
return request, response
|
||||
except BaseException:
|
||||
except BaseException: # noqa
|
||||
raise ValueError(
|
||||
"Request and response object expected, got ({})".format(
|
||||
results))
|
||||
results
|
||||
)
|
||||
)
|
||||
else:
|
||||
try:
|
||||
return results[-1]
|
||||
except BaseException:
|
||||
except BaseException: # noqa
|
||||
raise ValueError(
|
||||
"Request object expected, got ({})".format(results))
|
||||
"Request object expected, got ({})".format(results)
|
||||
)
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('get', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("get", *args, **kwargs)
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('post', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("post", *args, **kwargs)
|
||||
|
||||
def put(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('put', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("put", *args, **kwargs)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('delete', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("delete", *args, **kwargs)
|
||||
|
||||
def patch(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('patch', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("patch", *args, **kwargs)
|
||||
|
||||
def options(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('options', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("options", *args, **kwargs)
|
||||
|
||||
def head(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test('head', *args, **kwargs)
|
||||
return self._sanic_endpoint_test("head", *args, **kwargs)
|
||||
|
||||
def websocket(self, *args, **kwargs):
|
||||
return self._sanic_endpoint_test("websocket", *args, **kwargs)
|
||||
|
||||
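A usage sketch for the verb helpers above; it instantiates the client directly so nothing outside this file is assumed:

from sanic import Sanic
from sanic.response import json

app = Sanic("testing_demo")

@app.route("/ping")
async def ping(request):
    return json({"ok": True})

# _sanic_endpoint_test starts the app, performs the call, and returns the pair
request, response = SanicTestClient(app).get("/ping")
assert response.status == 200
assert response.json == {"ok": True}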
|
||||
class SanicASGIAdapter(requests.asgi.ASGIAdapter): # noqa
|
||||
async def send( # type: ignore
|
||||
self,
|
||||
request: requests.PreparedRequest,
|
||||
gather_return: bool = False,
|
||||
*args: typing.Any,
|
||||
**kwargs: typing.Any,
|
||||
) -> requests.Response:
|
||||
"""This method is taken MOSTLY verbatim from requests-asyn. The
|
||||
difference is the capturing of a response on the ASGI call and then
|
||||
returning it on the response object. This is implemented to achieve:
|
||||
|
||||
request, response = await app.asgi_client.get("/")
|
||||
|
||||
You can see the original code here:
|
||||
https://github.com/encode/requests-async/blob/614f40f77f19e6c6da8a212ae799107b0384dbf9/requests_async/asgi.py#L51""" # noqa
|
||||
scheme, netloc, path, query, fragment = urlsplit(
|
||||
request.url
|
||||
) # type: ignore
|
||||
|
||||
default_port = {"http": 80, "ws": 80, "https": 443, "wss": 443}[scheme]
|
||||
|
||||
if ":" in netloc:
|
||||
host, port_string = netloc.split(":", 1)
|
||||
port = int(port_string)
|
||||
else:
|
||||
host = netloc
|
||||
port = default_port
|
||||
|
||||
# Include the 'host' header.
|
||||
if "host" in request.headers:
|
||||
headers = [] # type: typing.List[typing.Tuple[bytes, bytes]]
|
||||
elif port == default_port:
|
||||
headers = [(b"host", host.encode())]
|
||||
else:
|
||||
headers = [(b"host", (f"{host}:{port}").encode())]
|
||||
|
||||
# Include other request headers.
|
||||
headers += [
|
||||
(key.lower().encode(), value.encode())
|
||||
for key, value in request.headers.items()
|
||||
]
|
||||
|
||||
no_response = False
|
||||
if scheme in {"ws", "wss"}:
|
||||
subprotocol = request.headers.get("sec-websocket-protocol", None)
|
||||
if subprotocol is None:
|
||||
subprotocols = [] # type: typing.Sequence[str]
|
||||
else:
|
||||
subprotocols = [
|
||||
value.strip() for value in subprotocol.split(",")
|
||||
]
|
||||
|
||||
scope = {
|
||||
"type": "websocket",
|
||||
"path": unquote(path),
|
||||
"root_path": "",
|
||||
"scheme": scheme,
|
||||
"query_string": query.encode(),
|
||||
"headers": headers,
|
||||
"client": ["testclient", 50000],
|
||||
"server": [host, port],
|
||||
"subprotocols": subprotocols,
|
||||
}
|
||||
no_response = True
|
||||
|
||||
else:
|
||||
scope = {
|
||||
"type": "http",
|
||||
"http_version": "1.1",
|
||||
"method": request.method,
|
||||
"path": unquote(path),
|
||||
"root_path": "",
|
||||
"scheme": scheme,
|
||||
"query_string": query.encode(),
|
||||
"headers": headers,
|
||||
"client": ["testclient", 50000],
|
||||
"server": [host, port],
|
||||
"extensions": {"http.response.template": {}},
|
||||
}
|
||||
|
||||
async def receive():
|
||||
nonlocal request_complete, response_complete
|
||||
|
||||
if request_complete:
|
||||
while not response_complete:
|
||||
await asyncio.sleep(0.0001)
|
||||
return {"type": "http.disconnect"}
|
||||
|
||||
body = request.body
|
||||
if isinstance(body, str):
|
||||
body_bytes = body.encode("utf-8") # type: bytes
|
||||
elif body is None:
|
||||
body_bytes = b""
|
||||
elif isinstance(body, types.GeneratorType):
|
||||
try:
|
||||
chunk = body.send(None)
|
||||
if isinstance(chunk, str):
|
||||
chunk = chunk.encode("utf-8")
|
||||
return {
|
||||
"type": "http.request",
|
||||
"body": chunk,
|
||||
"more_body": True,
|
||||
}
|
||||
except StopIteration:
|
||||
request_complete = True
|
||||
return {"type": "http.request", "body": b""}
|
||||
else:
|
||||
body_bytes = body
|
||||
|
||||
request_complete = True
|
||||
return {"type": "http.request", "body": body_bytes}
|
||||
|
||||
request_complete = False
|
||||
response_started = False
|
||||
response_complete = False
|
||||
raw_kwargs = {"content": b""} # type: typing.Dict[str, typing.Any]
|
||||
template = None
|
||||
context = None
|
||||
return_value = None
|
||||
|
||||
async def send(message) -> None:
|
||||
nonlocal raw_kwargs, response_started, response_complete, template, context # noqa
|
||||
|
||||
if message["type"] == "http.response.start":
|
||||
assert (
|
||||
not response_started
|
||||
), 'Received multiple "http.response.start" messages.'
|
||||
raw_kwargs["status_code"] = message["status"]
|
||||
raw_kwargs["headers"] = message["headers"]
|
||||
response_started = True
|
||||
elif message["type"] == "http.response.body":
|
||||
assert response_started, (
|
||||
'Received "http.response.body" '
|
||||
'without "http.response.start".'
|
||||
)
|
||||
assert (
|
||||
not response_complete
|
||||
), 'Received "http.response.body" after response completed.'
|
||||
body = message.get("body", b"")
|
||||
more_body = message.get("more_body", False)
|
||||
if request.method != "HEAD":
|
||||
raw_kwargs["content"] += body
|
||||
if not more_body:
|
||||
response_complete = True
|
||||
elif message["type"] == "http.response.template":
|
||||
template = message["template"]
|
||||
context = message["context"]
|
||||
|
||||
try:
|
||||
return_value = await self.app(scope, receive, send)
|
||||
except BaseException as exc:
|
||||
if not self.suppress_exceptions:
|
||||
raise exc from None
|
||||
|
||||
if no_response:
|
||||
response_started = True
|
||||
raw_kwargs = {"status_code": 204, "headers": []}
|
||||
|
||||
if not self.suppress_exceptions:
|
||||
assert response_started, "TestClient did not receive any response."
|
||||
elif not response_started:
|
||||
raw_kwargs = {"status_code": 500, "headers": []}
|
||||
|
||||
raw = httpcore.Response(**raw_kwargs)
|
||||
response = self.build_response(request, raw)
|
||||
if template is not None:
|
||||
response.template = template
|
||||
response.context = context
|
||||
|
||||
if gather_return:
|
||||
response.return_value = return_value
|
||||
return response
|
||||
|
||||
|
||||
class TestASGIApp(ASGIApp):
|
||||
async def __call__(self):
|
||||
await super().__call__()
|
||||
return self.request
|
||||
|
||||
|
||||
async def app_call_with_return(self, scope, receive, send):
|
||||
asgi_app = await TestASGIApp.create(self, scope, receive, send)
|
||||
return await asgi_app()
|
||||
|
||||
|
||||
class SanicASGITestClient(requests.ASGISession):
|
||||
def __init__(
|
||||
self,
|
||||
app,
|
||||
base_url: str = "http://{}".format(ASGI_HOST),
|
||||
suppress_exceptions: bool = False,
|
||||
) -> None:
|
||||
app.__class__.__call__ = app_call_with_return
|
||||
app.asgi = True
|
||||
super().__init__(app)
|
||||
|
||||
adapter = SanicASGIAdapter(
|
||||
app, suppress_exceptions=suppress_exceptions
|
||||
)
|
||||
self.mount("http://", adapter)
|
||||
self.mount("https://", adapter)
|
||||
self.mount("ws://", adapter)
|
||||
self.mount("wss://", adapter)
|
||||
self.headers.update({"user-agent": "testclient"})
|
||||
self.app = app
|
||||
self.base_url = base_url
|
||||
|
||||
async def request(self, method, url, gather_request=True, *args, **kwargs):
|
||||
|
||||
self.gather_request = gather_request
|
||||
response = await super().request(method, url, *args, **kwargs)
|
||||
response.status = response.status_code
|
||||
response.body = response.content
|
||||
response.content_type = response.headers.get("content-type")
|
||||
|
||||
if hasattr(response, "return_value"):
|
||||
request = response.return_value
|
||||
del response.return_value
|
||||
return request, response
|
||||
|
||||
return response
|
||||
|
||||
def merge_environment_settings(self, *args, **kwargs):
|
||||
settings = super().merge_environment_settings(*args, **kwargs)
|
||||
settings.update({"gather_return": self.gather_request})
|
||||
return settings
|
||||
|
||||
async def websocket(self, uri, subprotocols=None, *args, **kwargs):
|
||||
if uri.startswith(("ws:", "wss:")):
|
||||
url = uri
|
||||
else:
|
||||
uri = uri if uri.startswith("/") else "/{uri}".format(uri=uri)
|
||||
url = "ws://testserver{uri}".format(uri=uri)
|
||||
|
||||
headers = kwargs.get("headers", {})
|
||||
headers.setdefault("connection", "upgrade")
|
||||
headers.setdefault("sec-websocket-key", "testserver==")
|
||||
headers.setdefault("sec-websocket-version", "13")
|
||||
if subprotocols is not None:
|
||||
headers.setdefault(
|
||||
"sec-websocket-protocol", ", ".join(subprotocols)
|
||||
)
|
||||
kwargs["headers"] = headers
|
||||
|
||||
return await self.request("websocket", url, **kwargs)
|
||||
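And the ASGI counterpart; unlike SanicTestClient, every call is awaited (app is assumed to be a Sanic instance with a /ping route, as in the earlier sketch):

async def check(app):
    client = SanicASGITestClient(app)

    # request() above gathers the Request object and pairs it with the response
    request, response = await client.get("/ping")
    assert response.status == 200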
|
||||
@@ -1,5 +1,7 @@
|
||||
from sanic.exceptions import InvalidUsage
|
||||
from typing import Any, Callable, List
|
||||
|
||||
from sanic.constants import HTTP_METHODS
|
||||
from sanic.exceptions import InvalidUsage
|
||||
|
||||
|
||||
class HTTPMethodView:
|
||||
@@ -37,7 +39,7 @@ class HTTPMethodView:
|
||||
To add any decorator you could set it into decorators variable
|
||||
"""
|
||||
|
||||
decorators = []
|
||||
decorators: List[Callable[[Callable[..., Any]], Callable[..., Any]]] = []
|
||||
|
||||
def dispatch_request(self, request, *args, **kwargs):
|
||||
handler = getattr(self, request.method.lower(), None)
|
||||
@@ -48,6 +50,7 @@ class HTTPMethodView:
|
||||
"""Return view function for use with the routing system, that
|
||||
dispatches request to appropriate handler method.
|
||||
"""
|
||||
|
||||
def view(*args, **kwargs):
|
||||
self = view.view_class(*class_args, **class_kwargs)
|
||||
return self.dispatch_request(*args, **kwargs)
|
||||
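A usage sketch for the class-based view above: one method per HTTP verb, wired into the router through as_view():

from sanic import Sanic
from sanic.response import text
from sanic.views import HTTPMethodView

app = Sanic("views_demo")

class ItemView(HTTPMethodView):
    decorators = []  # per-view decorators, applied by as_view()

    async def get(self, request):
        return text("read")

    async def post(self, request):
        return text("create")

app.add_route(ItemView.as_view(), "/item")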
@@ -94,11 +97,13 @@ class CompositionView:
|
||||
for method in methods:
|
||||
if method not in HTTP_METHODS:
|
||||
raise InvalidUsage(
|
||||
'{} is not a valid HTTP method.'.format(method))
|
||||
"{} is not a valid HTTP method.".format(method)
|
||||
)
|
||||
|
||||
if method in self.handlers:
|
||||
raise InvalidUsage(
|
||||
'Method {} is already registered.'.format(method))
|
||||
"Method {} is already registered.".format(method)
|
||||
)
|
||||
self.handlers[method] = handler
|
||||
|
||||
def __call__(self, request, *args, **kwargs):
|
||||
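And a sketch of CompositionView, whose add() guard is shown in the hunk above (app as in the previous sketch):

from sanic.response import text
from sanic.views import CompositionView

view = CompositionView()
view.add(["GET"], lambda request: text("got it"))
view.add(["POST", "PUT"], lambda request: text("stored"))
# Re-registering GET here would raise the "already registered" InvalidUsage.

app.add_route(view, "/composed")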
|
||||
@@ -1,18 +1,44 @@
|
||||
from typing import (
|
||||
Any,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Dict,
|
||||
MutableMapping,
|
||||
Optional,
|
||||
Union,
|
||||
)
|
||||
|
||||
from httptools import HttpParserUpgrade # type: ignore
|
||||
from websockets import ( # type: ignore
|
||||
ConnectionClosed,
|
||||
InvalidHandshake,
|
||||
WebSocketCommonProtocol,
|
||||
handshake,
|
||||
)
|
||||
|
||||
from sanic.exceptions import InvalidUsage
|
||||
from sanic.server import HttpProtocol
|
||||
from httptools import HttpParserUpgrade
|
||||
from websockets import handshake, WebSocketCommonProtocol, InvalidHandshake
|
||||
from websockets import ConnectionClosed # noqa
|
||||
|
||||
|
||||
__all__ = ["ConnectionClosed", "WebSocketProtocol", "WebSocketConnection"]
|
||||
|
||||
ASIMessage = MutableMapping[str, Any]
|
||||
|
||||
|
||||
class WebSocketProtocol(HttpProtocol):
|
||||
def __init__(self, *args, websocket_timeout=10,
|
||||
websocket_max_size=None,
|
||||
websocket_max_queue=None,
|
||||
websocket_read_limit=2 ** 16,
|
||||
websocket_write_limit=2 ** 16, **kwargs):
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
websocket_timeout=10,
|
||||
websocket_max_size=None,
|
||||
websocket_max_queue=None,
|
||||
websocket_read_limit=2 ** 16,
|
||||
websocket_write_limit=2 ** 16,
|
||||
**kwargs
|
||||
):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.websocket = None
|
||||
# self.app = None
|
||||
self.websocket_timeout = websocket_timeout
|
||||
self.websocket_max_size = websocket_max_size
|
||||
self.websocket_max_queue = websocket_max_queue
|
||||
@@ -57,36 +83,32 @@ class WebSocketProtocol(HttpProtocol):
|
||||
|
||||
async def websocket_handshake(self, request, subprotocols=None):
|
||||
# let the websockets package do the handshake with the client
|
||||
headers = []
|
||||
|
||||
def get_header(k):
|
||||
return request.headers.get(k, '')
|
||||
|
||||
def set_header(k, v):
|
||||
headers.append((k, v))
|
||||
headers = {}
|
||||
|
||||
try:
|
||||
key = handshake.check_request(get_header)
|
||||
handshake.build_response(set_header, key)
|
||||
key = handshake.check_request(request.headers)
|
||||
handshake.build_response(headers, key)
|
||||
except InvalidHandshake:
|
||||
raise InvalidUsage('Invalid websocket request')
|
||||
raise InvalidUsage("Invalid websocket request")
|
||||
|
||||
subprotocol = None
|
||||
if subprotocols and 'Sec-Websocket-Protocol' in request.headers:
|
||||
if subprotocols and "Sec-Websocket-Protocol" in request.headers:
|
||||
# select a subprotocol
|
||||
client_subprotocols = [p.strip() for p in request.headers[
|
||||
'Sec-Websocket-Protocol'].split(',')]
|
||||
client_subprotocols = [
|
||||
p.strip()
|
||||
for p in request.headers["Sec-Websocket-Protocol"].split(",")
|
||||
]
|
||||
for p in client_subprotocols:
|
||||
if p in subprotocols:
|
||||
subprotocol = p
|
||||
set_header('Sec-Websocket-Protocol', subprotocol)
|
||||
headers["Sec-Websocket-Protocol"] = subprotocol
|
||||
break
|
||||
|
||||
# write the 101 response back to the client
|
||||
rv = b'HTTP/1.1 101 Switching Protocols\r\n'
|
||||
for k, v in headers:
|
||||
rv += k.encode('utf-8') + b': ' + v.encode('utf-8') + b'\r\n'
|
||||
rv += b'\r\n'
|
||||
rv = b"HTTP/1.1 101 Switching Protocols\r\n"
|
||||
for k, v in headers.items():
|
||||
rv += k.encode("utf-8") + b": " + v.encode("utf-8") + b"\r\n"
|
||||
rv += b"\r\n"
|
||||
request.transport.write(rv)
|
||||
|
||||
# hook up the websocket protocol
|
||||
@@ -95,9 +117,54 @@ class WebSocketProtocol(HttpProtocol):
|
||||
max_size=self.websocket_max_size,
|
||||
max_queue=self.websocket_max_queue,
|
||||
read_limit=self.websocket_read_limit,
|
||||
write_limit=self.websocket_write_limit
|
||||
write_limit=self.websocket_write_limit,
|
||||
)
|
||||
# Following two lines are required for websockets 8.x
|
||||
self.websocket.is_client = False
|
||||
self.websocket.side = "server"
|
||||
self.websocket.subprotocol = subprotocol
|
||||
self.websocket.connection_made(request.transport)
|
||||
self.websocket.connection_open()
|
||||
return self.websocket
|
||||
|
||||
|
||||
class WebSocketConnection:
|
||||
|
||||
# TODO
|
||||
# - Implement ping/pong
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
send: Callable[[ASIMessage], Awaitable[None]],
|
||||
receive: Callable[[], Awaitable[ASIMessage]],
|
||||
) -> None:
|
||||
self._send = send
|
||||
self._receive = receive
|
||||
|
||||
async def send(self, data: Union[str, bytes], *args, **kwargs) -> None:
|
||||
message: Dict[str, Union[str, bytes]] = {"type": "websocket.send"}
|
||||
|
||||
if isinstance(data, bytes):
|
||||
message.update({"bytes": data})
|
||||
else:
|
||||
message.update({"text": str(data)})
|
||||
|
||||
await self._send(message)
|
||||
|
||||
async def recv(self, *args, **kwargs) -> Optional[str]:
|
||||
message = await self._receive()
|
||||
|
||||
if message["type"] == "websocket.receive":
|
||||
return message["text"]
|
||||
elif message["type"] == "websocket.disconnect":
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
receive = recv
|
||||
|
||||
async def accept(self) -> None:
|
||||
await self._send({"type": "websocket.accept", "subprotocol": ""})
|
||||
|
||||
async def close(self) -> None:
|
||||
pass
|
||||
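A handler-side sketch of how the connection above behaves under ASGI; the route decorator and the injected ws argument are assumptions about wiring elsewhere in the codebase:

from sanic import Sanic

app = Sanic("ws_demo")

@app.websocket("/feed")
async def feed(request, ws):
    while True:
        data = await ws.recv()
        if data is None:  # recv() above maps websocket.disconnect to None
            break
        await ws.send("echo: {}".format(data))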
|
||||
114
sanic/worker.py
@@ -1,24 +1,27 @@
|
||||
import os
|
||||
import sys
|
||||
import signal
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
try:
|
||||
import ssl
|
||||
except ImportError:
|
||||
ssl = None
|
||||
import gunicorn.workers.base as base # type: ignore
|
||||
|
||||
from sanic.server import HttpProtocol, Signal, serve, trigger_events
|
||||
from sanic.websocket import WebSocketProtocol
|
||||
|
||||
|
||||
try:
|
||||
import uvloop
|
||||
import ssl # type: ignore
|
||||
except ImportError:
|
||||
ssl = None # type: ignore
|
||||
|
||||
try:
|
||||
import uvloop # type: ignore
|
||||
|
||||
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
except ImportError:
|
||||
pass
|
||||
import gunicorn.workers.base as base
|
||||
|
||||
from sanic.server import trigger_events, serve, HttpProtocol, Signal
|
||||
from sanic.websocket import WebSocketProtocol
|
||||
|
||||
|
||||
class GunicornWorker(base.Worker):
|
||||
@@ -50,36 +53,44 @@ class GunicornWorker(base.Worker):
|
||||
def run(self):
|
||||
is_debug = self.log.loglevel == logging.DEBUG
|
||||
protocol = (
|
||||
self.websocket_protocol if self.app.callable.websocket_enabled
|
||||
else self.http_protocol)
|
||||
self.websocket_protocol
|
||||
if self.app.callable.websocket_enabled
|
||||
else self.http_protocol
|
||||
)
|
||||
|
||||
self._server_settings = self.app.callable._helper(
|
||||
loop=self.loop,
|
||||
debug=is_debug,
|
||||
protocol=protocol,
|
||||
ssl=self.ssl_context,
|
||||
run_async=True)
|
||||
self._server_settings['signal'] = self.signal
|
||||
self._server_settings.pop('sock')
|
||||
trigger_events(self._server_settings.get('before_start', []),
|
||||
self.loop)
|
||||
self._server_settings['before_start'] = ()
|
||||
run_async=True,
|
||||
)
|
||||
self._server_settings["signal"] = self.signal
|
||||
self._server_settings.pop("sock")
|
||||
trigger_events(
|
||||
self._server_settings.get("before_start", []), self.loop
|
||||
)
|
||||
self._server_settings["before_start"] = ()
|
||||
|
||||
self._runner = asyncio.ensure_future(self._run(), loop=self.loop)
|
||||
try:
|
||||
self.loop.run_until_complete(self._runner)
|
||||
self.app.callable.is_running = True
|
||||
trigger_events(self._server_settings.get('after_start', []),
|
||||
self.loop)
|
||||
trigger_events(
|
||||
self._server_settings.get("after_start", []), self.loop
|
||||
)
|
||||
self.loop.run_until_complete(self._check_alive())
|
||||
trigger_events(self._server_settings.get('before_stop', []),
|
||||
self.loop)
|
||||
trigger_events(
|
||||
self._server_settings.get("before_stop", []), self.loop
|
||||
)
|
||||
self.loop.run_until_complete(self.close())
|
||||
except BaseException:
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
try:
|
||||
trigger_events(self._server_settings.get('after_stop', []),
|
||||
self.loop)
|
||||
trigger_events(
|
||||
self._server_settings.get("after_stop", []), self.loop
|
||||
)
|
||||
except BaseException:
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
@@ -90,8 +101,11 @@ class GunicornWorker(base.Worker):
|
||||
async def close(self):
|
||||
if self.servers:
|
||||
# stop accepting connections
|
||||
self.log.info("Stopping server: %s, connections: %s",
|
||||
self.pid, len(self.connections))
|
||||
self.log.info(
|
||||
"Stopping server: %s, connections: %s",
|
||||
self.pid,
|
||||
len(self.connections),
|
||||
)
|
||||
for server in self.servers:
|
||||
server.close()
|
||||
await server.wait_closed()
|
||||
@@ -105,8 +119,9 @@ class GunicornWorker(base.Worker):
|
||||
# gracefully shutdown timeout
|
||||
start_shutdown = 0
|
||||
graceful_shutdown_timeout = self.cfg.graceful_timeout
|
||||
while self.connections and \
|
||||
(start_shutdown < graceful_shutdown_timeout):
|
||||
while self.connections and (
|
||||
start_shutdown < graceful_shutdown_timeout
|
||||
):
|
||||
await asyncio.sleep(0.1)
|
||||
start_shutdown = start_shutdown + 0.1
|
||||
|
||||
@@ -115,9 +130,7 @@ class GunicornWorker(base.Worker):
|
||||
coros = []
|
||||
for conn in self.connections:
|
||||
if hasattr(conn, "websocket") and conn.websocket:
|
||||
coros.append(
|
||||
conn.websocket.close_connection()
|
||||
)
|
||||
coros.append(conn.websocket.close_connection())
|
||||
else:
|
||||
conn.close()
|
||||
_shutdown = asyncio.gather(*coros, loop=self.loop)
|
||||
@@ -148,8 +161,9 @@ class GunicornWorker(base.Worker):
|
||||
)
|
||||
if self.max_requests and req_count > self.max_requests:
|
||||
self.alive = False
|
||||
self.log.info("Max requests exceeded, shutting down: %s",
|
||||
self)
|
||||
self.log.info(
|
||||
"Max requests exceeded, shutting down: %s", self
|
||||
)
|
||||
elif pid == os.getpid() and self.ppid != os.getppid():
|
||||
self.alive = False
|
||||
self.log.info("Parent changed, shutting down: %s", self)
|
||||
@@ -175,23 +189,29 @@ class GunicornWorker(base.Worker):
|
||||
def init_signals(self):
|
||||
# Set up signals through the event loop API.
|
||||
|
||||
self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit,
|
||||
signal.SIGQUIT, None)
|
||||
self.loop.add_signal_handler(
|
||||
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
|
||||
)
|
||||
|
||||
self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit,
|
||||
signal.SIGTERM, None)
|
||||
self.loop.add_signal_handler(
|
||||
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
|
||||
)
|
||||
|
||||
self.loop.add_signal_handler(signal.SIGINT, self.handle_quit,
|
||||
signal.SIGINT, None)
|
||||
self.loop.add_signal_handler(
|
||||
signal.SIGINT, self.handle_quit, signal.SIGINT, None
|
||||
)
|
||||
|
||||
self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch,
|
||||
signal.SIGWINCH, None)
|
||||
self.loop.add_signal_handler(
|
||||
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
|
||||
)
|
||||
|
||||
self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1,
|
||||
signal.SIGUSR1, None)
|
||||
self.loop.add_signal_handler(
|
||||
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
|
||||
)
|
||||
|
||||
self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort,
|
||||
signal.SIGABRT, None)
|
||||
self.loop.add_signal_handler(
|
||||
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
|
||||
)
|
||||
|
||||
# Don't let SIGTERM and SIGUSR1 disturb active requests
|
||||
# by interrupting system calls
|
||||
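For context, this worker class is what lets a Sanic application run under Gunicorn, e.g. gunicorn myapp:app --worker-class sanic.worker.GunicornWorker (the myapp:app target is illustrative).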
|
||||
59
scripts/changelog.py
Executable file
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from os import path
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
import towncrier
|
||||
import click
|
||||
except ImportError:
|
||||
print(
|
||||
"Please make sure you have a installed towncrier and click before using this tool"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
"--draft",
|
||||
"draft",
|
||||
default=False,
|
||||
flag_value=True,
|
||||
help="Render the news fragments, don't write to files, "
|
||||
"don't check versions.",
|
||||
)
|
||||
@click.option(
|
||||
"--dir", "directory", default=path.dirname(path.abspath(__file__))
|
||||
)
|
||||
@click.option("--name", "project_name", default=None)
|
||||
@click.option(
|
||||
"--version",
|
||||
"project_version",
|
||||
default=None,
|
||||
help="Render the news fragments using given version.",
|
||||
)
|
||||
@click.option("--date", "project_date", default=None)
|
||||
@click.option(
|
||||
"--yes",
|
||||
"answer_yes",
|
||||
default=False,
|
||||
flag_value=True,
|
||||
help="Do not ask for confirmation to remove news fragments.",
|
||||
)
|
||||
def _main(
|
||||
draft,
|
||||
directory,
|
||||
project_name,
|
||||
project_version,
|
||||
project_date,
|
||||
answer_yes,
|
||||
):
|
||||
return towncrier.__main(
|
||||
draft,
|
||||
directory,
|
||||
project_name,
|
||||
project_version,
|
||||
project_date,
|
||||
answer_yes,
|
||||
)
|
||||
|
||||
_main()
|
||||
33
scripts/pyproject.toml
Normal file
@@ -0,0 +1,33 @@
|
||||
[tool.towncrier]
|
||||
package = "sanic"
|
||||
package_dir = "."
|
||||
filename = "../CHANGELOG.rst"
|
||||
directory = "./changelogs"
|
||||
underlines = ["=", "*", "~"]
|
||||
issue_format = "`#{issue} <https://github.com/huge-success/sanic/issues/{issue}>`__"
|
||||
title_format = "Version {version}"
|
||||
|
||||
[[tool.towncrier.type]]
|
||||
directory = "feature"
|
||||
name = "Features"
|
||||
showcontent = true
|
||||
|
||||
[[tool.towncrier.type]]
|
||||
directory = "bugfix"
|
||||
name = "Bugfixes"
|
||||
showcontent = true
|
||||
|
||||
[[tool.towncrier.type]]
|
||||
directory = "doc"
|
||||
name = "Improved Documentation"
|
||||
showcontent = true
|
||||
|
||||
[[tool.towncrier.type]]
|
||||
directory = "removal"
|
||||
name = "Deprecations and Removals"
|
||||
showcontent = true
|
||||
|
||||
[[tool.towncrier.type]]
|
||||
directory = "misc"
|
||||
name = "Miscellaneous internal changes"
|
||||
showcontent = true
|
||||
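With this configuration, news fragments live under scripts/changelogs/ using towncrier's issue-number-plus-type naming (for example changelogs/1234.feature.rst), and rendering appends a "Version {version}" section to ../CHANGELOG.rst; the scripts/changelog.py wrapper above drives that rendering.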
353
scripts/release.py
Executable file
@@ -0,0 +1,353 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from argparse import ArgumentParser, Namespace
|
||||
from collections import OrderedDict
|
||||
from configparser import RawConfigParser
|
||||
from datetime import datetime
|
||||
from json import dumps
|
||||
from os import path, chdir
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
from jinja2 import Environment, BaseLoader
|
||||
from requests import patch
|
||||
import towncrier
|
||||
|
||||
GIT_COMMANDS = {
|
||||
"get_tag": ["git describe --tags --abbrev=0"],
|
||||
"commit_version_change": [
|
||||
"git add . && git commit -m 'Bumping up version from "
|
||||
"{current_version} to {new_version}'"
|
||||
],
|
||||
"create_new_tag": [
|
||||
"git tag -a {new_version} -m 'Bumping up version from "
|
||||
"{current_version} to {new_version}'"
|
||||
],
|
||||
"push_tag": ["git push origin {new_version}"],
|
||||
"get_change_log": [
|
||||
'git log --no-merges --pretty=format:"%h::: %cn::: %s" '
|
||||
"{current_version}.."
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
RELEASE_NOTE_TEMPLATE = """
|
||||
# {{ release_name }} - {% now 'utc', '%Y-%m-%d' %}
|
||||
|
||||
To see the exhaustive list of pull requests included in this release see:
|
||||
https://github.com/huge-success/sanic/milestone/{{milestone}}?closed=1
|
||||
|
||||
# Changelog
|
||||
{% for row in changelogs %}
|
||||
* {{ row -}}
|
||||
{% endfor %}
|
||||
|
||||
# Credits
|
||||
{% for author in authors %}
|
||||
* {{ author -}}
|
||||
{% endfor %}
|
||||
"""
|
||||
|
||||
JINJA_RELEASE_NOTE_TEMPLATE = Environment(
|
||||
loader=BaseLoader, extensions=["jinja2_time.TimeExtension"]
|
||||
).from_string(RELEASE_NOTE_TEMPLATE)
|
||||
|
||||
RELEASE_NOTE_UPDATE_URL = (
|
||||
"https://api.github.com/repos/huge-success/sanic/releases/tags/"
|
||||
"{new_version}?access_token={token}"
|
||||
)
|
||||
|
||||
|
||||
class Directory:
|
||||
def __init__(self):
|
||||
self._old_path = path.dirname(path.abspath(__file__))
|
||||
self._new_path = path.dirname(self._old_path)
|
||||
|
||||
def __enter__(self):
|
||||
chdir(self._new_path)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
chdir(self._old_path)
|
||||
|
||||
|
||||
def _run_shell_command(command: list):
|
||||
try:
|
||||
process = Popen(
|
||||
command, stderr=PIPE, stdout=PIPE, stdin=PIPE, shell=True
|
||||
)
|
||||
output, error = process.communicate()
|
||||
return_code = process.returncode
|
||||
return output.decode("utf-8"), error, return_code
|
||||
except:
|
||||
return None, None, -1
|
||||
|
||||
|
||||
def _fetch_default_calendar_release_version():
|
||||
return datetime.now().strftime("%y.%m.0")
|
||||
|
||||
|
||||
def _fetch_current_version(config_file: str) -> str:
|
||||
if path.isfile(config_file):
|
||||
config_parser = RawConfigParser()
|
||||
with open(config_file) as cfg:
|
||||
config_parser.read_file(cfg)
|
||||
return (
|
||||
config_parser.get("version", "current_version")
|
||||
or _fetch_default_calendar_release_version()
|
||||
)
|
||||
else:
|
||||
return _fetch_default_calendar_release_version()
|
||||
|
||||
|
||||
def _change_micro_version(current_version: str):
|
||||
version_string = current_version.split(".")
|
||||
version_string[-1] = str((int(version_string[-1]) + 1))
|
||||
return ".".join(version_string)
|
||||
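A one-line worked example of the bump above:

# _change_micro_version("19.9.0") -> "19.9.1"  (only the last component changes)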
|
||||
|
||||
def _get_new_version(
|
||||
config_file: str = "./setup.cfg",
|
||||
current_version: str = None,
|
||||
micro_release: bool = False,
|
||||
):
|
||||
if micro_release:
|
||||
if current_version:
|
||||
return _change_micro_version(current_version)
|
||||
elif config_file:
|
||||
return _change_micro_version(_fetch_current_version(config_file))
|
||||
else:
|
||||
return _fetch_default_calendar_release_version()
|
||||
else:
|
||||
return _fetch_default_calendar_release_version()
|
||||
|
||||
|
||||
def _get_current_tag(git_command_name="get_tag"):
|
||||
global GIT_COMMANDS
|
||||
command = GIT_COMMANDS.get(git_command_name)
|
||||
out, err, ret = _run_shell_command(command)
|
||||
if len(str(out)):
|
||||
return str(out).split("\n")[0]
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def _update_release_version_for_sanic(
|
||||
current_version, new_version, config_file, generate_changelog
|
||||
):
|
||||
config_parser = RawConfigParser()
|
||||
with open(config_file) as cfg:
|
||||
config_parser.read_file(cfg)
|
||||
config_parser.set("version", "current_version", new_version)
|
||||
|
||||
version_files = config_parser.get("version", "files")
|
||||
current_version_line = config_parser.get(
|
||||
"version", "current_version_pattern"
|
||||
).format(current_version=current_version)
|
||||
new_version_line = config_parser.get(
|
||||
"version", "new_version_pattern"
|
||||
).format(new_version=new_version)
|
||||
|
||||
for version_file in version_files.split(","):
|
||||
with open(version_file) as init_file:
|
||||
data = init_file.read()
|
||||
|
||||
new_data = data.replace(current_version_line, new_version_line)
|
||||
with open(version_file, "w") as init_file:
|
||||
init_file.write(new_data)
|
||||
|
||||
with open(config_file, "w") as config:
|
||||
config_parser.write(config)
|
||||
|
||||
if generate_changelog:
|
||||
towncrier.__main(
|
||||
draft=False,
|
||||
directory=path.dirname(path.abspath(__file__)),
|
||||
project_name=None,
|
||||
project_version=new_version,
|
||||
project_date=None,
|
||||
answer_yes=True,
|
||||
)
|
||||
|
||||
command = GIT_COMMANDS.get("commit_version_change")
|
||||
command[0] = command[0].format(
|
||||
new_version=new_version, current_version=current_version
|
||||
)
|
||||
_, err, ret = _run_shell_command(command)
|
||||
if int(ret) != 0:
|
||||
print(
|
||||
"Failed to Commit Version upgrade changes to Sanic: {}".format(
|
||||
err.decode("utf-8")
|
||||
)
|
||||
)
|
||||
exit(1)
|
||||
|
||||
|
||||
def _generate_change_log(current_version: str = None):
|
||||
global GIT_COMMANDS
|
||||
command = GIT_COMMANDS.get("get_change_log")
|
||||
command[0] = command[0].format(current_version=current_version)
|
||||
output, error, ret = _run_shell_command(command=command)
|
||||
if not len(str(output)):
|
||||
print("Unable to Fetch Change log details to update the Release Note")
|
||||
exit(1)
|
||||
|
||||
commit_details = OrderedDict()
|
||||
commit_details["authors"] = dict()
|
||||
commit_details["commits"] = list()
|
||||
|
||||
for line in str(output).split("\n"):
|
||||
commit, author, description = line.split(":::")
|
||||
if "GitHub" not in author:
|
||||
commit_details["authors"][author] = 1
|
||||
commit_details["commits"].append(" - ".join([commit, description]))
|
||||
|
||||
return commit_details
|
||||
|
||||
|
||||
def _generate_markdown_document(
|
||||
milestone, release_name, current_version, release_version
|
||||
):
|
||||
global JINJA_RELEASE_NOTE_TEMPLATE
|
||||
release_name = release_name or release_version
|
||||
change_log = _generate_change_log(current_version=current_version)
|
||||
return JINJA_RELEASE_NOTE_TEMPLATE.render(
|
||||
release_name=release_name,
|
||||
milestone=milestone,
|
||||
changelogs=change_log["commits"],
|
||||
authors=change_log["authors"].keys(),
|
||||
)
|
||||
|
||||
|
||||
def _tag_release(new_version, current_version, milestone, release_name, token):
|
||||
global GIT_COMMANDS
|
||||
global RELEASE_NOTE_UPDATE_URL
|
||||
for command_name in ["create_new_tag", "push_tag"]:
|
||||
command = GIT_COMMANDS.get(command_name)
|
||||
command[0] = command[0].format(
|
||||
new_version=new_version, current_version=current_version
|
||||
)
|
||||
out, error, ret = _run_shell_command(command=command)
|
||||
if int(ret) != 0:
|
||||
print("Failed to execute the command: {}".format(command[0]))
|
||||
exit(1)
|
||||
|
||||
change_log = _generate_markdown_document(
|
||||
milestone, release_name, current_version, new_version
|
||||
)
|
||||
|
||||
body = {"name": release_name or new_version, "body": change_log}
|
||||
|
||||
headers = {"content-type": "application/json"}
|
||||
|
||||
response = patch(
|
||||
RELEASE_NOTE_UPDATE_URL.format(new_version=new_version, token=token),
|
||||
data=dumps(body),
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
|
||||
def release(args: Namespace):
|
||||
current_tag = _get_current_tag()
|
||||
current_version = _fetch_current_version(args.config)
|
||||
if current_tag and current_version not in current_tag:
|
||||
print(
|
||||
"Tag mismatch between what's in git and what was provided by "
|
||||
"--current-version. Existing: {}, Give: {}".format(
|
||||
current_tag, current_version
|
||||
)
|
||||
)
|
||||
exit(1)
|
||||
new_version = args.release_version or _get_new_version(
|
||||
args.config, current_version, args.micro_release
|
||||
)
|
||||
_update_release_version_for_sanic(
|
||||
current_version=current_version,
|
||||
new_version=new_version,
|
||||
config_file=args.config,
|
||||
generate_changelog=args.generate_changelog,
|
||||
)
|
||||
if args.tag_release:
|
||||
_tag_release(
|
||||
current_version=current_version,
|
||||
new_version=new_version,
|
||||
milestone=args.milestone,
|
||||
release_name=args.release_name,
|
||||
token=args.token,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli = ArgumentParser(description="Sanic Release Manager")
|
||||
cli.add_argument(
|
||||
"--release-version",
|
||||
"-r",
|
||||
help="New Version to use for Release",
|
||||
default=_fetch_default_calendar_release_version(),
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--current-version",
|
||||
"-cv",
|
||||
help="Current Version to default in case if you don't want to "
|
||||
"use the version configuration files",
|
||||
default=None,
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--config",
|
||||
"-c",
|
||||
help="Configuration file used for release",
|
||||
default="./setup.cfg",
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--token",
|
||||
"-t",
|
||||
help="Git access token with necessary access to Huge Sanic Org",
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--milestone",
|
||||
"-ms",
|
||||
help="Git Release milestone information to include in relase note",
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--release-name",
|
||||
"-n",
|
||||
help="Release Name to use if any",
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--micro-release",
|
||||
"-m",
|
||||
help="Micro Release with patches only",
|
||||
default=False,
|
||||
action="store_true",
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--tag-release",
|
||||
help="Tag a new release for Sanic",
|
||||
default=False,
|
||||
action="store_true",
|
||||
required=False,
|
||||
)
|
||||
cli.add_argument(
|
||||
"--generate-changelog",
|
||||
help="Generate changelog for Sanic as part of release",
|
||||
default=False,
|
||||
action="store_true",
|
||||
required=False,
|
||||
)
|
||||
args = cli.parse_args()
|
||||
if args.tag_release:
|
||||
for key, value in {
|
||||
"--token/-t": args.token,
|
||||
"--milestone/-m": args.milestone,
|
||||
}.items():
|
||||
if not value:
|
||||
print(f"{key} is mandatory while using --tag-release")
|
||||
exit(1)
|
||||
with Directory():
|
||||
release(args)
|
||||
21
setup.cfg
Normal file
@@ -0,0 +1,21 @@
|
||||
[flake8]
ignore = E203, W503

[isort]
atomic = true
default_section = THIRDPARTY
include_trailing_comma = true
known_first_party = sanic
known_third_party = pytest
line_length = 79
lines_after_imports = 2
lines_between_types = 1
multi_line_output = 3
not_skip = __init__.py

[version]
current_version = 19.9.0
files = sanic/__version__.py
current_version_pattern = __version__ = "{current_version}"
new_version_pattern = __version__ = "{new_version}"

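The [version] section above is what the release script's --config option points at by default (./setup.cfg). Below is a rough, hypothetical sketch of how such a section could drive the in-place version bump; the actual _update_release_version_for_sanic() lives in the release script and may differ:

# Hypothetical sketch of consuming the [version] section; names are local
# to this example and are not taken from the release script itself.
from configparser import ConfigParser


def bump_version_files(config_path: str, new_version: str) -> None:
    config = ConfigParser()
    config.read(config_path)
    section = config["version"]

    old_line = section["current_version_pattern"].format(
        current_version=section["current_version"]
    )
    new_line = section["new_version_pattern"].format(new_version=new_version)

    # "files" may list several comma-separated paths; each gets the same
    # literal line replacement.
    for file_name in (name.strip() for name in section["files"].split(",")):
        with open(file_name) as fp:
            content = fp.read()
        with open(file_name, "w") as fp:
            fp.write(content.replace(old_line, new_line))


# bump_version_files("./setup.cfg", "19.12.0") would rewrite
# sanic/__version__.py from __version__ = "19.9.0" to __version__ = "19.12.0".
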
141 setup.py
@@ -4,73 +4,140 @@ Sanic
import codecs
import os
import re
from distutils.errors import DistutilsPlatformError
import sys
from distutils.util import strtobool

from setuptools import setup
from setuptools.command.test import test as TestCommand


def open_local(paths, mode='r', encoding='utf8'):
    path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        *paths
    )
class PyTest(TestCommand):
    """
    Provide a Test runner to be used from setup.py to run unit tests
    """

    user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = ""

    def run_tests(self):
        import shlex
        import pytest

        errno = pytest.main(shlex.split(self.pytest_args))
        sys.exit(errno)


def open_local(paths, mode="r", encoding="utf8"):
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)), *paths)

    return codecs.open(path, mode, encoding)


with open_local(['sanic', '__init__.py'], encoding='latin1') as fp:
with open_local(["sanic", "__version__.py"], encoding="latin1") as fp:
    try:
        version = re.findall(r"^__version__ = '([^']+)'\r?$",
                             fp.read(), re.M)[0]
        version = re.findall(
            r"^__version__ = \"([^']+)\"\r?$", fp.read(), re.M
        )[0]
    except IndexError:
        raise RuntimeError('Unable to determine version.')
        raise RuntimeError("Unable to determine version.")


with open_local(['README.rst']) as rm:
with open_local(["README.rst"]) as rm:
    long_description = rm.read()

setup_kwargs = {
    'name': 'sanic',
    'version': version,
    'url': 'http://github.com/channelcat/sanic/',
    'license': 'MIT',
    'author': 'Channel Cat',
    'author_email': 'channelcat@gmail.com',
    'description': (
        'A microframework based on uvloop, httptools, and learnings of flask'),
    'long_description': long_description,
    'packages': ['sanic'],
    'platforms': 'any',
    'classifiers': [
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    "name": "sanic",
    "version": version,
    "url": "http://github.com/huge-success/sanic/",
    "license": "MIT",
    "author": "Sanic Community",
    "author_email": "admhpkns@gmail.com",
    "description": (
        "A web server and web framework that's written to go fast. Build fast. Run fast."
    ),
    "long_description": long_description,
    "packages": ["sanic"],
    "platforms": "any",
    "classifiers": [
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
    ],
}

env_dependency = '; sys_platform != "win32" and implementation_name == "cpython"'
ujson = 'ujson>=1.35' + env_dependency
uvloop = 'uvloop>=0.5.3' + env_dependency
env_dependency = (
    '; sys_platform != "win32" ' 'and implementation_name == "cpython"'
)
ujson = "ujson>=1.35" + env_dependency
uvloop = "uvloop>=0.5.3" + env_dependency

requirements = [
    'httptools>=0.0.9',
    "httptools>=0.0.10",
    uvloop,
    ujson,
    'aiofiles>=0.3.0',
    'websockets>=5.0,<6.0',
    'multidict>=4.0,<5.0',
    "aiofiles>=0.3.0",
    "websockets>=7.0,<9.0",
    "multidict>=4.0,<5.0",
    "requests-async==0.5.0",
]

tests_require = [
    "pytest==5.2.1",
    "multidict>=4.0,<5.0",
    "gunicorn",
    "pytest-cov",
    "httpcore==0.3.0",
    "beautifulsoup4",
    uvloop,
    ujson,
    "pytest-sanic",
    "pytest-sugar",
    "pytest-benchmark",
]

docs_require = [
    "sphinx>=2.1.2",
    "sphinx_rtd_theme",
    "recommonmark>=0.5.0",
    "docutils",
    "pygments",
]

dev_require = tests_require + [
    "aiofiles",
    "tox",
    "black",
    "flake8",
    "bandit",
    "towncrier",
]

all_require = dev_require + docs_require

if strtobool(os.environ.get("SANIC_NO_UJSON", "no")):
    print("Installing without uJSON")
    requirements.remove(ujson)
    tests_require.remove(ujson)

# 'nt' means windows OS
if strtobool(os.environ.get("SANIC_NO_UVLOOP", "no")):
    print("Installing without uvLoop")
    requirements.remove(uvloop)
    tests_require.remove(uvloop)

setup_kwargs['install_requires'] = requirements
extras_require = {
    "test": tests_require,
    "dev": dev_require,
    "docs": docs_require,
    "all": all_require,
}

setup_kwargs["install_requires"] = requirements
setup_kwargs["tests_require"] = tests_require
setup_kwargs["extras_require"] = extras_require
setup_kwargs["cmdclass"] = {"test": PyTest}
setup(**setup_kwargs)

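One detail of the version parsing above worth flagging: the new pattern keeps [^']+ as its capture class even though the surrounding quotes are now double quotes; backtracking still stops the match at the closing \", so versions like 19.9.0 are captured correctly. A small, self-contained sanity check (illustrative only, not part of the diff):

# Illustrative check of the version regex used in setup.py above.
import re

sample = '__version__ = "19.9.0"\n'
pattern = r"^__version__ = \"([^']+)\"\r?$"

assert re.findall(pattern, sample, re.M) == ["19.9.0"]
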
55 tests/benchmark/test_route_resolution_benchmark.py (new file)
@@ -0,0 +1,55 @@
from random import choice, seed

from pytest import mark

import sanic.router


seed("Pack my box with five dozen liquor jugs.")

# Disable caching for testing purposes
sanic.router.ROUTER_CACHE_SIZE = 0


class TestSanicRouteResolution:
    @mark.asyncio
    async def test_resolve_route_no_arg_string_path(
        self, sanic_router, route_generator, benchmark
    ):
        simple_routes = route_generator.generate_random_direct_route(
            max_route_depth=4
        )
        router, simple_routes = sanic_router(route_details=simple_routes)
        route_to_call = choice(simple_routes)

        result = benchmark.pedantic(
            router._get,
            ("/{}".format(route_to_call[-1]), route_to_call[0], "localhost"),
            iterations=1000,
            rounds=1000,
        )
        assert await result[0](None) == 1

    @mark.asyncio
    async def test_resolve_route_with_typed_args(
        self, sanic_router, route_generator, benchmark
    ):
        typed_routes = route_generator.add_typed_parameters(
            route_generator.generate_random_direct_route(max_route_depth=4),
            max_route_depth=8,
        )
        router, typed_routes = sanic_router(route_details=typed_routes)
        route_to_call = choice(typed_routes)
        url = route_generator.generate_url_for_template(
            template=route_to_call[-1]
        )

        print("{} -> {}".format(route_to_call[-1], url))

        result = benchmark.pedantic(
            router._get,
            ("/{}".format(url), route_to_call[0], "localhost"),
            iterations=1000,
            rounds=1000,
        )
        assert await result[0](None) == 1
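
For context on the benchmark.pedantic(...) calls above: iterations is the number of back-to-back calls timed per round, rounds is how many such measurements are collected, and the call returns the benchmarked function's return value, which is how the assertions above get hold of the resolved route handler. A minimal, hypothetical stand-alone example of the same pattern, assuming the pytest-benchmark plugin listed in tests_require is installed:

# Hypothetical pytest-benchmark example; _concat and the test name are
# made up for illustration and are not part of the Sanic test suite.
def _concat(path, method):
    return "{} {}".format(method, path)


def test_concat_benchmark(benchmark):
    result = benchmark.pedantic(
        _concat, ("/hello/world", "GET"), iterations=1000, rounds=100
    )
    assert result == "GET /hello/world"
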
131 tests/conftest.py (new file)
@@ -0,0 +1,131 @@
import random
import re
import string
import sys
import uuid

import pytest

from sanic import Sanic
from sanic.router import RouteExists, Router


random.seed("Pack my box with five dozen liquor jugs.")

if sys.platform in ["win32", "cygwin"]:
    collect_ignore = ["test_worker.py"]


async def _handler(request):
    """
    Dummy placeholder handler used when registering routes with the sanic
    router. It is never actually called by a sanic app, so do not worry
    about its arguments.

    If you change the return value of this handler, make sure to propagate
    the change to any test case that leverages RouteStringGenerator.
    """
    return 1


TYPE_TO_GENERATOR_MAP = {
    "string": lambda: "".join(
        [random.choice(string.ascii_letters + string.digits) for _ in range(4)]
    ),
    "int": lambda: random.choice(range(1000000)),
    "number": lambda: random.random(),
    "alpha": lambda: "".join(
        [random.choice(string.ascii_letters) for _ in range(4)]
    ),
    "uuid": lambda: str(uuid.uuid1()),
}


class RouteStringGenerator:

    ROUTE_COUNT_PER_DEPTH = 100
    HTTP_METHODS = ["GET", "PUT", "POST", "PATCH", "DELETE", "OPTION"]
    ROUTE_PARAM_TYPES = ["string", "int", "number", "alpha", "uuid"]

    def generate_random_direct_route(self, max_route_depth=4):
        routes = []
        for depth in range(1, max_route_depth + 1):
            for _ in range(self.ROUTE_COUNT_PER_DEPTH):
                route = "/".join(
                    [
                        TYPE_TO_GENERATOR_MAP.get("string")()
                        for _ in range(depth)
                    ]
                )
                route = route.replace(".", "", -1)
                route_detail = (random.choice(self.HTTP_METHODS), route)

                if route_detail not in routes:
                    routes.append(route_detail)
        return routes

    def add_typed_parameters(self, current_routes, max_route_depth=8):
        routes = []
        for method, route in current_routes:
            current_length = len(route.split("/"))
            new_route_part = "/".join(
                [
                    "<{}:{}>".format(
                        TYPE_TO_GENERATOR_MAP.get("string")(),
                        random.choice(self.ROUTE_PARAM_TYPES),
                    )
                    for _ in range(max_route_depth - current_length)
                ]
            )
            route = "/".join([route, new_route_part])
            route = route.replace(".", "", -1)
            routes.append((method, route))
        return routes

    @staticmethod
    def generate_url_for_template(template):
        url = template
        for pattern, param_type in re.findall(
            re.compile(r"((?:<\w+:(string|int|number|alpha|uuid)>)+)"),
            template,
        ):
            value = TYPE_TO_GENERATOR_MAP.get(param_type)()
            url = url.replace(pattern, str(value), -1)
        return url


@pytest.fixture(scope="function")
def sanic_router():
    # noinspection PyProtectedMember
    def _setup(route_details: tuple) -> (Router, tuple):
        router = Router()
        added_router = []
        for method, route in route_details:
            try:
                router._add(
                    uri="/{}".format(route),
                    methods=frozenset({method}),
                    host="localhost",
                    handler=_handler,
                )
                added_router.append((method, route))
            except RouteExists:
                pass
        return router, added_router

    return _setup


@pytest.fixture(scope="function")
def route_generator() -> RouteStringGenerator:
    return RouteStringGenerator()


@pytest.fixture(scope="function")
def url_param_generator():
    return TYPE_TO_GENERATOR_MAP


@pytest.fixture
def app(request):
    return Sanic(request.node.name)
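
To make the conftest helpers above concrete: generate_url_for_template() replaces every <name:type> placeholder in a route template with a value from the generator registered for that type. A stand-alone illustration of that substitution follows; every name in it is local to the sketch rather than imported from conftest.py:

# Stand-alone sketch of the <name:type> substitution done in conftest.py.
import random
import re
import string
import uuid

generators = {
    "string": lambda: "".join(
        random.choices(string.ascii_letters + string.digits, k=4)
    ),
    "int": lambda: random.randrange(1000000),
    "number": lambda: random.random(),
    "alpha": lambda: "".join(random.choices(string.ascii_letters, k=4)),
    "uuid": lambda: str(uuid.uuid1()),
}

template = "api/<Ab3x:int>/<Zq9k:uuid>"
url = template
for placeholder, param_type in re.findall(
    r"((?:<\w+:(string|int|number|alpha|uuid)>)+)", template
):
    url = url.replace(placeholder, str(generators[param_type]()))

print(url)  # e.g. api/123456/9a2f0c52-e1a9-11e9-...
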
@@ -1,18 +1,26 @@
# Run with python3 simple_server.py PORT

from aiohttp import web
import asyncio
import sys
import uvloop

import ujson as json
import uvloop

from aiohttp import web


loop = uvloop.new_event_loop()
asyncio.set_event_loop(loop)


async def handle(request):
    return web.Response(body=json.dumps({"test":True}).encode('utf-8'), content_type='application/json')
    return web.Response(
        body=json.dumps({"test": True}).encode("utf-8"),
        content_type="application/json",
    )


app = web.Application(loop=loop)
app.router.add_route('GET', '/', handle)
app.router.add_route("GET", "/", handle)

web.run_app(app, port=sys.argv[1], access_log=None)

@@ -1,11 +1,13 @@
# Run with: gunicorn --workers=1 --worker-class=meinheld.gmeinheld.MeinheldWorker -b :8000 simple_server:app
import bottle
from bottle import route, run
import ujson

from bottle import route, run

@route('/')

@route("/")
def index():
    return ujson.dumps({'test': True})
    return ujson.dumps({"test": True})


app = bottle.default_app()

@@ -3,9 +3,11 @@
import falcon
import ujson as json


class TestResource:
    def on_get(self, req, resp):
        resp.body = json.dumps({"test": True})


app = falcon.API()
app.add_route('/', TestResource())
app.add_route("/", TestResource())

Some files were not shown because too many files have changed in this diff.