from functools import partial
from mimetypes import guess_type
from os import path
from urllib.parse import quote_plus

from aiofiles import open as open_async  # type: ignore

from sanic.compat import Header
from sanic.cookies import CookieJar
from sanic.helpers import STATUS_CODES, has_message_body, remove_entity_headers


try:
    from ujson import dumps as json_dumps
except ImportError:
    from json import dumps

    # This is done in order to ensure that the JSON response is
    # kept consistent across both ujson and inbuilt json usage.
    json_dumps = partial(dumps, separators=(",", ":"))


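# A quick sketch of the fallback above (illustrative values, not executed
# here): with the stdlib encoder and separators=(",", ":"), output stays
# compact, e.g. json_dumps({"a": 1, "b": 2}) -> '{"a":1,"b":2}', which is
# intended to match ujson's compact formatting.

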
class BaseHTTPResponse:
    def _encode_body(self, data):
        try:
            # Try to encode it regularly
            return data.encode()
        except AttributeError:
            # Convert it to a str if you can't
            return str(data).encode()

    def _parse_headers(self):
        headers = b""
        for name, value in self.headers.items():
            try:
                headers += b"%b: %b\r\n" % (
                    name.encode(),
                    value.encode("utf-8"),
                )
            except AttributeError:
                headers += b"%b: %b\r\n" % (
                    str(name).encode(),
                    str(value).encode("utf-8"),
                )

        return headers

    @property
    def cookies(self):
        if self._cookies is None:
            self._cookies = CookieJar(self.headers)
        return self._cookies


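# A small sketch of what BaseHTTPResponse._parse_headers produces (assumed
# example values, not taken from this module): with headers set to
# {"Content-Type": "text/plain"}, the serialized form is
# b"Content-Type: text/plain\r\n" -- one "Name: value\r\n" pair per header.

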
class StreamingHTTPResponse(BaseHTTPResponse):
    __slots__ = (
        "protocol",
        "streaming_fn",
        "status",
        "content_type",
        "headers",
        "chunked",
        "_cookies",
    )

    def __init__(
        self,
        streaming_fn,
        status=200,
        headers=None,
        content_type="text/plain",
        chunked=True,
    ):
        self.content_type = content_type
        self.streaming_fn = streaming_fn
        self.status = status
        self.headers = Header(headers or {})
        self.chunked = chunked
        self._cookies = None

    async def write(self, data):
        """Writes a chunk of data to the streaming response.

        :param data: bytes-ish data to be written.
        """
        if type(data) != bytes:
            data = self._encode_body(data)

        if self.chunked:
            await self.protocol.push_data(b"%x\r\n%b\r\n" % (len(data), data))
        else:
            await self.protocol.push_data(data)
        await self.protocol.drain()

    async def stream(
        self, version="1.1", keep_alive=False, keep_alive_timeout=None
    ):
        """Streams headers, runs the `streaming_fn` callback that writes
        content to the response body, then finalizes the response body.
        """
        if version != "1.1":
            self.chunked = False
        headers = self.get_headers(
            version,
            keep_alive=keep_alive,
            keep_alive_timeout=keep_alive_timeout,
        )
        await self.protocol.push_data(headers)
        await self.protocol.drain()
        await self.streaming_fn(self)
        if self.chunked:
            await self.protocol.push_data(b"0\r\n\r\n")
        # no need to await drain here after this write, because it is the
        # very last thing we write and nothing needs to wait for it.

    def get_headers(
        self, version="1.1", keep_alive=False, keep_alive_timeout=None
    ):
        # This is all returned in a kind-of funky way
        # We tried to make this as fast as possible in pure python
        timeout_header = b""
        if keep_alive and keep_alive_timeout is not None:
            timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout

        if self.chunked and version == "1.1":
            self.headers["Transfer-Encoding"] = "chunked"
            self.headers.pop("Content-Length", None)
        self.headers["Content-Type"] = self.headers.get(
            "Content-Type", self.content_type
        )

        headers = self._parse_headers()

        if self.status == 200:
            status = b"OK"
        else:
            status = STATUS_CODES.get(self.status)

        return (b"HTTP/%b %d %b\r\n" b"%b" b"%b\r\n") % (
            version.encode(),
            self.status,
            status,
            timeout_header,
            headers,
        )


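# A sketch of the chunked framing done by StreamingHTTPResponse.write (assumed
# example payload): with chunked=True, write(b"hello") pushes
# b"5\r\nhello\r\n" (length in hex, CRLF, payload, CRLF), and the final
# b"0\r\n\r\n" terminator is emitted by stream() once streaming_fn returns.

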
class HTTPResponse(BaseHTTPResponse):
    __slots__ = ("body", "status", "content_type", "headers", "_cookies")

    def __init__(
        self,
        body=None,
        status=200,
        headers=None,
        content_type="text/plain",
        body_bytes=b"",
    ):
        self.content_type = content_type

        if body is not None:
            self.body = self._encode_body(body)
        else:
            self.body = body_bytes

        self.status = status
        self.headers = Header(headers or {})
        self._cookies = None

    def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
        # This is all returned in a kind-of funky way
        # We tried to make this as fast as possible in pure python
        timeout_header = b""
        if keep_alive and keep_alive_timeout is not None:
            timeout_header = b"Keep-Alive: %d\r\n" % keep_alive_timeout

        body = b""
        if has_message_body(self.status):
            body = self.body
            self.headers["Content-Length"] = self.headers.get(
                "Content-Length", len(self.body)
            )

        self.headers["Content-Type"] = self.headers.get(
            "Content-Type", self.content_type
        )

        if self.status in (304, 412):
            self.headers = remove_entity_headers(self.headers)

        headers = self._parse_headers()

        if self.status == 200:
            status = b"OK"
        else:
            status = STATUS_CODES.get(self.status, b"UNKNOWN RESPONSE")

        return (
            b"HTTP/%b %d %b\r\n" b"Connection: %b\r\n" b"%b" b"%b\r\n" b"%b"
        ) % (
            version.encode(),
            self.status,
            status,
            b"keep-alive" if keep_alive else b"close",
            timeout_header,
            headers,
            body,
        )

    @property
    def cookies(self):
        if self._cookies is None:
            self._cookies = CookieJar(self.headers)
        return self._cookies


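# A rough sketch of HTTPResponse.output (assumed example values): for
# HTTPResponse("hi") with keep_alive=False, the rendered bytes look roughly
# like b"HTTP/1.1 200 OK\r\nConnection: close\r\n"
# b"Content-Length: 2\r\nContent-Type: text/plain\r\n\r\nhi".

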
def json(
    body,
    status=200,
    headers=None,
    content_type="application/json",
    dumps=json_dumps,
    **kwargs
):
    """
    Returns response object with body in json format.

    :param body: Response data to be serialized.
    :param status: Response code.
    :param headers: Custom Headers.
    :param kwargs: Remaining arguments that are passed to the json encoder.
    """
    return HTTPResponse(
        dumps(body, **kwargs),
        headers=headers,
        status=status,
        content_type=content_type,
    )


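# Usage sketch for json() (hypothetical route and handler names; assumes a
# Sanic `app` object defined in application code, not in this module):
#
#     @app.route("/status")
#     async def status_handler(request):
#         return json({"ok": True, "version": 3})

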
def text(
    body, status=200, headers=None, content_type="text/plain; charset=utf-8"
):
    """
    Returns response object with body in text format.

    :param body: Response data to be encoded.
    :param status: Response code.
    :param headers: Custom Headers.
    :param content_type: the content type (string) of the response
    """
    return HTTPResponse(
        body, status=status, headers=headers, content_type=content_type
    )


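# Usage sketch for text() (hypothetical handler; assumes a Sanic `app`):
#
#     @app.route("/ping")
#     async def ping_handler(request):
#         return text("pong", status=200)

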
def raw(
    body, status=200, headers=None, content_type="application/octet-stream"
):
    """
    Returns response object without encoding the body.

    :param body: Response data.
    :param status: Response code.
    :param headers: Custom Headers.
    :param content_type: the content type (string) of the response.
    """
    return HTTPResponse(
        body_bytes=body,
        status=status,
        headers=headers,
        content_type=content_type,
    )


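# Usage sketch for raw() (hypothetical handler; the payload bytes are made up):
#
#     @app.route("/blob")
#     async def blob_handler(request):
#         return raw(b"\x89PNG\r\n\x1a\n", content_type="image/png")

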
def html(body, status=200, headers=None):
    """
    Returns response object with body in html format.

    :param body: Response data to be encoded.
    :param status: Response code.
    :param headers: Custom Headers.
    """
    return HTTPResponse(
        body,
        status=status,
        headers=headers,
        content_type="text/html; charset=utf-8",
    )


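# Usage sketch for html() (hypothetical handler; assumes a Sanic `app`):
#
#     @app.route("/")
#     async def index_handler(request):
#         return html("<h1>Hello</h1>")

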
async def file(
    location,
    status=200,
    mime_type=None,
    headers=None,
    filename=None,
    _range=None,
):
    """Return a response object with file data.

    :param location: Location of file on system.
    :param mime_type: Specific mime_type.
    :param headers: Custom Headers.
    :param filename: Override filename.
    :param _range:
    """
    headers = headers or {}
    if filename:
        headers.setdefault(
            "Content-Disposition", 'attachment; filename="{}"'.format(filename)
        )
    filename = filename or path.split(location)[-1]

    async with open_async(location, mode="rb") as _file:
        if _range:
            await _file.seek(_range.start)
            out_stream = await _file.read(_range.size)
            headers["Content-Range"] = "bytes %s-%s/%s" % (
                _range.start,
                _range.end,
                _range.total,
            )
            status = 206
        else:
            out_stream = await _file.read()

    mime_type = mime_type or guess_type(filename)[0] or "text/plain"
    return HTTPResponse(
        status=status,
        headers=headers,
        content_type=mime_type,
        body_bytes=out_stream,
    )


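# Usage sketch for file() (hypothetical handler and path; note the await,
# since file() is a coroutine):
#
#     @app.route("/report")
#     async def report_handler(request):
#         return await file("/srv/data/report.pdf", filename="report.pdf")

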
async def file_stream(
    location,
    status=200,
    chunk_size=4096,
    mime_type=None,
    headers=None,
    filename=None,
    chunked=True,
    _range=None,
):
    """Return a streaming response object with file data.

    :param location: Location of file on system.
    :param chunk_size: The size of each chunk in the stream (in bytes)
    :param mime_type: Specific mime_type.
    :param headers: Custom Headers.
    :param filename: Override filename.
    :param chunked: Enable or disable chunked transfer-encoding
    :param _range:
    """
    headers = headers or {}
    if filename:
        headers.setdefault(
            "Content-Disposition", 'attachment; filename="{}"'.format(filename)
        )
    filename = filename or path.split(location)[-1]

    _file = await open_async(location, mode="rb")

    async def _streaming_fn(response):
        nonlocal _file, chunk_size
        try:
            if _range:
                chunk_size = min((_range.size, chunk_size))
                await _file.seek(_range.start)
                to_send = _range.size
                while to_send > 0:
                    content = await _file.read(chunk_size)
                    if len(content) < 1:
                        break
                    to_send -= len(content)
                    await response.write(content)
            else:
                while True:
                    content = await _file.read(chunk_size)
                    if len(content) < 1:
                        break
                    await response.write(content)
        finally:
            await _file.close()
        return  # Returning from this fn closes the stream

    mime_type = mime_type or guess_type(filename)[0] or "text/plain"
    if _range:
        headers["Content-Range"] = "bytes %s-%s/%s" % (
            _range.start,
            _range.end,
            _range.total,
        )
        status = 206
    return StreamingHTTPResponse(
        streaming_fn=_streaming_fn,
        status=status,
        headers=headers,
        content_type=mime_type,
        chunked=chunked,
    )


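# Usage sketch for file_stream() (hypothetical handler and path; also a
# coroutine, so it must be awaited):
#
#     @app.route("/video")
#     async def video_handler(request):
#         return await file_stream("/srv/media/clip.mp4", chunk_size=8192)

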
def stream(
    streaming_fn,
    status=200,
    headers=None,
    content_type="text/plain; charset=utf-8",
    chunked=True,
):
    """Accepts a coroutine `streaming_fn` which can be used to
    write chunks to a streaming response. Returns a `StreamingHTTPResponse`.

    Example usage::

        @app.route("/")
        async def index(request):
            async def streaming_fn(response):
                await response.write('foo')
                await response.write('bar')

            return stream(streaming_fn, content_type='text/plain')

    :param streaming_fn: A coroutine that accepts a response and
        writes content to that response.
    :param content_type: the content type (string) of the response
    :param headers: Custom Headers.
    :param chunked: Enable or disable chunked transfer-encoding
    """
    return StreamingHTTPResponse(
        streaming_fn,
        headers=headers,
        content_type=content_type,
        status=status,
        chunked=chunked,
    )


def redirect(
    to, headers=None, status=302, content_type="text/html; charset=utf-8"
):
    """Abort execution and cause a 302 redirect (by default).

    :param to: path or fully qualified URL to redirect to
    :param headers: optional dict of headers to include in the new request
    :param status: status code (int) of the new request, defaults to 302
    :param content_type: the content type (string) of the response
    :returns: the redirecting Response
    """
    headers = headers or {}

    # URL Quote the URL before redirecting
    safe_to = quote_plus(to, safe=":/%#?&=@[]!$&'()*+,;")

    # According to RFC 7231, a relative URI is now permitted.
    headers["Location"] = safe_to

    return HTTPResponse(
        status=status, headers=headers, content_type=content_type
    )


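# Usage sketch for redirect() (hypothetical handler; assumes a Sanic `app`):
#
#     @app.route("/old-path")
#     async def old_path_handler(request):
#         return redirect("/new-path", status=301)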