import sys
import json
import socket
from cgi import parse_header
from collections import namedtuple
from http.cookies import SimpleCookie
from httptools import parse_url
from urllib.parse import parse_qs, urlunparse

try:
    from ujson import loads as json_loads
except ImportError:
    if sys.version_info[:2] == (3, 5):
        def json_loads(data):
            # on Python 3.5 json.loads only supports str not bytes
            return json.loads(data.decode())
    else:
        json_loads = json.loads

from sanic.exceptions import InvalidUsage
from sanic.log import error_logger, logger


DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"

# HTTP/1.1: https://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.2.1
# > If the media type remains unknown, the recipient SHOULD treat it
# > as type "application/octet-stream"


class RequestParameters(dict):
    """Hosts a dict with lists as values where get returns the first
    value of the list and getlist returns the whole shebang
    """

    def get(self, name, default=None):
        """Return the first value, either the default or actual"""
        return super().get(name, [default])[0]

    def getlist(self, name, default=None):
        """Return the entire list"""
        return super().get(name, default)
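

# A minimal usage sketch (illustrative comment only; nothing here is
# executed and the values are made up):
#
#     params = RequestParameters({'tag': ['python', 'sanic']})
#     params.get('tag')       # -> 'python' (first value only)
#     params.getlist('tag')   # -> ['python', 'sanic']
#     params.get('missing')   # -> None (falls back to the default)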


class Request(dict):
    """Properties of an HTTP request such as URL, headers, etc."""
    __slots__ = (
        'app', 'headers', 'version', 'method', '_cookies', 'transport',
        'body', 'parsed_json', 'parsed_args', 'parsed_form', 'parsed_files',
        '_ip', '_parsed_url', 'uri_template', 'stream', '_remote_addr',
        '_socket', '_port', '__weakref__', 'raw_url'
    )

    def __init__(self, url_bytes, headers, version, method, transport):
        self.raw_url = url_bytes
        # TODO: Content-Encoding detection
        self._parsed_url = parse_url(url_bytes)
        self.app = None

        self.headers = headers
        self.version = version
        self.method = method
        self.transport = transport

        # Init but do not inhale
        self.body = []
        self.parsed_json = None
        self.parsed_form = None
        self.parsed_files = None
        self.parsed_args = None
        self.uri_template = None
        self._cookies = None
        self.stream = None

    def __repr__(self):
        if self.method is None or not self.path:
            return '<{0}>'.format(self.__class__.__name__)
        return '<{0}: {1} {2}>'.format(self.__class__.__name__,
                                       self.method,
                                       self.path)

    def __bool__(self):
        if self.transport:
            return True
        return False

    @property
    def json(self):
        if self.parsed_json is None:
            self.load_json()

        return self.parsed_json

    def load_json(self, loads=json_loads):
        try:
            self.parsed_json = loads(self.body)
        except Exception:
            if not self.body:
                return None
            raise InvalidUsage("Failed when parsing body as json")

        return self.parsed_json
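
    # Illustrative sketch (comment only; assumes request.body has already
    # been read into bytes):
    #
    #     request.body = b'{"hello": "world"}'
    #     request.json   # -> {'hello': 'world'}
    #
    # An empty body yields None; a malformed body raises InvalidUsage.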

    @property
    def token(self):
        """Attempt to return the auth header token.

        :return: token related to request
        """
        prefixes = ('Bearer', 'Token')
        auth_header = self.headers.get('Authorization')

        if auth_header is not None:
            for prefix in prefixes:
                if prefix in auth_header:
                    return auth_header.partition(prefix)[-1].strip()

        return auth_header
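
    # Illustrative sketch (comment only; the header value is made up):
    #
    #     request.headers = {'Authorization': 'Bearer abc123'}
    #     request.token   # -> 'abc123'
    #
    # An unrecognised prefix returns the raw header value; a missing
    # Authorization header returns None.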

    @property
    def form(self):
        if self.parsed_form is None:
            self.parsed_form = RequestParameters()
            self.parsed_files = RequestParameters()
            content_type = self.headers.get(
                'Content-Type', DEFAULT_HTTP_CONTENT_TYPE)
            content_type, parameters = parse_header(content_type)
            try:
                if content_type == 'application/x-www-form-urlencoded':
                    self.parsed_form = RequestParameters(
                        parse_qs(self.body.decode('utf-8')))
                elif content_type == 'multipart/form-data':
                    # TODO: Stream this instead of reading to/from memory
                    boundary = parameters['boundary'].encode('utf-8')
                    self.parsed_form, self.parsed_files = (
                        parse_multipart_form(self.body, boundary))
            except Exception:
                error_logger.exception("Failed when parsing form")

        return self.parsed_form
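
    # Illustrative sketch (comment only; values are made up): with a
    # 'Content-Type: application/x-www-form-urlencoded' header and a body
    # of b'a=1&a=2&b=3', parse_qs yields {'a': ['1', '2'], 'b': ['3']}, so
    # request.form.get('a') returns '1' and request.form.getlist('a')
    # returns ['1', '2'].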

    @property
    def files(self):
        if self.parsed_files is None:
            self.form  # compute form to get files

        return self.parsed_files

    @property
    def args(self):
        if self.parsed_args is None:
            if self.query_string:
                self.parsed_args = RequestParameters(
                    parse_qs(self.query_string))
            else:
                self.parsed_args = RequestParameters()
        return self.parsed_args

    @property
    def raw_args(self):
        return {k: v[0] for k, v in self.args.items()}

    @property
    def cookies(self):
        if self._cookies is None:
            cookie = self.headers.get('Cookie')
            if cookie is not None:
                cookies = SimpleCookie()
                cookies.load(cookie)
                self._cookies = {name: cookie.value
                                 for name, cookie in cookies.items()}
            else:
                self._cookies = {}
        return self._cookies
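
    # Illustrative sketch (comment only; the header value is made up): a
    # 'Cookie: session=abc123; theme=dark' header parses via SimpleCookie
    # into {'session': 'abc123', 'theme': 'dark'}.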

    @property
    def ip(self):
        if not hasattr(self, '_socket'):
            self._get_address()
        return self._ip

    @property
    def port(self):
        if not hasattr(self, '_socket'):
            self._get_address()
        return self._port

    @property
    def socket(self):
        if not hasattr(self, '_socket'):
            self._get_address()
        return self._socket

    def _get_address(self):
        sock = self.transport.get_extra_info('socket')

        if sock.family == socket.AF_INET:
            self._socket = (self.transport.get_extra_info('peername') or
                            (None, None))
            self._ip, self._port = self._socket
        elif sock.family == socket.AF_INET6:
            self._socket = (self.transport.get_extra_info('peername') or
                            (None, None, None, None))
            self._ip, self._port, *_ = self._socket
        else:
            self._ip, self._port = (None, None)
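
    # Illustrative sketch (comment only; the address is made up): for an
    # IPv4 connection asyncio's get_extra_info('peername') is a 2-tuple
    # such as ('127.0.0.1', 51234), giving ip == '127.0.0.1' and
    # port == 51234; for IPv6 it is a 4-tuple (host, port, flowinfo,
    # scope_id) and only the first two items are kept.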

    @property
    def remote_addr(self):
        """Attempt to return the original client ip based on X-Forwarded-For.

        :return: original client ip.
        """
        if not hasattr(self, '_remote_addr'):
            forwarded_for = self.headers.get('X-Forwarded-For', '').split(',')
            remote_addrs = [
                addr for addr in [
                    addr.strip() for addr in forwarded_for
                ] if addr
            ]
            if len(remote_addrs) > 0:
                self._remote_addr = remote_addrs[0]
            else:
                self._remote_addr = ''
        return self._remote_addr
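
    # Illustrative sketch (comment only; addresses are made up): an
    # 'X-Forwarded-For: 203.0.113.5, 10.0.0.1' header makes remote_addr
    # return '203.0.113.5' (the left-most entry); without the header it
    # returns ''.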

    @property
    def scheme(self):
        if self.app.websocket_enabled \
                and self.headers.get('upgrade') == 'websocket':
            scheme = 'ws'
        else:
            scheme = 'http'

        if self.transport.get_extra_info('sslcontext'):
            scheme += 's'

        return scheme

    @property
    def host(self):
        # it appears that httptools doesn't return the host
        # so pull it from the headers
        return self.headers.get('Host', '')

    @property
    def content_type(self):
        return self.headers.get('Content-Type', DEFAULT_HTTP_CONTENT_TYPE)

    @property
    def match_info(self):
        """return matched info after resolving route"""
        return self.app.router.get(self)[2]

    @property
    def path(self):
        return self._parsed_url.path.decode('utf-8')

    @property
    def query_string(self):
        if self._parsed_url.query:
            return self._parsed_url.query.decode('utf-8')
        else:
            return ''

    @property
    def url(self):
        return urlunparse((
            self.scheme,
            self.host,
            self.path,
            None,
            self.query_string,
            None))
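
    # Illustrative sketch (comment only; values are made up):
    #
    #     urlunparse(('http', 'example.com', '/path', None, 'a=1', None))
    #     # -> 'http://example.com/path?a=1'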


File = namedtuple('File', ['type', 'body', 'name'])


def parse_multipart_form(body, boundary):
    """Parse a request body and return fields and files

    :param body: bytes request body
    :param boundary: bytes multipart boundary
    :return: fields (RequestParameters), files (RequestParameters)
    """
    files = RequestParameters()
    fields = RequestParameters()

    form_parts = body.split(boundary)
    for form_part in form_parts[1:-1]:
        file_name = None
        content_type = 'text/plain'
        content_charset = 'utf-8'
        field_name = None
        line_index = 2
        line_end_index = 0
        while not line_end_index == -1:
            line_end_index = form_part.find(b'\r\n', line_index)
            form_line = form_part[line_index:line_end_index].decode('utf-8')
            line_index = line_end_index + 2

            if not form_line:
                break

            colon_index = form_line.index(':')
            form_header_field = form_line[0:colon_index].lower()
            form_header_value, form_parameters = parse_header(
                form_line[colon_index + 2:])

            if form_header_field == 'content-disposition':
                file_name = form_parameters.get('filename')
                field_name = form_parameters.get('name')
            elif form_header_field == 'content-type':
                content_type = form_header_value
                content_charset = form_parameters.get('charset', 'utf-8')

        if field_name:
            post_data = form_part[line_index:-4]
            if file_name:
                form_file = File(type=content_type,
                                 name=file_name,
                                 body=post_data)
                if field_name in files:
                    files[field_name].append(form_file)
                else:
                    files[field_name] = [form_file]
            else:
                value = post_data.decode(content_charset)
                if field_name in fields:
                    fields[field_name].append(value)
                else:
                    fields[field_name] = [value]
        else:
            logger.debug('Form-data field does not have a \'name\' parameter \
                         in the Content-Disposition header')

    return fields, files
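

# A minimal usage sketch (illustrative comment only; the boundary and
# field are made up):
#
#     body = (b'--simpleboundary\r\n'
#             b'Content-Disposition: form-data; name="title"\r\n'
#             b'\r\n'
#             b'hello\r\n'
#             b'--simpleboundary--\r\n')
#     fields, files = parse_multipart_form(body, b'simpleboundary')
#     fields.get('title')  # -> 'hello'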