Merge pull request #512 from subyraman/fix-url-building

Fix `request.url` and other url properties
Raphael Deem
2017-03-10 00:38:16 -08:00
committed by GitHub
10 changed files with 168 additions and 18 deletions
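Before this change, `request.url` held only the decoded path of the request; the diff below moves URL parsing into lazy properties and makes `request.url` return the full URL. A minimal sketch of the resulting request API, assuming a hypothetical app and route (not part of this commit):

from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)

@app.route('/hello')
async def handler(request):
    # the pieces exposed by the new properties, plus the rebuilt full URL
    return text(' '.join((request.scheme, request.host, request.path,
                          request.query_string, request.url)))

# GET http://example.com/hello?name=world would return:
# "http example.com /hello name=world http://example.com/hello?name=world"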

sanic/exceptions.py

@@ -70,7 +70,7 @@ TRACEBACK_WRAPPER_HTML = '''
         {frame_html}
         <p class="summary">
             <b>{exc_name}: {exc_value}</b>
-            while handling uri <code>{uri}</code>
+            while handling path <code>{path}</code>
         </p>
     </div>
 </body>

sanic/handlers.py

@@ -34,7 +34,7 @@ class ErrorHandler:
                 exc_name=exc_type.__name__,
                 exc_value=exc_value,
                 frame_html=''.join(frame_html),
-                uri=request.url)
+                path=request.path)

    def add(self, exception, handler):
        self.handlers[exception] = handler
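The template key changes in step with the handler: `{uri}` becomes `{path}`, and the error page is now fed `request.path`. A toy rendering of the renamed placeholder, with made-up values:

summary = ('<b>{exc_name}: {exc_value}</b> '
           'while handling path <code>{path}</code>')
print(summary.format(exc_name='ValueError', exc_value='bad input',
                     path='/hello'))
# <b>ValueError: bad input</b> while handling path <code>/hello</code>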

sanic/request.py

@@ -2,7 +2,7 @@ from cgi import parse_header
 from collections import namedtuple
 from http.cookies import SimpleCookie
 from httptools import parse_url
-from urllib.parse import parse_qs
+from urllib.parse import parse_qs, urlunparse

 try:
     from ujson import loads as json_loads
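The newly imported `urlunparse` is the inverse of `urlparse`: it joins a 6-tuple of (scheme, netloc, path, params, query, fragment), omitting falsy members. A quick standalone demonstration:

from urllib.parse import urlunparse

# params and fragment are None, so they are left out of the result
print(urlunparse(('https', 'example.com', '/users', None, 'page=2', None)))
# https://example.com/users?page=2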
@@ -36,24 +36,20 @@ class RequestParameters(dict):
 class Request(dict):
     """Properties of an HTTP request such as URL, headers, etc."""
     __slots__ = (
-        'app', 'url', 'headers', 'version', 'method', '_cookies', 'transport',
-        'query_string', 'body',
-        'parsed_json', 'parsed_args', 'parsed_form', 'parsed_files',
-        '_ip',
+        'app', 'headers', 'version', 'method', '_cookies', 'transport',
+        'body', 'parsed_json', 'parsed_args', 'parsed_form', 'parsed_files',
+        '_ip', '_parsed_url',
     )

     def __init__(self, url_bytes, headers, version, method, transport):
         # TODO: Content-Encoding detection
-        url_parsed = parse_url(url_bytes)
+        self._parsed_url = parse_url(url_bytes)
         self.app = None
-        self.url = url_parsed.path.decode('utf-8')
+
         self.headers = headers
         self.version = version
         self.method = method
         self.transport = transport
-        self.query_string = None
-        if url_parsed.query:
-            self.query_string = url_parsed.query.decode('utf-8')

         # Init but do not inhale
         self.body = []
@@ -144,6 +140,40 @@ class Request(dict):
         self._ip = self.transport.get_extra_info('peername')
         return self._ip

+    @property
+    def scheme(self):
+        if self.transport.get_extra_info('sslcontext'):
+            return 'https'
+        return 'http'
+
+    @property
+    def host(self):
+        # it appears that httptools doesn't return the host
+        # so pull it from the headers
+        return self.headers.get('Host', '')
+
+    @property
+    def path(self):
+        return self._parsed_url.path.decode('utf-8')
+
+    @property
+    def query_string(self):
+        if self._parsed_url.query:
+            return self._parsed_url.query.decode('utf-8')
+        else:
+            return ''
+
+    @property
+    def url(self):
+        return urlunparse((
+            self.scheme,
+            self.host,
+            self.path,
+            None,
+            self.query_string,
+            None))
+

 File = namedtuple('File', ['type', 'body', 'name'])
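The properties can be exercised by constructing a `Request` directly; a sketch under the assumption that only `get_extra_info` is needed from the transport (the fake transport below is illustrative, not part of this commit):

from sanic.request import Request

class FakeTransport:
    def get_extra_info(self, name):
        return None  # no 'sslcontext', so scheme falls back to 'http'

request = Request(b'/hello?name=world', {'Host': 'example.com'},
                  '1.1', 'GET', FakeTransport())

assert request.path == '/hello'
assert request.query_string == 'name=world'
assert request.url == 'http://example.com/hello?name=world'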

sanic/router.py

@@ -281,14 +281,14 @@ class Router:
         """
         # No virtual hosts specified; default behavior
         if not self.hosts:
-            return self._get(request.url, request.method, '')
+            return self._get(request.path, request.method, '')
         # virtual hosts specified; try to match route to the host header
         try:
-            return self._get(request.url, request.method,
+            return self._get(request.path, request.method,
                              request.headers.get("Host", ''))
         # try default hosts
         except NotFound:
-            return self._get(request.url, request.method, '')
+            return self._get(request.path, request.method, '')

     @lru_cache(maxsize=ROUTER_CACHE_SIZE)
     def _get(self, url, method, host):
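Because `_get` is memoized with `lru_cache`, whatever the router passes in becomes part of the cache key. Matching on the bare path keeps that key stable now that `request.url` includes the query string; a toy illustration of the caching behavior:

from functools import lru_cache

@lru_cache(maxsize=None)
def _get(url, method, host):
    print('cache miss:', url, method, host)
    return (url, method, host)

_get('/hello', 'GET', '')  # miss; prints once
_get('/hello', 'GET', '')  # hit; served from the cache

If the full URL were used instead, `/hello?a=1` and `/hello?b=2` would occupy separate cache entries and never match a route registered as `/hello`.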

sanic/testing.py

@@ -17,7 +17,9 @@ class SanicTestClient:
             host=HOST, port=PORT, uri=uri)
         log.info(url)

-        async with aiohttp.ClientSession(cookies=cookies) as session:
+        conn = aiohttp.TCPConnector(verify_ssl=False)
+        async with aiohttp.ClientSession(
+                cookies=cookies, connector=conn) as session:
             async with getattr(
                     session, method.lower())(url, *args, **kwargs) as response:
                 response.text = await response.text()
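Disabling certificate verification lets the test client exercise HTTPS endpoints served with a self-signed certificate. A standalone sketch of the same connector setup (the URL is hypothetical; `verify_ssl` is the aiohttp spelling of this era):

import asyncio
import aiohttp

async def fetch(url):
    conn = aiohttp.TCPConnector(verify_ssl=False)  # skip certificate checks
    async with aiohttp.ClientSession(connector=conn) as session:
        async with session.get(url) as response:
            return await response.text()

# asyncio.get_event_loop().run_until_complete(fetch('https://localhost:42101/'))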