Merge pull request #21 from kolanos/normalize-linebreaks

Introduce end-of-line normalization
This commit is contained in:
channelcat 2016-10-15 13:38:44 -07:00 committed by GitHub
commit caedd7284a
13 changed files with 912 additions and 911 deletions

1
.gitattributes vendored Normal file
View File

@ -0,0 +1 @@
* text=auto

6
.gitignore vendored
View File

@ -1,4 +1,4 @@
settings.py
*.pyc
.idea/*
settings.py
*.pyc
.idea/*
.cache/*

View File

@ -1,3 +1,3 @@
httptools
ujson
httptools
ujson
uvloop

View File

@ -1,24 +1,24 @@
class Config:
    """Default configuration values for a Sanic application."""

    # ASCII-art logo written to the debug log at startup.  The interior
    # lines are string content, kept byte-identical to the original.
    LOGO = """
_______________
/ \\
| Gotta go fast! |
| _________________/
|/
"""
    # Maximum accepted request size in bytes.
    REQUEST_MAX_SIZE = 100000000  # 100 megababies
    # Seconds before an idle request connection is timed out.
    REQUEST_TIMEOUT = 60  # 60 seconds
class Config:
    """Default configuration values for a Sanic application."""

    # ASCII-art logo written to the debug log at startup.  The interior
    # lines are string content, kept byte-identical to the original.
    LOGO = """
_______________
/ \\
| Gotta go fast! |
| _________________/
|/
"""
    # Maximum accepted request size in bytes.
    REQUEST_MAX_SIZE = 100000000  # 100 megababies
    # Seconds before an idle request connection is timed out.
    REQUEST_TIMEOUT = 60  # 60 seconds

View File

@ -1,4 +1,4 @@
# Package-wide logging setup, configured once at import time.
import logging
# NOTE(review): basicConfig here configures the process-wide root logger as a
# side effect of importing this module — confirm that is intended.
logging.basicConfig(level=logging.INFO, format="%(asctime)s: %(levelname)s: %(message)s")
# Shared logger instance used throughout the package.
log = logging.getLogger(__name__)
# Package-wide logging setup, configured once at import time.
import logging
# NOTE(review): basicConfig here configures the process-wide root logger as a
# side effect of importing this module — confirm that is intended.
logging.basicConfig(level=logging.INFO, format="%(asctime)s: %(levelname)s: %(message)s")
# Shared logger instance used throughout the package.
log = logging.getLogger(__name__)

View File

@ -1,143 +1,143 @@
from cgi import parse_header
from collections import namedtuple
from httptools import parse_url
from urllib.parse import parse_qs
from ujson import loads as json_loads

from .log import log


class RequestParameters(dict):
    """
    Hosts a dict with lists as values where get returns the first
    value of the list and getlist returns the whole shebang
    """

    def __init__(self, *args, **kwargs):
        self.super = super()
        self.super.__init__(*args, **kwargs)

    def get(self, name, default=None):
        """Return the first value stored under *name*, or *default*."""
        values = self.super.get(name)
        return values[0] if values else default

    def getlist(self, name, default=None):
        """Return the full list of values stored under *name*, or *default*."""
        return self.super.get(name, default)


class Request:
    """Lazily-parsed HTTP request object handed to route handlers."""

    __slots__ = (
        'url', 'headers', 'version', 'method',
        'query_string', 'body',
        'parsed_json', 'parsed_args', 'parsed_form', 'parsed_files',
    )

    def __init__(self, url_bytes, headers, version, method):
        # TODO: Content-Encoding detection
        url_parsed = parse_url(url_bytes)
        self.url = url_parsed.path.decode('utf-8')
        self.headers = headers
        self.version = version
        self.method = method
        self.query_string = url_parsed.query.decode('utf-8') if url_parsed.query else None

        # Init but do not inhale
        self.body = None
        self.parsed_json = None
        self.parsed_form = None
        self.parsed_files = None
        self.parsed_args = None

    @property
    def json(self):
        """Body parsed as JSON; None when the body is missing or invalid."""
        if not self.parsed_json:
            try:
                self.parsed_json = json_loads(self.body)
            except Exception:
                # Narrowed from a bare except: bad/missing bodies still leave
                # parsed_json as None, but KeyboardInterrupt now propagates.
                pass
        return self.parsed_json

    @property
    def form(self):
        """Form fields parsed from an urlencoded or multipart body."""
        if self.parsed_form is None:
            self.parsed_form = {}
            self.parsed_files = {}
            # Default to urlencoded when the header is absent; previously a
            # missing Content-Type made parse_header(None) raise TypeError.
            content_type, parameters = parse_header(
                self.headers.get('Content-Type', 'application/x-www-form-urlencoded'))
            try:
                if content_type is None or content_type == 'application/x-www-form-urlencoded':
                    self.parsed_form = RequestParameters(parse_qs(self.body.decode('utf-8')))
                elif content_type == 'multipart/form-data':
                    # TODO: Stream this instead of reading to/from memory
                    boundary = parameters['boundary'].encode('utf-8')
                    self.parsed_form, self.parsed_files = parse_multipart_form(self.body, boundary)
            except Exception as e:
                # Best-effort: a malformed body yields empty form/files.
                log.exception(e)
        return self.parsed_form

    @property
    def files(self):
        """Uploaded files from a multipart body (computed via .form)."""
        if self.parsed_files is None:
            _ = self.form  # compute form to get files
        return self.parsed_files

    @property
    def args(self):
        """Query-string parameters as a RequestParameters mapping."""
        if self.parsed_args is None:
            if self.query_string:
                self.parsed_args = RequestParameters(parse_qs(self.query_string))
            else:
                self.parsed_args = {}
        return self.parsed_args


File = namedtuple('File', ['type', 'body', 'name'])


def parse_multipart_form(body, boundary):
    """
    Parses a request body and returns fields and files
    :param body: Bytes request body
    :param boundary: Bytes multipart boundary
    :return: fields (dict), files (dict)
    """
    files = {}
    fields = {}

    form_parts = body.split(boundary)
    for form_part in form_parts[1:-1]:
        file_name = None
        file_type = None
        field_name = None
        line_index = 2
        line_end_index = 0
        # Walk the part's CRLF-separated header lines until the blank line.
        while not line_end_index == -1:
            line_end_index = form_part.find(b'\r\n', line_index)
            form_line = form_part[line_index:line_end_index].decode('utf-8')
            line_index = line_end_index + 2

            if not form_line:
                break

            colon_index = form_line.index(':')
            form_header_field = form_line[0:colon_index]
            form_header_value, form_parameters = parse_header(form_line[colon_index + 2:])

            if form_header_field == 'Content-Disposition':
                if 'filename' in form_parameters:
                    file_name = form_parameters['filename']
                field_name = form_parameters.get('name')
            elif form_header_field == 'Content-Type':
                file_type = form_header_value

        # Strip the trailing CRLF-- that precedes the next boundary.
        post_data = form_part[line_index:-4]
        if file_name or file_type:
            files[field_name] = File(type=file_type, name=file_name, body=post_data)
        else:
            fields[field_name] = post_data.decode('utf-8')

    return fields, files
from cgi import parse_header
from collections import namedtuple
from httptools import parse_url
from urllib.parse import parse_qs
from ujson import loads as json_loads

from .log import log


class RequestParameters(dict):
    """
    Hosts a dict with lists as values where get returns the first
    value of the list and getlist returns the whole shebang
    """

    def __init__(self, *args, **kwargs):
        self.super = super()
        self.super.__init__(*args, **kwargs)

    def get(self, name, default=None):
        """Return the first value stored under *name*, or *default*."""
        values = self.super.get(name)
        return values[0] if values else default

    def getlist(self, name, default=None):
        """Return the full list of values stored under *name*, or *default*."""
        return self.super.get(name, default)


class Request:
    """Lazily-parsed HTTP request object handed to route handlers."""

    __slots__ = (
        'url', 'headers', 'version', 'method',
        'query_string', 'body',
        'parsed_json', 'parsed_args', 'parsed_form', 'parsed_files',
    )

    def __init__(self, url_bytes, headers, version, method):
        # TODO: Content-Encoding detection
        url_parsed = parse_url(url_bytes)
        self.url = url_parsed.path.decode('utf-8')
        self.headers = headers
        self.version = version
        self.method = method
        self.query_string = url_parsed.query.decode('utf-8') if url_parsed.query else None

        # Init but do not inhale
        self.body = None
        self.parsed_json = None
        self.parsed_form = None
        self.parsed_files = None
        self.parsed_args = None

    @property
    def json(self):
        """Body parsed as JSON; None when the body is missing or invalid."""
        if not self.parsed_json:
            try:
                self.parsed_json = json_loads(self.body)
            except Exception:
                # Narrowed from a bare except: bad/missing bodies still leave
                # parsed_json as None, but KeyboardInterrupt now propagates.
                pass
        return self.parsed_json

    @property
    def form(self):
        """Form fields parsed from an urlencoded or multipart body."""
        if self.parsed_form is None:
            self.parsed_form = {}
            self.parsed_files = {}
            # Default to urlencoded when the header is absent; previously a
            # missing Content-Type made parse_header(None) raise TypeError.
            content_type, parameters = parse_header(
                self.headers.get('Content-Type', 'application/x-www-form-urlencoded'))
            try:
                if content_type is None or content_type == 'application/x-www-form-urlencoded':
                    self.parsed_form = RequestParameters(parse_qs(self.body.decode('utf-8')))
                elif content_type == 'multipart/form-data':
                    # TODO: Stream this instead of reading to/from memory
                    boundary = parameters['boundary'].encode('utf-8')
                    self.parsed_form, self.parsed_files = parse_multipart_form(self.body, boundary)
            except Exception as e:
                # Best-effort: a malformed body yields empty form/files.
                log.exception(e)
        return self.parsed_form

    @property
    def files(self):
        """Uploaded files from a multipart body (computed via .form)."""
        if self.parsed_files is None:
            _ = self.form  # compute form to get files
        return self.parsed_files

    @property
    def args(self):
        """Query-string parameters as a RequestParameters mapping."""
        if self.parsed_args is None:
            if self.query_string:
                self.parsed_args = RequestParameters(parse_qs(self.query_string))
            else:
                self.parsed_args = {}
        return self.parsed_args


File = namedtuple('File', ['type', 'body', 'name'])


def parse_multipart_form(body, boundary):
    """
    Parses a request body and returns fields and files
    :param body: Bytes request body
    :param boundary: Bytes multipart boundary
    :return: fields (dict), files (dict)
    """
    files = {}
    fields = {}

    form_parts = body.split(boundary)
    for form_part in form_parts[1:-1]:
        file_name = None
        file_type = None
        field_name = None
        line_index = 2
        line_end_index = 0
        # Walk the part's CRLF-separated header lines until the blank line.
        while not line_end_index == -1:
            line_end_index = form_part.find(b'\r\n', line_index)
            form_line = form_part[line_index:line_end_index].decode('utf-8')
            line_index = line_end_index + 2

            if not form_line:
                break

            colon_index = form_line.index(':')
            form_header_field = form_line[0:colon_index]
            form_header_value, form_parameters = parse_header(form_line[colon_index + 2:])

            if form_header_field == 'Content-Disposition':
                if 'filename' in form_parameters:
                    file_name = form_parameters['filename']
                field_name = form_parameters.get('name')
            elif form_header_field == 'Content-Type':
                file_type = form_header_value

        # Strip the trailing CRLF-- that precedes the next boundary.
        post_data = form_part[line_index:-4]
        if file_name or file_type:
            files[field_name] = File(type=file_type, name=file_name, body=post_data)
        else:
            fields[field_name] = post_data.decode('utf-8')

    return fields, files

View File

@ -1,68 +1,68 @@
import ujson
import httptools
from ujson import loads as json_loads
from urllib.parse import parse_qs

# Minimal status-code -> reason-phrase map used when rendering responses.
STATUS_CODES = {
    200: 'OK',
    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Timeout',
}


class HTTPResponse:
    """HTTP response; output() renders the full wire-format bytes."""

    __slots__ = ('body', 'status', 'content_type', 'headers')

    def __init__(self, body=None, status=200, headers=None, content_type='text/plain', body_bytes=b''):
        # headers previously defaulted to a shared mutable [] (and output()
        # iterates it with .items(), expecting a dict); None is equally falsy
        # so behavior is unchanged while the mutable-default trap is gone.
        self.content_type = content_type

        if body is not None:
            self.body = body.encode('utf-8')
        else:
            self.body = body_bytes

        self.status = status
        self.headers = headers

    def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
        # This is all returned in a kind-of funky way
        # We tried to make this as fast as possible in pure python
        additional_headers = []
        if keep_alive and keep_alive_timeout is not None:
            additional_headers = [b'Keep-Alive: timeout=', str(keep_alive_timeout).encode(), b's\r\n']
        if self.headers:
            for name, value in self.headers.items():
                additional_headers.append('{}: {}\r\n'.format(name, value).encode('utf-8'))
        return b''.join([
            'HTTP/{} {} {}\r\n'.format(version, self.status,
                                       STATUS_CODES.get(self.status, 'FAIL')).encode(),
            b'Content-Type: ', self.content_type.encode(), b'\r\n',
            b'Content-Length: ', str(len(self.body)).encode(), b'\r\n',
            b'Connection: ', ('keep-alive' if keep_alive else 'close').encode(), b'\r\n',
        ] + additional_headers + [
            b'\r\n',
            self.body,
        ])


def json(body, status=200, headers=None):
    """Shortcut for a JSON response."""
    return HTTPResponse(ujson.dumps(body), headers=headers, status=status,
                        content_type="application/json; charset=utf-8")


def text(body, status=200, headers=None):
    """Shortcut for a plain-text response."""
    return HTTPResponse(body, status=status, headers=headers, content_type="text/plain; charset=utf-8")


def html(body, status=200, headers=None):
    """Shortcut for an HTML response."""
    return HTTPResponse(body, status=status, headers=headers, content_type="text/html; charset=utf-8")
import ujson
import httptools
from ujson import loads as json_loads
from urllib.parse import parse_qs

# Minimal status-code -> reason-phrase map used when rendering responses.
STATUS_CODES = {
    200: 'OK',
    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Timeout',
}


class HTTPResponse:
    """HTTP response; output() renders the full wire-format bytes."""

    __slots__ = ('body', 'status', 'content_type', 'headers')

    def __init__(self, body=None, status=200, headers=None, content_type='text/plain', body_bytes=b''):
        # headers previously defaulted to a shared mutable [] (and output()
        # iterates it with .items(), expecting a dict); None is equally falsy
        # so behavior is unchanged while the mutable-default trap is gone.
        self.content_type = content_type

        if body is not None:
            self.body = body.encode('utf-8')
        else:
            self.body = body_bytes

        self.status = status
        self.headers = headers

    def output(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
        # This is all returned in a kind-of funky way
        # We tried to make this as fast as possible in pure python
        additional_headers = []
        if keep_alive and keep_alive_timeout is not None:
            additional_headers = [b'Keep-Alive: timeout=', str(keep_alive_timeout).encode(), b's\r\n']
        if self.headers:
            for name, value in self.headers.items():
                additional_headers.append('{}: {}\r\n'.format(name, value).encode('utf-8'))
        return b''.join([
            'HTTP/{} {} {}\r\n'.format(version, self.status,
                                       STATUS_CODES.get(self.status, 'FAIL')).encode(),
            b'Content-Type: ', self.content_type.encode(), b'\r\n',
            b'Content-Length: ', str(len(self.body)).encode(), b'\r\n',
            b'Connection: ', ('keep-alive' if keep_alive else 'close').encode(), b'\r\n',
        ] + additional_headers + [
            b'\r\n',
            self.body,
        ])


def json(body, status=200, headers=None):
    """Shortcut for a JSON response."""
    return HTTPResponse(ujson.dumps(body), headers=headers, status=status,
                        content_type="application/json; charset=utf-8")


def text(body, status=200, headers=None):
    """Shortcut for a plain-text response."""
    return HTTPResponse(body, status=status, headers=headers, content_type="text/plain; charset=utf-8")


def html(body, status=200, headers=None):
    """Shortcut for an HTML response."""
    return HTTPResponse(body, status=status, headers=headers, content_type="text/html; charset=utf-8")

View File

@ -1,128 +1,128 @@
import re
from collections import namedtuple

from .exceptions import NotFound, InvalidUsage

Route = namedtuple("Route", ['handler', 'methods', 'pattern', 'parameters'])
Parameter = namedtuple("Parameter", ['name', 'cast'])


class Router:
    """
    Router supports basic routing with parameters and method checks

    Usage:
        @sanic.route('/my/url/<my_parameter>', methods=['GET', 'POST', ...])
        def my_route(request, my_parameter):
            do stuff...

    Parameters will be passed as keyword arguments to the request handling function provided
    Parameters can also have a type by appending :type to the <parameter>. If no type is provided,
    a string is expected. A regular expression can also be passed in as the type

    TODO:
        This probably needs optimization for larger sets of routes,
        since it checks every route until it finds a match which is bad and I should feel bad
    """
    routes = None
    # Raw strings: "\w" / "\d" in plain literals are invalid escapes.
    regex_types = {
        "string": (None, r"\w+"),
        "int": (int, r"\d+"),
        "number": (float, r"[0-9\.]+"),
        "alpha": (None, r"[A-Za-z]+"),
    }

    def __init__(self):
        self.routes = []

    def add(self, uri, methods, handler):
        """
        Adds a handler to the route list
        :param uri: Path to match
        :param methods: Array of accepted method names. If none are provided, any method is allowed
        :param handler: Request handler function. When executed, it should provide a response object.
        :return: Nothing
        """
        # Dict for faster lookups of if method allowed
        methods_dict = {method: True for method in methods} if methods else None

        parameters = []

        def add_parameter(match):
            # We could receive NAME or NAME:PATTERN
            parts = match.group(1).split(':')
            if len(parts) == 2:
                parameter_name, parameter_pattern = parts
            else:
                parameter_name = parts[0]
                parameter_pattern = 'string'

            # Pull from pre-configured types
            parameter_regex = self.regex_types.get(parameter_pattern)
            if parameter_regex:
                parameter_type, parameter_pattern = parameter_regex
            else:
                parameter_type = None

            parameter = Parameter(name=parameter_name, cast=parameter_type)
            parameters.append(parameter)

            return "({})".format(parameter_pattern)

        pattern_string = re.sub("<(.+?)>", add_parameter, uri)
        pattern = re.compile("^{}$".format(pattern_string))

        route = Route(handler=handler, methods=methods_dict, pattern=pattern, parameters=parameters)
        self.routes.append(route)

    def get(self, request):
        """
        Gets a request handler based on the URL of the request, or raises an error
        :param request: Request object
        :return: handler, arguments, keyword arguments
        """
        route = None
        args = []
        kwargs = {}
        for _route in self.routes:
            match = _route.pattern.match(request.url)
            if match:
                # Positional regex groups line up with declared parameters.
                for index, parameter in enumerate(_route.parameters, start=1):
                    value = match.group(index)
                    kwargs[parameter.name] = parameter.cast(value) if parameter.cast is not None else value
                route = _route
                break

        if route:
            if route.methods and request.method not in route.methods:
                raise InvalidUsage("Method {} not allowed for URL {}".format(request.method, request.url),
                                   status_code=405)
            return route.handler, args, kwargs
        else:
            raise NotFound("Requested URL {} not found".format(request.url))


class SimpleRouter:
    """
    Simple router records and reads all routes from a dictionary
    It does not support parameters in routes, but is very fast
    """
    routes = None

    def __init__(self):
        self.routes = {}

    def add(self, uri, methods, handler):
        # Dict for faster lookups of method allowed
        methods_dict = {method: True for method in methods} if methods else None
        self.routes[uri] = Route(handler=handler, methods=methods_dict, pattern=uri, parameters=None)

    def get(self, request):
        route = self.routes.get(request.url)
        if route:
            if route.methods and request.method not in route.methods:
                raise InvalidUsage("Method {} not allowed for URL {}".format(request.method, request.url),
                                   status_code=405)
            return route.handler, [], {}
        else:
            raise NotFound("Requested URL {} not found".format(request.url))
import re
from collections import namedtuple

from .exceptions import NotFound, InvalidUsage

Route = namedtuple("Route", ['handler', 'methods', 'pattern', 'parameters'])
Parameter = namedtuple("Parameter", ['name', 'cast'])


class Router:
    """
    Router supports basic routing with parameters and method checks

    Usage:
        @sanic.route('/my/url/<my_parameter>', methods=['GET', 'POST', ...])
        def my_route(request, my_parameter):
            do stuff...

    Parameters will be passed as keyword arguments to the request handling function provided
    Parameters can also have a type by appending :type to the <parameter>. If no type is provided,
    a string is expected. A regular expression can also be passed in as the type

    TODO:
        This probably needs optimization for larger sets of routes,
        since it checks every route until it finds a match which is bad and I should feel bad
    """
    routes = None
    # Raw strings: "\w" / "\d" in plain literals are invalid escapes.
    regex_types = {
        "string": (None, r"\w+"),
        "int": (int, r"\d+"),
        "number": (float, r"[0-9\.]+"),
        "alpha": (None, r"[A-Za-z]+"),
    }

    def __init__(self):
        self.routes = []

    def add(self, uri, methods, handler):
        """
        Adds a handler to the route list
        :param uri: Path to match
        :param methods: Array of accepted method names. If none are provided, any method is allowed
        :param handler: Request handler function. When executed, it should provide a response object.
        :return: Nothing
        """
        # Dict for faster lookups of if method allowed
        methods_dict = {method: True for method in methods} if methods else None

        parameters = []

        def add_parameter(match):
            # We could receive NAME or NAME:PATTERN
            parts = match.group(1).split(':')
            if len(parts) == 2:
                parameter_name, parameter_pattern = parts
            else:
                parameter_name = parts[0]
                parameter_pattern = 'string'

            # Pull from pre-configured types
            parameter_regex = self.regex_types.get(parameter_pattern)
            if parameter_regex:
                parameter_type, parameter_pattern = parameter_regex
            else:
                parameter_type = None

            parameter = Parameter(name=parameter_name, cast=parameter_type)
            parameters.append(parameter)

            return "({})".format(parameter_pattern)

        pattern_string = re.sub("<(.+?)>", add_parameter, uri)
        pattern = re.compile("^{}$".format(pattern_string))

        route = Route(handler=handler, methods=methods_dict, pattern=pattern, parameters=parameters)
        self.routes.append(route)

    def get(self, request):
        """
        Gets a request handler based on the URL of the request, or raises an error
        :param request: Request object
        :return: handler, arguments, keyword arguments
        """
        route = None
        args = []
        kwargs = {}
        for _route in self.routes:
            match = _route.pattern.match(request.url)
            if match:
                # Positional regex groups line up with declared parameters.
                for index, parameter in enumerate(_route.parameters, start=1):
                    value = match.group(index)
                    kwargs[parameter.name] = parameter.cast(value) if parameter.cast is not None else value
                route = _route
                break

        if route:
            if route.methods and request.method not in route.methods:
                raise InvalidUsage("Method {} not allowed for URL {}".format(request.method, request.url),
                                   status_code=405)
            return route.handler, args, kwargs
        else:
            raise NotFound("Requested URL {} not found".format(request.url))


class SimpleRouter:
    """
    Simple router records and reads all routes from a dictionary
    It does not support parameters in routes, but is very fast
    """
    routes = None

    def __init__(self):
        self.routes = {}

    def add(self, uri, methods, handler):
        # Dict for faster lookups of method allowed
        methods_dict = {method: True for method in methods} if methods else None
        self.routes[uri] = Route(handler=handler, methods=methods_dict, pattern=uri, parameters=None)

    def get(self, request):
        route = self.routes.get(request.url)
        if route:
            if route.methods and request.method not in route.methods:
                raise InvalidUsage("Method {} not allowed for URL {}".format(request.method, request.url),
                                   status_code=405)
            return route.handler, [], {}
        else:
            raise NotFound("Requested URL {} not found".format(request.url))

View File

@ -1,191 +1,191 @@
import asyncio
from inspect import isawaitable
from traceback import format_exc
from types import FunctionType

from .config import Config
from .exceptions import Handler
from .log import log, logging
from .middleware import Middleware
from .response import HTTPResponse
from .router import Router
from .server import serve
from .exceptions import ServerError


class Sanic:
    """Application object: owns the router, middleware lists and error handler."""

    def __init__(self, name, router=None, error_handler=None):
        self.name = name
        # The original assigned the router twice in a row; once is enough.
        self.router = router or Router()
        self.error_handler = error_handler or Handler(self)
        self.config = Config()
        self.request_middleware = []
        self.response_middleware = []

    # -------------------------------------------------------------------- #
    # Registration
    # -------------------------------------------------------------------- #

    # Decorator
    def route(self, uri, methods=None):
        """
        Decorates a function to be registered as a route
        :param uri: path of the URL
        :param methods: list or tuple of methods allowed
        :return: decorated function
        """

        def response(handler):
            self.router.add(uri=uri, methods=methods, handler=handler)
            return handler

        return response

    # Decorator
    def exception(self, *exceptions):
        """
        Decorates a function to be registered as a handler for the given
        exception types.  (Docstring was a copy-paste of route()'s before.)
        :param exceptions: exception classes to handle
        :return: decorated function
        """

        def response(handler):
            for exception in exceptions:
                self.error_handler.add(exception, handler)
            return handler

        return response

    # Decorator
    def middleware(self, *args, **kwargs):
        """
        Decorates and registers middleware to be called before a request
        can either be called as @app.middleware or @app.middleware('request')
        """
        attach_to = 'request'

        def register_middleware(middleware):
            if attach_to == 'request':
                self.request_middleware.append(middleware)
            if attach_to == 'response':
                self.response_middleware.append(middleware)
            return middleware

        # Detect which way this was called, @middleware or @middleware('AT')
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            return register_middleware(args[0])
        else:
            attach_to = args[0]
            return register_middleware
        # NOTE: the original had dead code here wrapping a never-set
        # `middleware` local in Middleware(...) after both return paths.

    # -------------------------------------------------------------------- #
    # Request Handling
    # -------------------------------------------------------------------- #

    async def handle_request(self, request, response_callback):
        """
        Takes a request from the HTTP Server and returns a response object to be sent back
        The HTTP Server only expects a response object, so exception handling must be done here
        :param request: HTTP Request object
        :param response_callback: Response function to be called with the response as the only argument
        :return: Nothing
        """
        try:
            # Middleware process_request
            response = False
            # The if improves speed. I don't know why
            if self.request_middleware:
                for middleware in self.request_middleware:
                    response = middleware(request)
                    if isawaitable(response):
                        response = await response
                    if response:
                        break

            # No middleware results
            if not response:
                # Fetch handler from router
                handler, args, kwargs = self.router.get(request)
                if handler is None:
                    raise ServerError("'None' was returned while requesting a handler from the router")

                # Run response handler
                response = handler(request, *args, **kwargs)
                if isawaitable(response):
                    response = await response

            # Middleware process_response
            if self.response_middleware:
                for middleware in self.response_middleware:
                    _response = middleware(request, response)
                    if isawaitable(_response):
                        _response = await _response
                    if _response:
                        response = _response
                        break

        except Exception as e:
            # Route any failure through the error handler; if that fails too,
            # fall back to a bare response so the callback always fires.
            try:
                response = self.error_handler.response(request, e)
                if isawaitable(response):
                    response = await response
            except Exception as e:
                if self.debug:
                    response = HTTPResponse("Error while handling error: {}\nStack: {}".format(e, format_exc()))
                else:
                    response = HTTPResponse("An error occured while handling an error")

        response_callback(response)

    # -------------------------------------------------------------------- #
    # Execution
    # -------------------------------------------------------------------- #

    def run(self, host="127.0.0.1", port=8000, debug=False, after_start=None, before_stop=None):
        """
        Runs the HTTP Server and listens until keyboard interrupt or term signal.
        On termination, drains connections before closing.
        :param host: Address to host on
        :param port: Port to host on
        :param debug: Enables debug output (slows server)
        :param after_start: Function to be executed after the server starts listening
        :param before_stop: Function to be executed when a stop signal is received before it is respected
        :return: Nothing
        """
        # Bug fix: this was hard-coded to True, leaking tracebacks to
        # clients even when debug=False.
        self.error_handler.debug = debug
        self.debug = debug

        if debug:
            log.setLevel(logging.DEBUG)
            log.debug(self.config.LOGO)

        # Serve
        log.info('Goin\' Fast @ http://{}:{}'.format(host, port))

        try:
            serve(
                host=host,
                port=port,
                debug=debug,
                after_start=after_start,
                before_stop=before_stop,
                request_handler=self.handle_request,
                request_timeout=self.config.REQUEST_TIMEOUT,
                request_max_size=self.config.REQUEST_MAX_SIZE,
            )
        except:
            # Deliberately swallows everything (including KeyboardInterrupt)
            # so Ctrl+C yields a clean shutdown.  NOTE(review): consider
            # narrowing once serve()'s failure modes are known.
            pass

    def stop(self):
        """
        This kills the Sanic
        """
        asyncio.get_event_loop().stop()
import asyncio
from inspect import isawaitable
from traceback import format_exc
from types import FunctionType

from .config import Config
from .exceptions import Handler
from .log import log, logging
from .middleware import Middleware
from .response import HTTPResponse
from .router import Router
from .server import serve
from .exceptions import ServerError


class Sanic:
    """Application object: owns the router, middleware lists and error handler."""

    def __init__(self, name, router=None, error_handler=None):
        self.name = name
        # The original assigned the router twice in a row; once is enough.
        self.router = router or Router()
        self.error_handler = error_handler or Handler(self)
        self.config = Config()
        self.request_middleware = []
        self.response_middleware = []

    # -------------------------------------------------------------------- #
    # Registration
    # -------------------------------------------------------------------- #

    # Decorator
    def route(self, uri, methods=None):
        """
        Decorates a function to be registered as a route
        :param uri: path of the URL
        :param methods: list or tuple of methods allowed
        :return: decorated function
        """

        def response(handler):
            self.router.add(uri=uri, methods=methods, handler=handler)
            return handler

        return response

    # Decorator
    def exception(self, *exceptions):
        """
        Decorates a function to be registered as a handler for the given
        exception types.  (Docstring was a copy-paste of route()'s before.)
        :param exceptions: exception classes to handle
        :return: decorated function
        """

        def response(handler):
            for exception in exceptions:
                self.error_handler.add(exception, handler)
            return handler

        return response

    # Decorator
    def middleware(self, *args, **kwargs):
        """
        Decorates and registers middleware to be called before a request
        can either be called as @app.middleware or @app.middleware('request')
        """
        attach_to = 'request'

        def register_middleware(middleware):
            if attach_to == 'request':
                self.request_middleware.append(middleware)
            if attach_to == 'response':
                self.response_middleware.append(middleware)
            return middleware

        # Detect which way this was called, @middleware or @middleware('AT')
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            return register_middleware(args[0])
        else:
            attach_to = args[0]
            return register_middleware
        # NOTE: the original had dead code here wrapping a never-set
        # `middleware` local in Middleware(...) after both return paths.

    # -------------------------------------------------------------------- #
    # Request Handling
    # -------------------------------------------------------------------- #

    async def handle_request(self, request, response_callback):
        """
        Takes a request from the HTTP Server and returns a response object to be sent back
        The HTTP Server only expects a response object, so exception handling must be done here
        :param request: HTTP Request object
        :param response_callback: Response function to be called with the response as the only argument
        :return: Nothing
        """
        try:
            # Middleware process_request
            response = False
            # The if improves speed. I don't know why
            if self.request_middleware:
                for middleware in self.request_middleware:
                    response = middleware(request)
                    if isawaitable(response):
                        response = await response
                    if response:
                        break

            # No middleware results
            if not response:
                # Fetch handler from router
                handler, args, kwargs = self.router.get(request)
                if handler is None:
                    raise ServerError("'None' was returned while requesting a handler from the router")

                # Run response handler
                response = handler(request, *args, **kwargs)
                if isawaitable(response):
                    response = await response

            # Middleware process_response
            if self.response_middleware:
                for middleware in self.response_middleware:
                    _response = middleware(request, response)
                    if isawaitable(_response):
                        _response = await _response
                    if _response:
                        response = _response
                        break

        except Exception as e:
            # Route any failure through the error handler; if that fails too,
            # fall back to a bare response so the callback always fires.
            try:
                response = self.error_handler.response(request, e)
                if isawaitable(response):
                    response = await response
            except Exception as e:
                if self.debug:
                    response = HTTPResponse("Error while handling error: {}\nStack: {}".format(e, format_exc()))
                else:
                    response = HTTPResponse("An error occured while handling an error")

        response_callback(response)

    # -------------------------------------------------------------------- #
    # Execution
    # -------------------------------------------------------------------- #

    def run(self, host="127.0.0.1", port=8000, debug=False, after_start=None, before_stop=None):
        """
        Runs the HTTP Server and listens until keyboard interrupt or term signal.
        On termination, drains connections before closing.
        :param host: Address to host on
        :param port: Port to host on
        :param debug: Enables debug output (slows server)
        :param after_start: Function to be executed after the server starts listening
        :param before_stop: Function to be executed when a stop signal is received before it is respected
        :return: Nothing
        """
        # Bug fix: this was hard-coded to True, leaking tracebacks to
        # clients even when debug=False.
        self.error_handler.debug = debug
        self.debug = debug

        if debug:
            log.setLevel(logging.DEBUG)
            log.debug(self.config.LOGO)

        # Serve
        log.info('Goin\' Fast @ http://{}:{}'.format(host, port))

        try:
            serve(
                host=host,
                port=port,
                debug=debug,
                after_start=after_start,
                before_stop=before_stop,
                request_handler=self.handle_request,
                request_timeout=self.config.REQUEST_TIMEOUT,
                request_max_size=self.config.REQUEST_MAX_SIZE,
            )
        except:
            # Deliberately swallows everything (including KeyboardInterrupt)
            # so Ctrl+C yields a clean shutdown.  NOTE(review): consider
            # narrowing once serve()'s failure modes are known.
            pass

    def stop(self):
        """
        This kills the Sanic
        """
        asyncio.get_event_loop().stop()

View File

@ -1,205 +1,205 @@
import asyncio
from inspect import isawaitable
from signal import SIGINT, SIGTERM
import httptools
try:
import uvloop as async_loop
except:
async_loop = asyncio
from .log import log
from .request import Request
class Signal:
    """Shared stop flag for the server's open connections."""

    # Set to True when a stop signal is received so connections can
    # stop keeping themselves alive.
    stopped = False
class HttpProtocol(asyncio.Protocol):
    """asyncio protocol that feeds incoming bytes to an httptools request
    parser, assembles a :class:`Request`, and dispatches each complete
    request to the configured ``request_handler``.
    """

    __slots__ = (
        'loop', 'transport', 'connections', 'signal',                # event loop, connection
        'parser', 'request', 'url', 'headers',                       # request params
        'request_handler', 'request_timeout', 'request_max_size',    # request config
        '_total_request_size', '_timeout_handler')                   # connection management

    def __init__(self, *, loop, request_handler, signal=None, connections=None,
                 request_timeout=60, request_max_size=None):
        """
        :param loop: event loop used for timeouts and handler tasks
        :param request_handler: coroutine function called as
            ``request_handler(request, write_response)``
        :param signal: shared shutdown flag; a fresh Signal per instance if omitted
        :param connections: shared registry of live protocol instances;
            a fresh dict per instance if omitted
        :param request_timeout: seconds before an idle connection is dropped
        :param request_max_size: request size cap in bytes; None disables the cap
        """
        # The previous defaults (``signal=Signal()``, ``connections={}``) were
        # evaluated once at definition time and silently shared by every
        # instance — the classic mutable-default bug. Build per-instance
        # fallbacks instead; serve() still passes its own shared objects.
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.signal = Signal() if signal is None else signal
        self.connections = {} if connections is None else connections
        self.request_handler = request_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        # Register with the shared connection table so serve() can drain us,
        # and arm the idle-timeout timer.
        self.connections[self] = True
        self._timeout_handler = self.loop.call_later(
            self.request_timeout, self.connection_timeout)
        self.transport = transport

    def connection_lost(self, exc):
        del self.connections[self]
        self._timeout_handler.cancel()
        self.cleanup()

    def connection_timeout(self):
        self.bail_out("Request timed out, connection closed")

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Track cumulative bytes so an oversized request is caught even when
        # no (honest) Content-Length header was sent. Skip the check entirely
        # when no cap is configured — comparing against None raised TypeError
        # before.
        self._total_request_size += len(data)
        if (self.request_max_size is not None
                and self._total_request_size > self.request_max_size):
            return self.bail_out(
                "Request too large ({}), connection closed".format(self._total_request_size))

        # Lazily create the parser on the first chunk of a connection.
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = httptools.HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except httptools.parser.errors.HttpParserError as e:
            self.bail_out("Invalid request data, connection closed ({})".format(e))

    def on_url(self, url):
        self.url = url

    def on_header(self, name, value):
        # NOTE(review): the header name is matched case-sensitively, so a
        # client sending ``content-length`` bypasses this early rejection;
        # the cumulative check in data_received still applies.
        if (name == b'Content-Length' and self.request_max_size is not None
                and int(value) > self.request_max_size):
            return self.bail_out(
                "Request body too large ({}), connection closed".format(value))
        self.headers.append((name.decode(), value.decode('utf-8')))

    def on_headers_complete(self):
        self.request = Request(
            url_bytes=self.url,
            headers=dict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode()
        )

    def on_body(self, body):
        self.request.body = body

    def on_message_complete(self):
        self.loop.create_task(self.request_handler(self.request, self.write_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #

    def write_response(self, response):
        """Serialize ``response`` onto the transport; keep the connection open
        only when the parser allows keep-alive and no shutdown is under way."""
        try:
            keep_alive = self.parser.should_keep_alive() and not self.signal.stopped
            self.transport.write(
                response.output(self.request.version, keep_alive, self.request_timeout))
            if not keep_alive:
                self.transport.close()
            else:
                # Reset per-request state so the next keep-alive request
                # starts clean.
                self.cleanup()
        except Exception as e:
            self.bail_out("Writing request failed, connection closed {}".format(e))

    def bail_out(self, message):
        """Log ``message`` and drop the connection."""
        log.error(message)
        self.transport.close()

    def cleanup(self):
        # Drop per-request state. NOTE(review): the idle timer is not
        # rescheduled here, so request_timeout bounds the whole connection
        # rather than each request — confirm that is intended.
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._total_request_size = 0

    def close_if_idle(self):
        """
        Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
def serve(host, port, request_handler, after_start=None, before_stop=None,
          debug=False, request_timeout=60, request_max_size=None):
    """Run the HTTP server until stopped, then drain live connections.

    :param host: interface to bind
    :param port: TCP port to bind
    :param request_handler: coroutine invoked for every parsed request
    :param after_start: optional (a)sync callback run once the server is up
    :param before_stop: optional (a)sync callback run as shutdown begins
    :param debug: currently unused (loop debug mode is intentionally off)
    :param request_timeout: idle timeout in seconds, passed to each protocol
    :param request_max_size: per-request size cap, passed to each protocol
    """
    # Create Event Loop
    loop = async_loop.new_event_loop()
    asyncio.set_event_loop(loop)
    # I don't think we take advantage of this
    # And it slows everything waaayyy down
    # loop.set_debug(debug)

    # Shared across every protocol instance: the live-connection registry
    # and the shutdown flag.
    connections = {}
    signal = Signal()
    server_coroutine = loop.create_server(lambda: HttpProtocol(
        loop=loop,
        connections=connections,
        signal=signal,
        request_handler=request_handler,
        request_timeout=request_timeout,
        request_max_size=request_max_size,
    ), host, port)
    try:
        http_server = loop.run_until_complete(server_coroutine)
    except OSError as e:
        log.error("Unable to start server: {}".format(e))
        return
    except Exception:
        # Previously a bare ``except:``, which would also swallow
        # SystemExit and KeyboardInterrupt; catch only real errors and let
        # those propagate.
        log.exception("Unable to start server")
        return

    # Run the on_start function if provided
    if after_start:
        result = after_start(loop)
        if isawaitable(result):
            loop.run_until_complete(result)

    # Register signals for graceful termination
    for _signal in (SIGINT, SIGTERM):
        loop.add_signal_handler(_signal, loop.stop)

    try:
        loop.run_forever()
    finally:
        log.info("Stop requested, draining connections...")

        # Run the on_stop function if provided
        if before_stop:
            result = before_stop(loop)
            if isawaitable(result):
                loop.run_until_complete(result)

        # Stop accepting new connections and wait for the listener to close.
        http_server.close()
        loop.run_until_complete(http_server.wait_closed())

        # Tell keep-alive connections to close once idle, then poll until
        # every connection has unregistered itself in connection_lost().
        # Iterate over a snapshot: closing entries mutates the registry.
        signal.stopped = True
        for connection in list(connections):
            connection.close_if_idle()
        while connections:
            loop.run_until_complete(asyncio.sleep(0.1))

        loop.close()
        log.info("Server Stopped")
import asyncio
from inspect import isawaitable
from signal import SIGINT, SIGTERM

import httptools

# Prefer uvloop's faster event loop implementation; fall back to stdlib
# asyncio when it is not installed. The bare ``except:`` previously used
# here would also have masked SystemExit/KeyboardInterrupt raised during
# import; only an ImportError means "uvloop is unavailable".
try:
    import uvloop as async_loop
except ImportError:
    async_loop = asyncio

from .log import log
from .request import Request
class Signal:
    """Mutable shutdown flag shared between the server loop and its connections.

    ``serve()`` flips ``stopped`` to True when shutdown begins so keep-alive
    connections know to close after the in-flight response.
    """

    # Class-level default; the shared instance's attribute is set at shutdown.
    stopped = False
class HttpProtocol(asyncio.Protocol):
    """asyncio protocol that feeds incoming bytes to an httptools request
    parser, assembles a :class:`Request`, and dispatches each complete
    request to the configured ``request_handler``.
    """

    __slots__ = (
        'loop', 'transport', 'connections', 'signal',                # event loop, connection
        'parser', 'request', 'url', 'headers',                       # request params
        'request_handler', 'request_timeout', 'request_max_size',    # request config
        '_total_request_size', '_timeout_handler')                   # connection management

    def __init__(self, *, loop, request_handler, signal=None, connections=None,
                 request_timeout=60, request_max_size=None):
        """
        :param loop: event loop used for timeouts and handler tasks
        :param request_handler: coroutine function called as
            ``request_handler(request, write_response)``
        :param signal: shared shutdown flag; a fresh Signal per instance if omitted
        :param connections: shared registry of live protocol instances;
            a fresh dict per instance if omitted
        :param request_timeout: seconds before an idle connection is dropped
        :param request_max_size: request size cap in bytes; None disables the cap
        """
        # The previous defaults (``signal=Signal()``, ``connections={}``) were
        # evaluated once at definition time and silently shared by every
        # instance — the classic mutable-default bug. Build per-instance
        # fallbacks instead; serve() still passes its own shared objects.
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.signal = Signal() if signal is None else signal
        self.connections = {} if connections is None else connections
        self.request_handler = request_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        # Register with the shared connection table so serve() can drain us,
        # and arm the idle-timeout timer.
        self.connections[self] = True
        self._timeout_handler = self.loop.call_later(
            self.request_timeout, self.connection_timeout)
        self.transport = transport

    def connection_lost(self, exc):
        del self.connections[self]
        self._timeout_handler.cancel()
        self.cleanup()

    def connection_timeout(self):
        self.bail_out("Request timed out, connection closed")

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Track cumulative bytes so an oversized request is caught even when
        # no (honest) Content-Length header was sent. Skip the check entirely
        # when no cap is configured — comparing against None raised TypeError
        # before.
        self._total_request_size += len(data)
        if (self.request_max_size is not None
                and self._total_request_size > self.request_max_size):
            return self.bail_out(
                "Request too large ({}), connection closed".format(self._total_request_size))

        # Lazily create the parser on the first chunk of a connection.
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = httptools.HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except httptools.parser.errors.HttpParserError as e:
            self.bail_out("Invalid request data, connection closed ({})".format(e))

    def on_url(self, url):
        self.url = url

    def on_header(self, name, value):
        # NOTE(review): the header name is matched case-sensitively, so a
        # client sending ``content-length`` bypasses this early rejection;
        # the cumulative check in data_received still applies.
        if (name == b'Content-Length' and self.request_max_size is not None
                and int(value) > self.request_max_size):
            return self.bail_out(
                "Request body too large ({}), connection closed".format(value))
        self.headers.append((name.decode(), value.decode('utf-8')))

    def on_headers_complete(self):
        self.request = Request(
            url_bytes=self.url,
            headers=dict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode()
        )

    def on_body(self, body):
        self.request.body = body

    def on_message_complete(self):
        self.loop.create_task(self.request_handler(self.request, self.write_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #

    def write_response(self, response):
        """Serialize ``response`` onto the transport; keep the connection open
        only when the parser allows keep-alive and no shutdown is under way."""
        try:
            keep_alive = self.parser.should_keep_alive() and not self.signal.stopped
            self.transport.write(
                response.output(self.request.version, keep_alive, self.request_timeout))
            if not keep_alive:
                self.transport.close()
            else:
                # Reset per-request state so the next keep-alive request
                # starts clean.
                self.cleanup()
        except Exception as e:
            self.bail_out("Writing request failed, connection closed {}".format(e))

    def bail_out(self, message):
        """Log ``message`` and drop the connection."""
        log.error(message)
        self.transport.close()

    def cleanup(self):
        # Drop per-request state. NOTE(review): the idle timer is not
        # rescheduled here, so request_timeout bounds the whole connection
        # rather than each request — confirm that is intended.
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._total_request_size = 0

    def close_if_idle(self):
        """
        Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
def serve(host, port, request_handler, after_start=None, before_stop=None,
          debug=False, request_timeout=60, request_max_size=None):
    """Run the HTTP server until stopped, then drain live connections.

    :param host: interface to bind
    :param port: TCP port to bind
    :param request_handler: coroutine invoked for every parsed request
    :param after_start: optional (a)sync callback run once the server is up
    :param before_stop: optional (a)sync callback run as shutdown begins
    :param debug: currently unused (loop debug mode is intentionally off)
    :param request_timeout: idle timeout in seconds, passed to each protocol
    :param request_max_size: per-request size cap, passed to each protocol
    """
    # Create Event Loop
    loop = async_loop.new_event_loop()
    asyncio.set_event_loop(loop)
    # I don't think we take advantage of this
    # And it slows everything waaayyy down
    # loop.set_debug(debug)

    # Shared across every protocol instance: the live-connection registry
    # and the shutdown flag.
    connections = {}
    signal = Signal()
    server_coroutine = loop.create_server(lambda: HttpProtocol(
        loop=loop,
        connections=connections,
        signal=signal,
        request_handler=request_handler,
        request_timeout=request_timeout,
        request_max_size=request_max_size,
    ), host, port)
    try:
        http_server = loop.run_until_complete(server_coroutine)
    except OSError as e:
        log.error("Unable to start server: {}".format(e))
        return
    except Exception:
        # Previously a bare ``except:``, which would also swallow
        # SystemExit and KeyboardInterrupt; catch only real errors and let
        # those propagate.
        log.exception("Unable to start server")
        return

    # Run the on_start function if provided
    if after_start:
        result = after_start(loop)
        if isawaitable(result):
            loop.run_until_complete(result)

    # Register signals for graceful termination
    for _signal in (SIGINT, SIGTERM):
        loop.add_signal_handler(_signal, loop.stop)

    try:
        loop.run_forever()
    finally:
        log.info("Stop requested, draining connections...")

        # Run the on_stop function if provided
        if before_stop:
            result = before_stop(loop)
            if isawaitable(result):
                loop.run_until_complete(result)

        # Stop accepting new connections and wait for the listener to close.
        http_server.close()
        loop.run_until_complete(http_server.wait_closed())

        # Tell keep-alive connections to close once idle, then poll until
        # every connection has unregistered itself in connection_lost().
        # Iterate over a snapshot: closing entries mutates the registry.
        signal.stopped = True
        for connection in list(connections):
            connection.close_if_idle()
        while connections:
            loop.run_until_complete(asyncio.sleep(0.1))

        loop.close()
        log.info("Server Stopped")

View File

@ -1,33 +1,33 @@
import asyncpg
import sys
import os
import inspect

# Put the repository root (three levels up) ahead of any installed copy so
# the in-tree sanic package is the one being benchmarked.
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.insert(0, script_dir + '/../../../')

import timeit

from sanic.response import json

# Sanity check: show one rendered response before timing anything.
print(json({"test": True}).output())

# Benchmark the new serialization path: six rounds of 100k calls each.
print("Running New 100,000 times")
run_count = 0
elapsed_total = 0
for _ in range(6):
    elapsed = timeit.timeit('json({ "test":True }).output()', setup='from sanic.response import json', number=100000)
    print("Took {} seconds".format(elapsed))
    elapsed_total += elapsed
    run_count += 1
print("Average: {}".format(elapsed_total / run_count))

# Same measurement against the legacy serialization path for comparison.
print("Running Old 100,000 times")
run_count = 0
elapsed_total = 0
for _ in range(6):
    elapsed = timeit.timeit('json({ "test":True }).output_old()', setup='from sanic.response import json', number=100000)
    print("Took {} seconds".format(elapsed))
    elapsed_total += elapsed
    run_count += 1
print("Average: {}".format(elapsed_total / run_count))
import asyncpg
import sys
import os
import inspect

# Put the repository root (three levels up) ahead of any installed copy so
# the in-tree sanic package is the one being benchmarked.
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.insert(0, script_dir + '/../../../')

import timeit

from sanic.response import json

# Sanity check: show one rendered response before timing anything.
print(json({"test": True}).output())

# Benchmark the new serialization path: six rounds of 100k calls each.
print("Running New 100,000 times")
run_count = 0
elapsed_total = 0
for _ in range(6):
    elapsed = timeit.timeit('json({ "test":True }).output()', setup='from sanic.response import json', number=100000)
    print("Took {} seconds".format(elapsed))
    elapsed_total += elapsed
    run_count += 1
print("Average: {}".format(elapsed_total / run_count))

# Same measurement against the legacy serialization path for comparison.
print("Running Old 100,000 times")
run_count = 0
elapsed_total = 0
for _ in range(6):
    elapsed = timeit.timeit('json({ "test":True }).output_old()', setup='from sanic.response import json', number=100000)
    print("Took {} seconds".format(elapsed))
    elapsed_total += elapsed
    run_count += 1
print("Average: {}".format(elapsed_total / run_count))

View File

@ -1,19 +1,19 @@
import sys
import os
import inspect

# Put the repository root (three levels up) ahead of installed packages so
# the in-tree sanic is the one imported below.
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.insert(0, script_dir + '/../../../')

from sanic import Sanic
from sanic.response import json

app = Sanic("test")


@app.route("/")
async def test(request):
    """Respond to GET / with a fixed JSON payload."""
    payload = {"test": True}
    return json(payload)


# Port is taken verbatim from the command line (a string).
app.run(host="0.0.0.0", port=sys.argv[1])
import sys
import os
import inspect

# Put the repository root (three levels up) ahead of installed packages so
# the in-tree sanic is the one imported below.
script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.insert(0, script_dir + '/../../../')

from sanic import Sanic
from sanic.response import json

app = Sanic("test")


@app.route("/")
async def test(request):
    """Respond to GET / with a fixed JSON payload."""
    payload = {"test": True}
    return json(payload)


# Port is taken verbatim from the command line (a string).
app.run(host="0.0.0.0", port=sys.argv[1])

View File

@ -1,91 +1,91 @@
import sys
import os
import inspect

# Put the repository root (three levels up) ahead of installed packages so
# the in-tree sanic is the one imported below.
here_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.insert(0, here_dir + '/../../../')

from sanic import Sanic
from sanic.response import json, text
from sanic.exceptions import ServerError

app = Sanic("test")
@app.route("/")
async def test(request):
    """GET / — return a constant JSON payload."""
    payload = {"test": True}
    return json(payload)
@app.route("/sync", methods=['GET', 'POST'])
def test(request):
    """Synchronous handler for GET/POST /sync — same constant payload.

    NOTE(review): this function reuses the module-level name ``test``;
    routing still works because the decorator captures the function object.
    """
    payload = {"test": True}
    return json(payload)
@app.route("/text/<name>/<butt:int>")
def rtext(request, name, butt):
    """Render the two path parameters into a plain-text response."""
    message = "yeehaww {} {}".format(name, butt)
    return text(message)
@app.route("/exception")
def exception(request):
    """Always fail, to exercise the sync error-handling path."""
    error = ServerError("yep")
    raise error
@app.route("/exception/async")
async def test(request):
    """Async variant of the failing handler (also shadows ``test``)."""
    error = ServerError("asunk")
    raise error
@app.route("/post_json")
def post_json(request):
    """Echo back the parsed JSON body of the request."""
    reply = {"received": True, "message": request.json}
    return json(reply)
@app.route("/query_string")
def query_string(request):
    """Expose the parsed query string alongside its raw form."""
    details = {
        "parsed": True,
        "args": request.args,
        "url": request.url,
        "query_string": request.query_string,
    }
    return json(details)
import sys  # NOTE(review): redundant -- sys is already imported at the top of this script
# Port comes from the CLI as a string here -- presumably accepted downstream;
# verify against serve()/loop.create_server.
app.run(host="0.0.0.0", port=sys.argv[1])
# import asyncio_redis
# import asyncpg
# async def setup(sanic, loop):
# sanic.conn = []
# sanic.redis = []
# for x in range(10):
# sanic.conn.append(await asyncpg.connect(user='postgres', password='zomgdev', database='postgres', host='192.168.99.100'))
# for n in range(30):
# connection = await asyncio_redis.Connection.create(host='192.168.99.100', port=6379)
# sanic.redis.append(connection)
# c=0
# @app.route("/postgres")
# async def postgres(request):
# global c
# values = await app.conn[c].fetch('''SELECT * FROM players''')
# c += 1
# if c == 10:
# c = 0
# return text("yep")
# r=0
# @app.route("/redis")
# async def redis(request):
# global r
# try:
# values = await app.redis[r].get('my_key')
# except asyncio_redis.exceptions.ConnectionLostError:
# app.redis[r] = await asyncio_redis.Connection.create(host='127.0.0.1', port=6379)
# values = await app.redis[r].get('my_key')
# r += 1
# if r == 30:
# r = 0
# return text(values)
import sys
import os
import inspect

# Put the repository root (three levels up) ahead of installed packages so
# the in-tree sanic is the one imported below.
here_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
sys.path.insert(0, here_dir + '/../../../')

from sanic import Sanic
from sanic.response import json, text
from sanic.exceptions import ServerError

app = Sanic("test")
@app.route("/")
async def test(request):
    """GET / — return a constant JSON payload."""
    payload = {"test": True}
    return json(payload)
@app.route("/sync", methods=['GET', 'POST'])
def test(request):
    """Synchronous handler for GET/POST /sync — same constant payload.

    NOTE(review): this function reuses the module-level name ``test``;
    routing still works because the decorator captures the function object.
    """
    payload = {"test": True}
    return json(payload)
@app.route("/text/<name>/<butt:int>")
def rtext(request, name, butt):
    """Render the two path parameters into a plain-text response."""
    message = "yeehaww {} {}".format(name, butt)
    return text(message)
@app.route("/exception")
def exception(request):
    """Always fail, to exercise the sync error-handling path."""
    error = ServerError("yep")
    raise error
@app.route("/exception/async")
async def test(request):
    """Async variant of the failing handler (also shadows ``test``)."""
    error = ServerError("asunk")
    raise error
@app.route("/post_json")
def post_json(request):
    """Echo back the parsed JSON body of the request."""
    reply = {"received": True, "message": request.json}
    return json(reply)
@app.route("/query_string")
def query_string(request):
    """Expose the parsed query string alongside its raw form."""
    details = {
        "parsed": True,
        "args": request.args,
        "url": request.url,
        "query_string": request.query_string,
    }
    return json(details)
import sys  # NOTE(review): redundant -- sys is already imported at the top of this script
# Port comes from the CLI as a string here -- presumably accepted downstream;
# verify against serve()/loop.create_server.
app.run(host="0.0.0.0", port=sys.argv[1])
# import asyncio_redis
# import asyncpg
# async def setup(sanic, loop):
# sanic.conn = []
# sanic.redis = []
# for x in range(10):
# sanic.conn.append(await asyncpg.connect(user='postgres', password='zomgdev', database='postgres', host='192.168.99.100'))
# for n in range(30):
# connection = await asyncio_redis.Connection.create(host='192.168.99.100', port=6379)
# sanic.redis.append(connection)
# c=0
# @app.route("/postgres")
# async def postgres(request):
# global c
# values = await app.conn[c].fetch('''SELECT * FROM players''')
# c += 1
# if c == 10:
# c = 0
# return text("yep")
# r=0
# @app.route("/redis")
# async def redis(request):
# global r
# try:
# values = await app.redis[r].get('my_key')
# except asyncio_redis.exceptions.ConnectionLostError:
# app.redis[r] = await asyncio_redis.Connection.create(host='127.0.0.1', port=6379)
# values = await app.redis[r].get('my_key')
# r += 1
# if r == 30:
# r = 0
# return text(values)