Merge remote-tracking branch 'upstream/master'

monobot 2017-03-29 23:54:11 +01:00
commit 6c003f71f4
14 changed files with 255 additions and 65 deletions

Dockerfile (new file)

@@ -0,0 +1,6 @@
FROM python:3.6
ADD . /app
WORKDIR /app
RUN pip install tox

Makefile (new file)

@@ -0,0 +1,4 @@
test:
find . -name "*.pyc" -delete
docker build -t sanic/test-image .
docker run -t sanic/test-image tox


@@ -29,6 +29,14 @@ In general the convention is to only have UPPERCASE configuration parameters.
There are several ways to load configuration.
### From Environment Variables
Any variables defined with the `SANIC_` prefix will be applied to the Sanic config. For example, setting `SANIC_REQUEST_TIMEOUT` will be loaded by the application automatically. You can pass the `load_env` boolean to the Sanic constructor to override that:
```python
app = Sanic(load_env=False)
```
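A minimal sketch of that behaviour, with an illustrative value: the `SANIC_` prefix is stripped and the value lands on `app.config` as a string, since it is read straight from `os.environ`.

```python
import os
os.environ['SANIC_REQUEST_TIMEOUT'] = '30'   # must be set before the app is created

from sanic import Sanic

app = Sanic()                      # load_env defaults to True
print(app.config.REQUEST_TIMEOUT)  # '30' -- a string, not an int
```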
### From an Object
If there are a lot of configuration values and they have sensible defaults it might be helpful to put them into a module:
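A sketch of that pattern, with illustrative names (`settings.py`, `DB_HOST`, `DB_NAME`), matching the `from_object` implementation further down in this diff, which only copies UPPERCASE attributes:

```python
# settings.py -- illustrative module
DB_HOST = 'localhost'
DB_NAME = 'appdb'
not_for_config = True   # lowercase, so from_object() ignores it

# server.py
from sanic import Sanic
import settings

app = Sanic()
app.config.from_object(settings)
assert app.config.DB_HOST == 'localhost'
```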


@@ -55,8 +55,8 @@ from sanic import response
@app.route("/streaming")
async def index(request):
async def streaming_fn(response):
await response.write('foo')
await response.write('bar')
response.write('foo')
response.write('bar')
return response.stream(streaming_fn, content_type='text/plain')
```


@@ -0,0 +1,136 @@
# This demo requires aioredis and the environment variables listed in ENV_VARS
import json
import logging
import os
from datetime import datetime
import aioredis
import sanic
from sanic import Sanic
ENV_VARS = ["REDIS_HOST", "REDIS_PORT",
"REDIS_MINPOOL", "REDIS_MAXPOOL",
"REDIS_PASS", "APP_LOGFILE"]
app = Sanic(name=__name__)
logger = None
@app.middleware("request")
async def log_uri(request):
# Simple middleware to log the URI endpoint that was called
logger.info("URI called: {0}".format(request.url))
@app.listener('before_server_start')
async def before_server_start(app, loop):
logger.info("Starting redis pool")
app.redis_pool = await aioredis.create_pool(
(app.config.REDIS_HOST, int(app.config.REDIS_PORT)),
minsize=int(app.config.REDIS_MINPOOL),
maxsize=int(app.config.REDIS_MAXPOOL),
password=app.config.REDIS_PASS)
@app.listener('after_server_stop')
async def after_server_stop(app, loop):
logger.info("Closing redis pool")
app.redis_pool.close()
await app.redis_pool.wait_closed()
@app.middleware("request")
async def attach_db_connectors(request):
# Just put the db objects in the request for easier access
logger.info("Passing redis pool to request object")
request["redis"] = request.app.redis_pool
@app.route("/state/<user_id>", methods=["GET"])
async def access_state(request, user_id):
try:
# Check to see if the value is in cache; if so, return it
with await request["redis"] as redis_conn:
state = await redis_conn.get(user_id, encoding="utf-8")
if state:
return sanic.response.json({"msg": "Success",
"status": 200,
"success": True,
"data": json.loads(state),
"finished_at": datetime.now().isoformat()})
# Otherwise the state object is not in redis
logger.critical("Unable to find user_data in cache.")
return sanic.response.HTTPResponse({"msg": "User state not found",
"success": False,
"status": 404,
"finished_at": datetime.now().isoformat()}, status=404)
except aioredis.ProtocolError:
logger.critical("Unable to connect to state cache")
return sanic.response.HTTPResponse({"msg": "Internal Server Error",
"status": 500,
"success": False,
"finished_at": datetime.now().isoformat()}, status=500)
@app.route("/state/<user_id>/push", methods=["POST"])
async def set_state(request, user_id):
try:
# Pull a connection from the pool
with await request["redis"] as redis_conn:
# Set the value in cache to your new value
await redis_conn.set(user_id, json.dumps(request.json), expire=1800)
logger.info("Successfully pushed state to cache")
return sanic.response.HTTPResponse({"msg": "Successfully pushed state to cache",
"success": True,
"status": 200,
"finished_at": datetime.now().isoformat()})
except aioredis.ProtocolError:
logger.critical("Unable to connect to state cache")
return sanic.response.HTTPResponse({"msg": "Internal Server Error",
"status": 500,
"success": False,
"finished_at": datetime.now().isoformat()}, status=500)
def configure():
# Setup environment variables
env_vars = [os.environ.get(v, None) for v in ENV_VARS]
if not all(env_vars):
# Send back environment variables that were not set
return False, ", ".join([ENV_VARS[i] for i, flag in enumerate(env_vars) if not flag])
else:
# Add all the env vars to our app config
app.config.update({k: v for k, v in zip(ENV_VARS, env_vars)})
setup_logging()
return True, None
def setup_logging():
logging_format = "[%(asctime)s] %(process)d-%(levelname)s "
logging_format += "%(module)s::%(funcName)s():l%(lineno)d: "
logging_format += "%(message)s"
logging.basicConfig(
filename=app.config.APP_LOGFILE,
format=logging_format,
level=logging.DEBUG)
def main(result, missing):
if result:
try:
app.run(host="0.0.0.0", port=8080, debug=True)
except KeyboardInterrupt:
    logging.critical("User killed server. Closing")
else:
logging.critical("Unable to start. Missing environment variables [{0}]".format(missing))
if __name__ == "__main__":
result, missing = configure()
logger = logging.getLogger()
main(result, missing)


@@ -1,18 +1,10 @@
aiocache
aiofiles
aiohttp
aiohttp==1.3.5
beautifulsoup4
bottle
coverage
falcon
gunicorn
httptools
kyoukai
flake8
pytest
recommonmark
sphinx
sphinx_rtd_theme
tornado
tox
ujson
uvloop


@@ -25,7 +25,8 @@ from sanic.websocket import WebSocketProtocol, ConnectionClosed
class Sanic:
def __init__(self, name=None, router=None, error_handler=None):
def __init__(self, name=None, router=None, error_handler=None,
load_env=True):
# Only set up a default log handler if the
# end-user application didn't set anything up.
if not logging.root.handlers and log.level == logging.NOTSET:
@@ -44,7 +45,7 @@ class Sanic:
self.name = name
self.router = router or Router()
self.error_handler = error_handler or ErrorHandler()
self.config = Config()
self.config = Config(load_env=load_env)
self.request_middleware = deque()
self.response_middleware = deque()
self.blueprints = {}
@@ -554,19 +555,24 @@ class Sanic:
if protocol is None:
protocol = (WebSocketProtocol if self.websocket_enabled
else HttpProtocol)
if stop_event is not None:
if debug:
warnings.simplefilter('default')
warnings.warn("stop_event will be removed from future versions.",
DeprecationWarning)
server_settings = self._helper(
host=host, port=port, debug=debug, before_start=before_start,
after_start=after_start, before_stop=before_stop,
after_stop=after_stop, ssl=ssl, sock=sock, workers=workers,
loop=loop, protocol=protocol, backlog=backlog,
stop_event=stop_event, register_sys_signals=register_sys_signals)
register_sys_signals=register_sys_signals)
try:
self.is_running = True
if workers == 1:
serve(**server_settings)
else:
serve_multiple(server_settings, workers, stop_event)
serve_multiple(server_settings, workers)
except:
log.exception(
'Experienced exception while trying to serve')
@@ -595,13 +601,17 @@ class Sanic:
if protocol is None:
protocol = (WebSocketProtocol if self.websocket_enabled
else HttpProtocol)
if stop_event is not None:
if debug:
warnings.simplefilter('default')
warnings.warn("stop_event will be removed from future versions.",
DeprecationWarning)
server_settings = self._helper(
host=host, port=port, debug=debug, before_start=before_start,
after_start=after_start, before_stop=before_stop,
after_stop=after_stop, ssl=ssl, sock=sock,
loop=loop or get_event_loop(), protocol=protocol,
backlog=backlog, stop_event=stop_event,
run_async=True)
backlog=backlog, run_async=True)
return await serve(**server_settings)
@@ -621,7 +631,11 @@
context = create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
context.load_cert_chain(cert, keyfile=key)
ssl = context
if stop_event is not None:
if debug:
warnings.simplefilter('default')
warnings.warn("stop_event will be removed from future versions.",
DeprecationWarning)
if loop is not None:
if debug:
warnings.simplefilter('default')


@@ -1,9 +1,11 @@
import os
import types
SANIC_PREFIX = 'SANIC_'
class Config(dict):
def __init__(self, defaults=None):
def __init__(self, defaults=None, load_env=True):
super().__init__(defaults or {})
self.LOGO = """
@@ -29,6 +31,9 @@ class Config(dict):
self.REQUEST_MAX_SIZE = 100000000 # 100 megababies
self.REQUEST_TIMEOUT = 60 # 60 seconds
if load_env:
self.load_environment_vars()
def __getattr__(self, attr):
try:
return self[attr]
@@ -90,3 +95,13 @@ class Config(dict):
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
def load_environment_vars(self):
    """
    Looks for any SANIC_ prefixed environment variables and applies
    them to the configuration if present.
    """
    for k, v in os.environ.items():
        if k.startswith(SANIC_PREFIX):
            _, config_key = k.split(SANIC_PREFIX, 1)
            self[config_key] = v
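A hedged sketch of what this adds; `SANIC_WORKERS` is a made-up variable for illustration, and values stay strings because they are copied verbatim from `os.environ`:

```python
import os
os.environ['SANIC_WORKERS'] = '4'

from sanic.config import Config

config = Config()                  # load_env=True by default, so it picks this up
assert config.WORKERS == '4'       # prefix stripped; value is still a string

config = Config(load_env=False)    # opts out of environment loading entirely
assert 'WORKERS' not in config
```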


@@ -132,8 +132,8 @@ class StreamingHTTPResponse(BaseHTTPResponse):
async def stream(
self, version="1.1", keep_alive=False, keep_alive_timeout=None):
"""Streams headers, runs the `streaming_fn` callback that writes content
to the response body, then finalizes the response body.
"""Streams headers, runs the `streaming_fn` callback that writes
content to the response body, then finalizes the response body.
"""
headers = self.get_headers(
version, keep_alive=keep_alive,
@@ -331,7 +331,11 @@ def stream(
:param headers: Custom Headers.
"""
return StreamingHTTPResponse(
streaming_fn, headers=headers, content_type=content_type, status=status)
streaming_fn,
headers=headers,
content_type=content_type,
status=status
)
def redirect(to, headers=None, status=302,


@@ -4,10 +4,13 @@ import traceback
import warnings
from functools import partial
from inspect import isawaitable
from multiprocessing import Process, Event
from multiprocessing import Process
from os import set_inheritable
from signal import SIGTERM, SIGINT
from signal import signal as signal_func
from signal import (
SIGTERM, SIGINT,
signal as signal_func,
Signals
)
from socket import socket, SOL_SOCKET, SO_REUSEADDR
from time import time
@@ -421,7 +424,7 @@ def serve(host, port, request_handler, error_handler, before_start=None,
loop.close()
def serve_multiple(server_settings, workers, stop_event=None):
def serve_multiple(server_settings, workers):
"""Start multiple server processes simultaneously. Stop on interrupt
and terminate signals, and drain connections when complete.
@@ -448,11 +451,12 @@ def serve_multiple(server_settings, workers, stop_event=None):
server_settings['host'] = None
server_settings['port'] = None
if stop_event is None:
stop_event = Event()
def sig_handler(signal, frame):
log.info("Received signal {}. Shutting down.".format(
Signals(signal).name))
signal_func(SIGINT, lambda s, f: stop_event.set())
signal_func(SIGTERM, lambda s, f: stop_event.set())
signal_func(SIGINT, lambda s, f: sig_handler(s, f))
signal_func(SIGTERM, lambda s, f: sig_handler(s, f))
processes = []
for _ in range(workers):
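A minimal sketch of how this path is reached through the public API (host and port are illustrative): with `workers` greater than one, `app.run` hands its settings to `serve_multiple`, and shutdown is driven by SIGINT/SIGTERM rather than the removed `stop_event`.

```python
from sanic import Sanic
from sanic.response import text

app = Sanic()

@app.route('/')
async def handler(request):
    return text('OK')

if __name__ == '__main__':
    # Fans out to serve_multiple(); Ctrl-C (SIGINT) or SIGTERM stops every worker
    app.run(host='0.0.0.0', port=8000, workers=4)
```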


@@ -16,6 +16,17 @@ def test_load_from_object():
assert app.config.CONFIG_VALUE == 'should be used'
assert 'not_for_config' not in app.config
def test_auto_load_env():
environ["SANIC_TEST_ANSWER"] = "42"
app = Sanic()
assert app.config.TEST_ANSWER == "42"
del environ["SANIC_TEST_ANSWER"]
def test_dont_load_env():
    environ["SANIC_TEST_ANSWER"] = "42"
    app = Sanic(load_env=False)
    assert getattr(app.config, 'TEST_ANSWER', None) is None
    del environ["SANIC_TEST_ANSWER"]
def test_load_from_file():
app = Sanic('test_load_from_file')


@@ -2,48 +2,46 @@ from sanic import Sanic
from sanic.response import text
from sanic.exceptions import PayloadTooLarge
data_received_app = Sanic('data_received')
data_received_app.config.REQUEST_MAX_SIZE = 1
data_received_default_app = Sanic('data_received_default')
data_received_default_app.config.REQUEST_MAX_SIZE = 1
on_header_default_app = Sanic('on_header')
on_header_default_app.config.REQUEST_MAX_SIZE = 500
@data_received_app.route('/1')
async def handler1(request):
return text('OK')
@data_received_app.exception(PayloadTooLarge)
def handler_exception(request, exception):
return text('Payload Too Large from error_handler.', 413)
def test_payload_too_large_from_error_handler():
data_received_app = Sanic('data_received')
data_received_app.config.REQUEST_MAX_SIZE = 1
@data_received_app.route('/1')
async def handler1(request):
return text('OK')
@data_received_app.exception(PayloadTooLarge)
def handler_exception(request, exception):
return text('Payload Too Large from error_handler.', 413)
response = data_received_app.test_client.get('/1', gather_request=False)
assert response.status == 413
assert response.text == 'Payload Too Large from error_handler.'
@data_received_default_app.route('/1')
async def handler2(request):
return text('OK')
def test_payload_too_large_at_data_received_default():
data_received_default_app = Sanic('data_received_default')
data_received_default_app.config.REQUEST_MAX_SIZE = 1
@data_received_default_app.route('/1')
async def handler2(request):
return text('OK')
response = data_received_default_app.test_client.get(
'/1', gather_request=False)
assert response.status == 413
assert response.text == 'Error: Payload Too Large'
@on_header_default_app.route('/1')
async def handler3(request):
return text('OK')
def test_payload_too_large_at_on_header_default():
on_header_default_app = Sanic('on_header')
on_header_default_app.config.REQUEST_MAX_SIZE = 500
@on_header_default_app.post('/1')
async def handler3(request):
return text('OK')
data = 'a' * 1000
response = on_header_default_app.test_client.post(
'/1', gather_request=False, data=data)


@@ -88,4 +88,7 @@ def test_chained_redirect(redirect_app):
assert request.url.endswith('/1')
assert response.status == 200
assert response.text == 'OK'
assert response.url.path.endswith('/3')
try:
    # response.url is a plain string under older aiohttp versions...
    assert response.url.endswith('/3')
except AttributeError:
    # ...and a URL object (with a .path attribute) under newer ones
    assert response.url.path.endswith('/3')


@@ -10,12 +10,7 @@ python =
[testenv]
deps =
aiofiles
aiohttp
websockets
pytest
beautifulsoup4
coverage
-rrequirements-dev.txt
commands =
pytest tests {posargs}
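`-rrequirements-dev.txt` is pip's requirements-file syntax, so tox now installs the test dependencies from that single pinned file instead of maintaining a second, drifting copy of the list in `tox.ini`.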