From 2dca53a696064d5f3447484febb2294648b37d1f Mon Sep 17 00:00:00 2001 From: Raphael Deem Date: Sun, 26 Feb 2017 16:31:39 -0800 Subject: [PATCH 01/15] remove stop_event --- sanic/app.py | 23 ++++++++++++++++++----- sanic/server.py | 20 ++++++++++++-------- 2 files changed, 30 insertions(+), 13 deletions(-) diff --git a/sanic/app.py b/sanic/app.py index 63700e9d..45e71ca0 100644 --- a/sanic/app.py +++ b/sanic/app.py @@ -464,19 +464,24 @@ class Sanic: :param protocol: Subclass of asyncio protocol class :return: Nothing """ + if stop_event is not None: + if debug: + warnings.simplefilter('default') + warnings.warn("stop_event will be removed from future versions.", + DeprecationWarning) server_settings = self._helper( host=host, port=port, debug=debug, before_start=before_start, after_start=after_start, before_stop=before_stop, after_stop=after_stop, ssl=ssl, sock=sock, workers=workers, loop=loop, protocol=protocol, backlog=backlog, - stop_event=stop_event, register_sys_signals=register_sys_signals) + register_sys_signals=register_sys_signals) try: self.is_running = True if workers == 1: serve(**server_settings) else: - serve_multiple(server_settings, workers, stop_event) + serve_multiple(server_settings, workers) except: log.exception( 'Experienced exception while trying to serve') @@ -498,13 +503,17 @@ class Sanic: NOTE: This does not support multiprocessing and is not the preferred way to run a Sanic application. """ + if stop_event is not None: + if debug: + warnings.simplefilter('default') + warnings.warn("stop_event will be removed from future versions.", + DeprecationWarning) server_settings = self._helper( host=host, port=port, debug=debug, before_start=before_start, after_start=after_start, before_stop=before_stop, after_stop=after_stop, ssl=ssl, sock=sock, loop=loop or get_event_loop(), protocol=protocol, - backlog=backlog, stop_event=stop_event, - run_async=True) + backlog=backlog, run_async=True) return await serve(**server_settings) @@ -514,7 +523,11 @@ class Sanic: protocol=HttpProtocol, backlog=100, stop_event=None, register_sys_signals=True, run_async=False): """Helper function used by `run` and `create_server`.""" - + if stop_event is not None: + if debug: + warnings.simplefilter('default') + warnings.warn("stop_event will be removed from future versions.", + DeprecationWarning) if loop is not None: if debug: warnings.simplefilter('default') diff --git a/sanic/server.py b/sanic/server.py index 83bda3fc..9d0fa0c9 100644 --- a/sanic/server.py +++ b/sanic/server.py @@ -4,10 +4,13 @@ import traceback import warnings from functools import partial from inspect import isawaitable -from multiprocessing import Process, Event +from multiprocessing import Process from os import set_inheritable -from signal import SIGTERM, SIGINT -from signal import signal as signal_func +from signal import ( + SIGTERM, SIGINT, + signal as signal_func, + Signals +) from socket import socket, SOL_SOCKET, SO_REUSEADDR from time import time @@ -379,7 +382,7 @@ def serve(host, port, request_handler, error_handler, before_start=None, loop.close() -def serve_multiple(server_settings, workers, stop_event=None): +def serve_multiple(server_settings, workers): """Start multiple server processes simultaneously. Stop on interrupt and terminate signals, and drain connections when complete. 
@@ -404,11 +407,12 @@ def serve_multiple(server_settings, workers, stop_event=None): server_settings['host'] = None server_settings['port'] = None - if stop_event is None: - stop_event = Event() + def sig_handler(signal, frame): + log.info("Received signal {}. Shutting down.".format( + Signals(signal).name)) - signal_func(SIGINT, lambda s, f: stop_event.set()) - signal_func(SIGTERM, lambda s, f: stop_event.set()) + signal_func(SIGINT, lambda s, f: sig_handler(s, f)) + signal_func(SIGTERM, lambda s, f: sig_handler(s, f)) processes = [] for _ in range(workers): From 65ae7669f9acd0fa1e3d68310d143e647f5a94ca Mon Sep 17 00:00:00 2001 From: Pete Wildsmith Date: Fri, 24 Mar 2017 10:11:30 +0000 Subject: [PATCH 02/15] Document synchronous response.write in streaming The Streaming section of the docs was updated to make clear that a synchronous write should be used in the callback, but this section was not updated. --- docs/sanic/response.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/sanic/response.md b/docs/sanic/response.md index 1a336d47..12718ca1 100644 --- a/docs/sanic/response.md +++ b/docs/sanic/response.md @@ -55,8 +55,8 @@ from sanic import response @app.route("/streaming") async def index(request): async def streaming_fn(response): - await response.write('foo') - await response.write('bar') + response.write('foo') + response.write('bar') return response.stream(streaming_fn, content_type='text/plain') ``` From c5b50fe3cfb4cd3eb84b6eb1d9692f3bc0a888b9 Mon Sep 17 00:00:00 2001 From: Raphael Deem Date: Tue, 21 Mar 2017 18:37:46 -0700 Subject: [PATCH 03/15] allow setting config from individual env variables --- docs/sanic/config.md | 8 ++++++++ sanic/app.py | 5 +++-- sanic/config.py | 16 +++++++++++++++- tests/test_config.py | 11 +++++++++++ 4 files changed, 37 insertions(+), 3 deletions(-) diff --git a/docs/sanic/config.md b/docs/sanic/config.md index 0c22de4b..3ed40fda 100644 --- a/docs/sanic/config.md +++ b/docs/sanic/config.md @@ -29,6 +29,14 @@ In general the convention is to only have UPPERCASE configuration parameters. Th There are several ways how to load configuration. +### From environment variables. + +Any variables defined with the `SANIC_` prefix will be applied to the sanic config. For example, setting `SANIC_REQUEST_TIMEOUT` will be loaded by the application automatically. You can pass the `load_vars` boolean to the Sanic constructor to override that: + +```python +app = Sanic(load_vars=False) +``` + ### From an Object If there are a lot of configuration values and they have sensible defaults it might be helpful to put them into a module: diff --git a/sanic/app.py b/sanic/app.py index 2fd52fae..646981ec 100644 --- a/sanic/app.py +++ b/sanic/app.py @@ -25,7 +25,8 @@ from sanic.websocket import WebSocketProtocol, ConnectionClosed class Sanic: - def __init__(self, name=None, router=None, error_handler=None): + def __init__(self, name=None, router=None, error_handler=None, + load_env=True): # Only set up a default log handler if the # end-user application didn't set anything up. 
if not logging.root.handlers and log.level == logging.NOTSET: @@ -44,7 +45,7 @@ class Sanic: self.name = name self.router = router or Router() self.error_handler = error_handler or ErrorHandler() - self.config = Config() + self.config = Config(load_env=load_env) self.request_middleware = deque() self.response_middleware = deque() self.blueprints = {} diff --git a/sanic/config.py b/sanic/config.py index 3b9a102a..9fb09cbf 100644 --- a/sanic/config.py +++ b/sanic/config.py @@ -1,9 +1,10 @@ import os import types +SANIC_PREFIX = 'SANIC_' class Config(dict): - def __init__(self, defaults=None): + def __init__(self, defaults=None, load_env=True): super().__init__(defaults or {}) self.LOGO = """ ▄▄▄▄▄ @@ -29,6 +30,9 @@ class Config(dict): self.REQUEST_MAX_SIZE = 100000000 # 100 megababies self.REQUEST_TIMEOUT = 60 # 60 seconds + if load_env: + self.load_environment_vars() + def __getattr__(self, attr): try: return self[attr] @@ -90,3 +94,13 @@ class Config(dict): for key in dir(obj): if key.isupper(): self[key] = getattr(obj, key) + + def load_environment_vars(self): + for k, v in os.environ.items(): + """ + Looks for any SANIC_ prefixed environment variables and applies + them to the configuration if present. + """ + if k.startswith(SANIC_PREFIX): + _, config_key = k.split(SANIC_PREFIX, 1) + self[config_key] = v diff --git a/tests/test_config.py b/tests/test_config.py index c7e41ade..aa7a0e4d 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -16,6 +16,17 @@ def test_load_from_object(): assert app.config.CONFIG_VALUE == 'should be used' assert 'not_for_config' not in app.config +def test_auto_load_env(): + environ["SANIC_TEST_ANSWER"] = "42" + app = Sanic() + assert app.config.TEST_ANSWER == "42" + del environ["SANIC_TEST_ANSWER"] + +def test_auto_load_env(): + environ["SANIC_TEST_ANSWER"] = "42" + app = Sanic(load_env=False) + assert getattr(app.config, 'TEST_ANSWER', None) == None + del environ["SANIC_TEST_ANSWER"] def test_load_from_file(): app = Sanic('test_load_from_file') From 1ddb01ac44332b2ab56c73ba68f27699013c4d86 Mon Sep 17 00:00:00 2001 From: Raphael Deem Date: Sun, 26 Feb 2017 16:31:39 -0800 Subject: [PATCH 04/15] remove stop_event --- sanic/app.py | 28 ++++++++++++++++++++++++---- sanic/server.py | 20 ++++++++++++-------- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/sanic/app.py b/sanic/app.py index e101caf5..a8a1e32a 100644 --- a/sanic/app.py +++ b/sanic/app.py @@ -554,19 +554,24 @@ class Sanic: if protocol is None: protocol = (WebSocketProtocol if self.websocket_enabled else HttpProtocol) + if stop_event is not None: + if debug: + warnings.simplefilter('default') + warnings.warn("stop_event will be removed from future versions.", + DeprecationWarning) server_settings = self._helper( host=host, port=port, debug=debug, before_start=before_start, after_start=after_start, before_stop=before_stop, after_stop=after_stop, ssl=ssl, sock=sock, workers=workers, loop=loop, protocol=protocol, backlog=backlog, - stop_event=stop_event, register_sys_signals=register_sys_signals) + register_sys_signals=register_sys_signals) try: self.is_running = True if workers == 1: serve(**server_settings) else: - serve_multiple(server_settings, workers, stop_event) + serve_multiple(server_settings, workers) except: log.exception( 'Experienced exception while trying to serve') @@ -592,16 +597,23 @@ class Sanic: NOTE: This does not support multiprocessing and is not the preferred way to run a Sanic application. 
""" +<<<<<<< df9884de3c7ca6ad248162c8f404afd0ed774359 if protocol is None: protocol = (WebSocketProtocol if self.websocket_enabled else HttpProtocol) +======= + if stop_event is not None: + if debug: + warnings.simplefilter('default') + warnings.warn("stop_event will be removed from future versions.", + DeprecationWarning) +>>>>>>> remove stop_event server_settings = self._helper( host=host, port=port, debug=debug, before_start=before_start, after_start=after_start, before_stop=before_stop, after_stop=after_stop, ssl=ssl, sock=sock, loop=loop or get_event_loop(), protocol=protocol, - backlog=backlog, stop_event=stop_event, - run_async=True) + backlog=backlog, run_async=True) return await serve(**server_settings) @@ -611,6 +623,7 @@ class Sanic: protocol=HttpProtocol, backlog=100, stop_event=None, register_sys_signals=True, run_async=False): """Helper function used by `run` and `create_server`.""" +<<<<<<< df9884de3c7ca6ad248162c8f404afd0ed774359 if isinstance(ssl, dict): # try common aliaseses @@ -622,6 +635,13 @@ class Sanic: context.load_cert_chain(cert, keyfile=key) ssl = context +======= + if stop_event is not None: + if debug: + warnings.simplefilter('default') + warnings.warn("stop_event will be removed from future versions.", + DeprecationWarning) +>>>>>>> remove stop_event if loop is not None: if debug: warnings.simplefilter('default') diff --git a/sanic/server.py b/sanic/server.py index 00bb2331..14ce1ffd 100644 --- a/sanic/server.py +++ b/sanic/server.py @@ -4,10 +4,13 @@ import traceback import warnings from functools import partial from inspect import isawaitable -from multiprocessing import Process, Event +from multiprocessing import Process from os import set_inheritable -from signal import SIGTERM, SIGINT -from signal import signal as signal_func +from signal import ( + SIGTERM, SIGINT, + signal as signal_func, + Signals +) from socket import socket, SOL_SOCKET, SO_REUSEADDR from time import time @@ -421,7 +424,7 @@ def serve(host, port, request_handler, error_handler, before_start=None, loop.close() -def serve_multiple(server_settings, workers, stop_event=None): +def serve_multiple(server_settings, workers): """Start multiple server processes simultaneously. Stop on interrupt and terminate signals, and drain connections when complete. @@ -448,11 +451,12 @@ def serve_multiple(server_settings, workers, stop_event=None): server_settings['host'] = None server_settings['port'] = None - if stop_event is None: - stop_event = Event() + def sig_handler(signal, frame): + log.info("Received signal {}. Shutting down.".format( + Signals(signal).name)) - signal_func(SIGINT, lambda s, f: stop_event.set()) - signal_func(SIGTERM, lambda s, f: stop_event.set()) + signal_func(SIGINT, lambda s, f: sig_handler(s, f)) + signal_func(SIGTERM, lambda s, f: sig_handler(s, f)) processes = [] for _ in range(workers): From 748ca281855e540a40929ce9871b36f31fba702a Mon Sep 17 00:00:00 2001 From: Joir-dan Gumbs Date: Mon, 27 Mar 2017 15:42:13 -0700 Subject: [PATCH 05/15] Created detailed example of using sanic. Adds configurations based on various environment variables, handles database access (using aioredis), uses middleware to check for db object and attach it to request object, and logs events to a logfile (which is set using environment variables). 
--- examples/detailed_example.py | 96 ++++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 examples/detailed_example.py diff --git a/examples/detailed_example.py b/examples/detailed_example.py new file mode 100644 index 00000000..aaca6fd1 --- /dev/null +++ b/examples/detailed_example.py @@ -0,0 +1,96 @@ +import json +import logging +import os + +import aioredis + +import sanic +from sanic import Sanic + + +ENV_VARS = ["REDIS_HOST", "REDIS_PORT", + "REDIS_MINPOOL", "REDIS_MAXPOOL", + "REDIS_PASS", "APP_LOGFILE"] + +app = Sanic(name=__name__) + +logger = None + + +@app.middleware("request") +async def attach_db_connectors(request): + if not hasattr(request.app.config, "REDIS"): + logger.info("Setting up connection to Redis Cache") + request.app.config.REDIS = await aioredis.create_pool((app.config.REDIS_HOST, int(app.config.REDIS_PORT)), + minsize=int(app.config.REDIS_MINPOOL), + maxsize=int(app.config.REDIS_MAXPOOL), + password=app.config.REDIS_PASS) + # Just put the db objects in the request for easier access + request["redis"] = request.app.config.REDIS + + +@app.route("/state/", methods=["GET"]) +async def access_state(request, user_id): + try: + # Check to see if the value is in cache, if so lets return that + with await request["redis"] as redis_conn: + state = await redis_conn.get(user_id, encoding="utf-8") + if state: + return sanic.response.json(json.loads(state)) + # Then state object is not in redis + logger.critical("Unable to find user_data in cache.") + return sanic.response.HTTPResponse({"msg": "User state not found"}, status=404) + + except aioredis.ProtocolError: + logger.critical("Unable to connect to state cache") + return sanic.response.HTTPResponse({"msg": "Internal Server Error"}, status=500) + + +@app.route("/state//push", methods=["POST"]) +async def set_state(request, user_id): + try: + with await request["redis"] as redis_conn: + # Set the value in cache to your new value + await redis_conn.set(user_id, json.dumps(request.json), expire=1800) + logger.info("Successfully retrieved from cache") + return sanic.response.HTTPResponse({"msg": "Successfully pushed state to cache"}) + except aioredis.ProtocolError: + logger.critical("UNable to connect to state cache") + return sanic.response.HTTPResponse({"msg": "Interal Server Error"}, status=500) + + +def configure(): + # Setup environment variables + env_vars = [os.environ.get(v, None) for v in ENV_VARS] + if not all(env_vars): + # Send back environment variables that were not set + return False, [ENV_VARS[i] for i, flag in env_vars if not flag] + else: + app.config.update({k: v for k, v in zip(ENV_VARS, env_vars)}) + setup_logging() + logging.info("Configuraiton complete") + return True, [] + +def setup_logging(): + logging_format = "[%(asctime)s] %(process)d-%(levelname)s " + logging_format += "%(module)s::%(funcName)s():l%(lineno)d: " + logging_format += "%(message)s" + + print(app.config.APP_LOGFILE) + logging.basicConfig( + filename=app.config.APP_LOGFILE, + format=logging_format, + level=logging.DEBUG + ) + +if __name__ == "__main__": + result, missing = configure() + logger = logging.getLogger() + if result: + try: + app.run(host="0.0.0.0", port=8080, debug=True) + except: + logger.critical("User killed server. Closing") + else: + need_string = ", ".join(missing) + logger.critical("Unable to start. 
Missing environment variables [{0}]".format(need_string)) From ee79750a220f49f2439209b0359551acf4afab2b Mon Sep 17 00:00:00 2001 From: Joir-dan Gumbs Date: Tue, 28 Mar 2017 01:22:36 -0700 Subject: [PATCH 06/15] Cleaned up functions. Added extra middleware function to log endpoint being called. Added documentation to make easier to understand. --- examples/detailed_example.py | 41 ++++++++++++++++++++++++------------ 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/examples/detailed_example.py b/examples/detailed_example.py index aaca6fd1..2f66006a 100644 --- a/examples/detailed_example.py +++ b/examples/detailed_example.py @@ -17,8 +17,17 @@ app = Sanic(name=__name__) logger = None +@app.middleware("request") +async def log_uri(request): + # Simple middleware to log the URI endpoint that was called + logger.info("URI called: {0}".format(request.url)) + + @app.middleware("request") async def attach_db_connectors(request): + # We will check to see if our redis pool has been created + # If you have access to the app object, you can set app.config directly + # If you don't have access to the app object, you can use request.app if not hasattr(request.app.config, "REDIS"): logger.info("Setting up connection to Redis Cache") request.app.config.REDIS = await aioredis.create_pool((app.config.REDIS_HOST, int(app.config.REDIS_PORT)), @@ -26,6 +35,7 @@ async def attach_db_connectors(request): maxsize=int(app.config.REDIS_MAXPOOL), password=app.config.REDIS_PASS) # Just put the db objects in the request for easier access + logger.info("Passing pool to request object") request["redis"] = request.app.config.REDIS @@ -40,7 +50,6 @@ async def access_state(request, user_id): # Then state object is not in redis logger.critical("Unable to find user_data in cache.") return sanic.response.HTTPResponse({"msg": "User state not found"}, status=404) - except aioredis.ProtocolError: logger.critical("Unable to connect to state cache") return sanic.response.HTTPResponse({"msg": "Internal Server Error"}, status=500) @@ -49,13 +58,14 @@ async def access_state(request, user_id): @app.route("/state//push", methods=["POST"]) async def set_state(request, user_id): try: + # Pull a connection from the pool with await request["redis"] as redis_conn: # Set the value in cache to your new value await redis_conn.set(user_id, json.dumps(request.json), expire=1800) logger.info("Successfully retrieved from cache") return sanic.response.HTTPResponse({"msg": "Successfully pushed state to cache"}) except aioredis.ProtocolError: - logger.critical("UNable to connect to state cache") + logger.critical("Unable to connect to state cache") return sanic.response.HTTPResponse({"msg": "Interal Server Error"}, status=500) @@ -64,33 +74,36 @@ def configure(): env_vars = [os.environ.get(v, None) for v in ENV_VARS] if not all(env_vars): # Send back environment variables that were not set - return False, [ENV_VARS[i] for i, flag in env_vars if not flag] + return False, ", ".join([ENV_VARS[i] for i, flag in env_vars if not flag]) else: + # Add all the env vars to our app config app.config.update({k: v for k, v in zip(ENV_VARS, env_vars)}) setup_logging() - logging.info("Configuraiton complete") - return True, [] + return True, None + def setup_logging(): logging_format = "[%(asctime)s] %(process)d-%(levelname)s " logging_format += "%(module)s::%(funcName)s():l%(lineno)d: " logging_format += "%(message)s" - print(app.config.APP_LOGFILE) logging.basicConfig( filename=app.config.APP_LOGFILE, format=logging_format, - level=logging.DEBUG - ) 
+ level=logging.DEBUG) -if __name__ == "__main__": - result, missing = configure() - logger = logging.getLogger() + +def main(result, missing): if result: try: app.run(host="0.0.0.0", port=8080, debug=True) except: - logger.critical("User killed server. Closing") + logging.critical("User killed server. Closing") else: - need_string = ", ".join(missing) - logger.critical("Unable to start. Missing environment variables [{0}]".format(need_string)) + logging.critical("Unable to start. Missing environment variables [{0}]".format(missing)) + + +if __name__ == "__main__": + result, missing = configure() + logger = logging.getLogger() + main(result, missing) From e3cf50f791fb189624384bc855292daaa1780efc Mon Sep 17 00:00:00 2001 From: Joir-dan Gumbs Date: Tue, 28 Mar 2017 15:00:23 -0700 Subject: [PATCH 07/15] Changed out redis middleware for redis listeners (open/close). Fleshed out the payloads of both endpoints. Added comment about required packages. --- examples/detailed_example.py | 61 ++++++++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 17 deletions(-) diff --git a/examples/detailed_example.py b/examples/detailed_example.py index 2f66006a..99e71cb1 100644 --- a/examples/detailed_example.py +++ b/examples/detailed_example.py @@ -1,7 +1,10 @@ +# This demo requires aioredis and environmental variables established in ENV_VARS import json import logging import os +from datetime import datetime + import aioredis import sanic @@ -23,20 +26,28 @@ async def log_uri(request): logger.info("URI called: {0}".format(request.url)) +@app.listener('before_server_start') +async def before_server_start(app, loop): + logger.info("Starting redis pool") + app.redis_pool = await aioredis.create_pool( + (app.config.REDIS_HOST, int(app.config.REDIS_PORT)), + minsize=int(app.config.REDIS_MINPOOL), + maxsize=int(app.config.REDIS_MAXPOOL), + password=app.config.REDIS_PASS) + + +@app.listener('after_server_stop') +async def after_server_stop(app, loop): + logger.info("Closing redis pool") + app.redis_pool.close() + await app.redis_pool.wait_closed() + + @app.middleware("request") async def attach_db_connectors(request): - # We will check to see if our redis pool has been created - # If you have access to the app object, you can set app.config directly - # If you don't have access to the app object, you can use request.app - if not hasattr(request.app.config, "REDIS"): - logger.info("Setting up connection to Redis Cache") - request.app.config.REDIS = await aioredis.create_pool((app.config.REDIS_HOST, int(app.config.REDIS_PORT)), - minsize=int(app.config.REDIS_MINPOOL), - maxsize=int(app.config.REDIS_MAXPOOL), - password=app.config.REDIS_PASS) # Just put the db objects in the request for easier access - logger.info("Passing pool to request object") - request["redis"] = request.app.config.REDIS + logger.info("Passing redis pool to request object") + request["redis"] = request.app.redis_pool @app.route("/state/", methods=["GET"]) @@ -46,13 +57,23 @@ async def access_state(request, user_id): with await request["redis"] as redis_conn: state = await redis_conn.get(user_id, encoding="utf-8") if state: - return sanic.response.json(json.loads(state)) + return sanic.response.json({"msg": "Success", + "status": 200, + "success": True, + "data": json.loads(state), + "finished_at": datetime.now().isoformat()}) # Then state object is not in redis logger.critical("Unable to find user_data in cache.") - return sanic.response.HTTPResponse({"msg": "User state not found"}, status=404) + return 
sanic.response.HTTPResponse({"msg": "User state not found", + "success": False, + "status": 404, + "finished_at": datetime.now().isoformat()}, status=404) except aioredis.ProtocolError: logger.critical("Unable to connect to state cache") - return sanic.response.HTTPResponse({"msg": "Internal Server Error"}, status=500) + return sanic.response.HTTPResponse({"msg": "Internal Server Error", + "status": 500, + "success": False, + "finished_at": datetime.now().isoformat()}, status=500) @app.route("/state//push", methods=["POST"]) @@ -62,11 +83,17 @@ async def set_state(request, user_id): with await request["redis"] as redis_conn: # Set the value in cache to your new value await redis_conn.set(user_id, json.dumps(request.json), expire=1800) - logger.info("Successfully retrieved from cache") - return sanic.response.HTTPResponse({"msg": "Successfully pushed state to cache"}) + logger.info("Successfully pushed state to cache") + return sanic.response.HTTPResponse({"msg": "Successfully pushed state to cache", + "success": True, + "status": 200, + "finished_at": datetime.now().isoformat()}) except aioredis.ProtocolError: logger.critical("Unable to connect to state cache") - return sanic.response.HTTPResponse({"msg": "Interal Server Error"}, status=500) + return sanic.response.HTTPResponse({"msg": "Internal Server Error", + "status": 500, + "success": False, + "finished_at": datetime.now().isoformat()}, status=500) def configure(): From 8ba1b5fc35ce0d023fa68d8e9c693f5dbcf7c9c3 Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:33:55 -0500 Subject: [PATCH 08/15] Add docker support for local unit testing Addresses consistency across different OS's by making it very similar to the base Travis image. --- Dockerfile | 6 ++++++ Makefile | 4 ++++ 2 files changed, 10 insertions(+) create mode 100644 Dockerfile create mode 100644 Makefile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..ee8ca2be --- /dev/null +++ b/Dockerfile @@ -0,0 +1,6 @@ +FROM python:3.6 + +ADD . /app +WORKDIR /app + +RUN pip install tox diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..ad64412f --- /dev/null +++ b/Makefile @@ -0,0 +1,4 @@ +test: + find . -name "*.pyc" -delete + docker build -t sanic/test-image . 
+ docker run -t sanic/test-image tox From 75a4df0f32e60e103404758901b1f8965cdacf79 Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:34:34 -0500 Subject: [PATCH 09/15] Simplify this, it had a lot of fluff --- requirements-dev.txt | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 1f11a90c..65dd0d7d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,18 +1,10 @@ -aiocache aiofiles aiohttp beautifulsoup4 -bottle coverage -falcon -gunicorn httptools -kyoukai +flake8 pytest -recommonmark -sphinx -sphinx_rtd_theme -tornado tox ujson uvloop From 1ef69adc6f8967ac8b227d4c6ad9bd0270220bb0 Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:34:44 -0500 Subject: [PATCH 10/15] Simplify this as well, it replicated effort --- tox.ini | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tox.ini b/tox.ini index 33e4298f..0e6dc7c6 100644 --- a/tox.ini +++ b/tox.ini @@ -10,12 +10,7 @@ python = [testenv] deps = - aiofiles - aiohttp - websockets - pytest - beautifulsoup4 - coverage + -rrequirements-dev.txt commands = pytest tests {posargs} From dcc19d17d4a43b0b7c6d4472dfea52146bd2dbb9 Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:38:35 -0500 Subject: [PATCH 11/15] Lock to aiohttp 1.3.5 for now --- requirements-dev.txt | 2 +- tests/test_redirect.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 65dd0d7d..28014eb6 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,5 @@ aiofiles -aiohttp +aiohttp==1.3.5 beautifulsoup4 coverage httptools diff --git a/tests/test_redirect.py b/tests/test_redirect.py index 25efe1f3..421ee1cf 100644 --- a/tests/test_redirect.py +++ b/tests/test_redirect.py @@ -88,4 +88,4 @@ def test_chained_redirect(redirect_app): assert request.url.endswith('/1') assert response.status == 200 assert response.text == 'OK' - assert response.url.path.endswith('/3') + assert response.url.endswith('/3') From 3a8cfb1f45b68eb7ff8f9fbef3ec0fdf8226baed Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:38:45 -0500 Subject: [PATCH 12/15] Make these tests not so far apart --- tests/test_payload_too_large.py | 52 ++++++++++++++++----------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/tests/test_payload_too_large.py b/tests/test_payload_too_large.py index a1a58d3d..70ec56ce 100644 --- a/tests/test_payload_too_large.py +++ b/tests/test_payload_too_large.py @@ -2,48 +2,46 @@ from sanic import Sanic from sanic.response import text from sanic.exceptions import PayloadTooLarge -data_received_app = Sanic('data_received') -data_received_app.config.REQUEST_MAX_SIZE = 1 -data_received_default_app = Sanic('data_received_default') -data_received_default_app.config.REQUEST_MAX_SIZE = 1 -on_header_default_app = Sanic('on_header') -on_header_default_app.config.REQUEST_MAX_SIZE = 500 - - -@data_received_app.route('/1') -async def handler1(request): - return text('OK') - - -@data_received_app.exception(PayloadTooLarge) -def handler_exception(request, exception): - return text('Payload Too Large from error_handler.', 413) - def test_payload_too_large_from_error_handler(): + data_received_app = Sanic('data_received') + data_received_app.config.REQUEST_MAX_SIZE = 1 + + @data_received_app.route('/1') + async def handler1(request): + return text('OK') + + @data_received_app.exception(PayloadTooLarge) + def handler_exception(request, 
exception): + return text('Payload Too Large from error_handler.', 413) + response = data_received_app.test_client.get('/1', gather_request=False) assert response.status == 413 assert response.text == 'Payload Too Large from error_handler.' -@data_received_default_app.route('/1') -async def handler2(request): - return text('OK') - - def test_payload_too_large_at_data_received_default(): + data_received_default_app = Sanic('data_received_default') + data_received_default_app.config.REQUEST_MAX_SIZE = 1 + + @data_received_default_app.route('/1') + async def handler2(request): + return text('OK') + response = data_received_default_app.test_client.get( '/1', gather_request=False) assert response.status == 413 assert response.text == 'Error: Payload Too Large' -@on_header_default_app.route('/1') -async def handler3(request): - return text('OK') - - def test_payload_too_large_at_on_header_default(): + on_header_default_app = Sanic('on_header') + on_header_default_app.config.REQUEST_MAX_SIZE = 500 + + @on_header_default_app.post('/1') + async def handler3(request): + return text('OK') + data = 'a' * 1000 response = on_header_default_app.test_client.post( '/1', gather_request=False, data=data) From 04a0774ee598445db681a4a7db2fca200be65897 Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:44:01 -0500 Subject: [PATCH 13/15] Fix line length --- sanic/response.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sanic/response.py b/sanic/response.py index 38cd68db..6afcb061 100644 --- a/sanic/response.py +++ b/sanic/response.py @@ -132,8 +132,8 @@ class StreamingHTTPResponse(BaseHTTPResponse): async def stream( self, version="1.1", keep_alive=False, keep_alive_timeout=None): - """Streams headers, runs the `streaming_fn` callback that writes content - to the response body, then finalizes the response body. + """Streams headers, runs the `streaming_fn` callback that writes + content to the response body, then finalizes the response body. """ headers = self.get_headers( version, keep_alive=keep_alive, From f0a55b5cbb5542ba3ec769be28e7fb02aaf2a359 Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:47:52 -0500 Subject: [PATCH 14/15] Fix line length again... --- sanic/response.py | 6 +++++- tests/test_redirect.py | 5 ++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/sanic/response.py b/sanic/response.py index 6afcb061..3da9ac5e 100644 --- a/sanic/response.py +++ b/sanic/response.py @@ -331,7 +331,11 @@ def stream( :param headers: Custom Headers. """ return StreamingHTTPResponse( - streaming_fn, headers=headers, content_type=content_type, status=status) + streaming_fn, + headers=headers, + content_type=content_type, + status=status + ) def redirect(to, headers=None, status=302, diff --git a/tests/test_redirect.py b/tests/test_redirect.py index 421ee1cf..f5b734e3 100644 --- a/tests/test_redirect.py +++ b/tests/test_redirect.py @@ -88,4 +88,7 @@ def test_chained_redirect(redirect_app): assert request.url.endswith('/1') assert response.status == 200 assert response.text == 'OK' - assert response.url.endswith('/3') + try: + assert response.url.endswith('/3') + except AttributeError: + assert response.url.path.endswith('/3') From 18405b39080ff80c1b854075f57a218b66b963cf Mon Sep 17 00:00:00 2001 From: Eli Uriegas Date: Tue, 28 Mar 2017 22:57:58 -0500 Subject: [PATCH 15/15] There was a line missing here? 
--- sanic/config.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sanic/config.py b/sanic/config.py index 9fb09cbf..99d39a9c 100644 --- a/sanic/config.py +++ b/sanic/config.py @@ -3,6 +3,7 @@ import types SANIC_PREFIX = 'SANIC_' + class Config(dict): def __init__(self, defaults=None, load_env=True): super().__init__(defaults or {})
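For quick reference, a minimal usage sketch of the environment-variable configuration introduced in PATCH 03 (this example is not part of any patch above). It assumes the behaviour shown in sanic/config.py and tests/test_config.py from that patch: `SANIC_`-prefixed variables are copied into the config as strings, and the constructor keyword is `load_env` (the docs/sanic/config.md snippet in the same patch calls it `load_vars`, but the implementation and tests use `load_env`).

```python
import os
from sanic import Sanic

# Any SANIC_-prefixed variable is copied into app.config with the prefix stripped.
os.environ["SANIC_REQUEST_TIMEOUT"] = "30"

app = Sanic()
print(app.config.REQUEST_TIMEOUT)       # "30" -- loaded as a string; no type casting is applied

# Opt out of environment loading entirely:
bare_app = Sanic(load_env=False)
print(bare_app.config.REQUEST_TIMEOUT)  # 60 -- the built-in default still applies
```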