
* Streaming request by async for (see the handler sketch after this list).
* Make all requests streaming and preload body for non-streaming handlers.
* Cleanup of code and avoid mixing streaming responses.
* Async http protocol loop.
* Change of test: don't require early bad request error but only after CRLF-CRLF.
* Add back streaming requests.
* Rewritten request body parser.
* Misc. cleanup, down to 4 failing tests.
* All tests OK.
* Entirely remove request body queue.
* Let black f*ckup the layout
* Better testing error messages on protocol errors.
* Remove StreamBuffer tests because the type is about to be removed.
* Remove tests using the deprecated get_headers function that can no longer be supported. Chunked mode is now autodetected, so do not put content-length header if chunked mode is preferred.
* Major refactoring of HTTP protocol handling (new module http.py added), all requests made streaming. A few compatibility issues and a lot of cleanup to be done remain, 16 tests failing.
* Terminate check_timeouts once connection_task finishes.
* Code cleanup, 14 tests failing.
* Much cleanup, 12 failing...
* Even more cleanup and error checking, 8 failing tests.
* Remove keep-alive header from responses. First of all, it should say timeout=<value> which wasn't the case with existing implementation, and secondly none of the other web servers I tried include this header.
* Everything but CustomServer OK.
* Linter
* Disable custom protocol test
* Remove unnecessary variables, optimise performance.
* A test was missing that body_init/body_push/body_finish are never called. Rewritten using receive_body and case switching to make it fail if bypassed.
* Minor fixes.
* Remove unused code.
* Py 3.8 check for deprecated loop argument.
* Fix a middleware cancellation handling test with py38.
* Linter 'n fixes
* Typing
* Stricter handling of request header size
* More specific error messages on Payload Too Large.
* Init http.response = None
* Messages further tuned.
* Always try to consume request body, plus minor cleanup.
* Add a missing check in case of close_if_idle on a dead connection.
* Avoid error messages on PayloadTooLarge.
* Add test for new API.
* json takes str, not bytes
* Default to no maximum request size for streaming handlers.
* Fix chunked mode crash.
* Header values should be strictly ASCII but both UTF-8 and Latin-1 exist. Use UTF-8B to cope with all.
* Refactoring and cleanup.
* Unify response header processing of ASGI and asyncio modes.
* Avoid special handling of StreamingHTTPResponse.
* 35 % speedup in HTTP/1.1 response formatting (not so much overall effect).
* Duplicate set-cookie headers were being produced.
* Cleanup processed_headers some more.
* Linting
* Import ordering
* Response middleware ran by async request.respond() (also shown in the sketch below).
* Need to check if transport is closing to avoid getting stuck in sending loops after peer has disconnected.
* Middleware and error handling refactoring.
* Linter
* Fix tracking of HTTP stage when writing to transport fails.
* Add clarifying comment
* Add a check for request body functions and a test for NotImplementedError.
* Linter and typing
* These must be tuples + hack mypy warnings away.
* New streaming test and minor fixes.
* Constant receive buffer size.
* 256 KiB send and receive buffers.
* Revert "256 KiB send and receive buffers." This reverts commit abc1e3edb21a5e6925fa4c856657559608a8d65b.
* app.handle_exception already sends the response.
* Improved handling of errors during request.
* An odd hack to avoid an httpx limitation that causes test failures.
* Limit request header size to 8 KiB at most.
* Remove unnecessary use of format string.
* Cleanup tests
* Remove artifact
* Fix type checking
* Mark test for skipping
* Cleanup some edge cases
* Add ignore_body flag to safe methods
* Add unit tests for timeout logic
* Add unit tests for timeout logic
* Fix Mock usage in timeout test
* Change logging test to only logger in handler
* Windows py3.8 logging issue with current testing client
* Add test_header_size_exceeded
* Resolve merge conflicts
* Add request middleware to hard exception handling
* Add request middleware to hard exception handling
* Request middleware on exception handlers
* Linting
* Cleanup deprecations

Co-authored-by: L. Kärkkäinen <tronic@users.noreply.github.com>
Co-authored-by: Adam Hopkins <admhpkns@gmail.com>
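To make the streaming model described in the bullets concrete, here is a minimal two-handler sketch. It is not code from this change set: the handler names and routes are invented for illustration, and the stream=True route flag, request.stream.read(), request.respond(), response.send() and response.eof() follow the public Sanic streaming API as documented in recent releases, so the exact names and signatures may differ from the internals touched by these commits.

from sanic import Sanic
from sanic.response import text

app = Sanic("streaming_sketch")


# Sketch: read the request body chunk by chunk instead of waiting for it
# to be fully buffered; read() returns None once the body is exhausted.
@app.post("/upload", stream=True)
async def upload(request):
    received = 0
    while True:
        chunk = await request.stream.read()
        if chunk is None:
            break
        received += len(chunk)
    return text(f"received {received} bytes")


# Sketch: the asynchronous request.respond() flow.  In the model described
# above, awaiting respond() is what runs the response middleware; the
# handler then pushes body data explicitly and closes the stream.
@app.post("/echo", stream=True)
async def echo(request):
    response = await request.respond(content_type="application/octet-stream")
    while True:
        chunk = await request.stream.read()
        if chunk is None:
            break
        await response.send(chunk)
    await response.eof()

The "Streaming request by async for" bullet refers to the same idea: consuming body chunks as they arrive rather than preloading the whole request, which is also why streaming handlers default to no maximum request size.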
101 lines · 2.9 KiB · Python
import multiprocessing
import pickle
import random
import signal

import pytest

from sanic import Blueprint
from sanic.response import text
from sanic.testing import HOST, PORT


@pytest.mark.skipif(
    not hasattr(signal, "SIGALRM"),
    reason="SIGALRM is not implemented for this platform, we have to come "
    "up with another timeout strategy to test these",
)
def test_multiprocessing(app):
    """Tests that the number of children we produce is correct"""
    # Selects a number at random so we can spot check
    num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
    process_list = set()

    def stop_on_alarm(*args):
        for process in multiprocessing.active_children():
            process_list.add(process.pid)
            process.terminate()

    signal.signal(signal.SIGALRM, stop_on_alarm)
    signal.alarm(3)
    app.run(HOST, PORT, workers=num_workers)

    assert len(process_list) == num_workers


@pytest.mark.skipif(
    not hasattr(signal, "SIGALRM"),
    reason="SIGALRM is not implemented for this platform",
)
def test_multiprocessing_with_blueprint(app):
    # Selects a number at random so we can spot check
    num_workers = random.choice(range(2, multiprocessing.cpu_count() * 2 + 1))
    process_list = set()

    def stop_on_alarm(*args):
        for process in multiprocessing.active_children():
            process_list.add(process.pid)
            process.terminate()

    signal.signal(signal.SIGALRM, stop_on_alarm)
    signal.alarm(3)

    bp = Blueprint("test_text")
    app.blueprint(bp)
    app.run(HOST, PORT, workers=num_workers)

    assert len(process_list) == num_workers


# this function must be outside a test function so that it can be
# able to be pickled (local functions cannot be pickled).
def handler(request):
    return text("Hello")


# Multiprocessing on Windows requires app to be able to be pickled
@pytest.mark.parametrize("protocol", [3, 4])
def test_pickle_app(app, protocol):
    app.route("/")(handler)
    p_app = pickle.dumps(app, protocol=protocol)
    del app
    up_p_app = pickle.loads(p_app)
    assert up_p_app
    request, response = up_p_app.test_client.get("/")
    assert response.text == "Hello"


@pytest.mark.parametrize("protocol", [3, 4])
def test_pickle_app_with_bp(app, protocol):
    bp = Blueprint("test_text")
    bp.route("/")(handler)
    app.blueprint(bp)
    p_app = pickle.dumps(app, protocol=protocol)
    del app
    up_p_app = pickle.loads(p_app)
    assert up_p_app
    request, response = up_p_app.test_client.get("/")
    assert response.text == "Hello"


@pytest.mark.parametrize("protocol", [3, 4])
def test_pickle_app_with_static(app, protocol):
    app.route("/")(handler)
    app.static("/static", "/tmp/static")
    p_app = pickle.dumps(app, protocol=protocol)
    del app
    up_p_app = pickle.loads(p_app)
    assert up_p_app
    request, response = up_p_app.test_client.get("/static/missing.txt")
    assert response.status == 404