Format with default line-length=88 (was 79 with Sanic).
parent e4daf1ab21
commit 1afea39cb2
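Every hunk below is the same mechanical change: a call or expression that had been wrapped to fit 79 columns now fits within 88 and is collapsed onto one line. A minimal sketch of reproducing the difference, assuming the formatter is Black (88 is its default line length) and using one statement taken from a hunk below:

```python
import black

SRC = 'logdna_handler = LogDNAHandler(getenv("LOGDNA_API_KEY"), options=logdna_options)\n'

# At line-length=79 the 81-character call gets wrapped across three lines;
# at Black's default of 88 it is left on a single line.
for width in (79, 88):
    print(f"--- line-length={width} ---")
    print(black.format_str(SRC, mode=black.Mode(line_length=width)))
```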
@@ -35,9 +35,7 @@ def proxy(request, path):

@https.main_process_start
async def start(app, _):
-    http_server = await http.create_server(
-        port=HTTP_PORT, return_asyncio_server=True
-    )
+    http_server = await http.create_server(port=HTTP_PORT, return_asyncio_server=True)
    app.add_task(runner(http, http_server))
    app.ctx.http_server = http_server
    app.ctx.http = http
@@ -33,9 +33,7 @@ logdna_options = {
    "mac": get_mac_address(),
}

-logdna_handler = LogDNAHandler(
-    getenv("LOGDNA_API_KEY"), options=logdna_options
-)
+logdna_handler = LogDNAHandler(getenv("LOGDNA_API_KEY"), options=logdna_options)

logdna = logging.getLogger(__name__)
logdna.setLevel(logging.INFO)
@@ -42,9 +42,7 @@ async def handler_file(request):

@app.route("/file_stream")
async def handler_file_stream(request):
-    return await response.file_stream(
-        Path("../") / "setup.py", chunk_size=1024
-    )
+    return await response.file_stream(Path("../") / "setup.py", chunk_size=1024)


@app.post("/stream", stream=True)
@@ -36,9 +36,7 @@ async def test(request):

if __name__ == "__main__":
    asyncio.set_event_loop(uvloop.new_event_loop())
-    serv_coro = app.create_server(
-        host="0.0.0.0", port=8000, return_asyncio_server=True
-    )
+    serv_coro = app.create_server(host="0.0.0.0", port=8000, return_asyncio_server=True)
    loop = asyncio.get_event_loop()
    serv_task = asyncio.ensure_future(serv_coro, loop=loop)
    signal(SIGINT, lambda s, f: loop.stop())
@@ -42,9 +42,7 @@ async def test_file(request):

@app.route("/file_stream")
async def test_file_stream(request):
-    return await response.file_stream(
-        os.path.abspath("setup.py"), chunk_size=1024
-    )
+    return await response.file_stream(os.path.abspath("setup.py"), chunk_size=1024)


# ----------------------------------------------- #
@@ -11,9 +11,7 @@ app = Sanic("Example")
bp = Blueprint("bp", host="bp.example.com")


-@app.route(
-    "/", host=["example.com", "somethingelse.com", "therestofyourdomains.com"]
-)
+@app.route("/", host=["example.com", "somethingelse.com", "therestofyourdomains.com"])
async def hello_0(request):
    return response.text("Some defaults")

@@ -7,14 +7,10 @@ from emoji import EMOJI
COLUMN_PATTERN = re.compile(r"---:1\s*(.*?)\s*:--:1\s*(.*?)\s*:---", re.DOTALL)
PYTHON_HIGHLIGHT_PATTERN = re.compile(r"```python\{+.*?\}", re.DOTALL)
BASH_HIGHLIGHT_PATTERN = re.compile(r"```bash\{+.*?\}", re.DOTALL)
-NOTIFICATION_PATTERN = re.compile(
-    r":::\s*(\w+)\s*(.*?)\n([\s\S]*?):::", re.MULTILINE
-)
+NOTIFICATION_PATTERN = re.compile(r":::\s*(\w+)\s*(.*?)\n([\s\S]*?):::", re.MULTILINE)
EMOJI_PATTERN = re.compile(r":(\w+):")
CURRENT_DIR = Path(__file__).parent
-SOURCE_DIR = (
-    CURRENT_DIR.parent.parent.parent.parent / "sanic-guide" / "src" / "en"
-)
+SOURCE_DIR = CURRENT_DIR.parent.parent.parent.parent / "sanic-guide" / "src" / "en"


def convert_columns(content: str):
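For readers unfamiliar with the VuePress-style markup these patterns target, a brief illustrative check of NOTIFICATION_PATTERN (the regex is copied from the hunk above; the sample markdown is invented):

```python
import re

NOTIFICATION_PATTERN = re.compile(r":::\s*(\w+)\s*(.*?)\n([\s\S]*?):::", re.MULTILINE)

sample = """::: tip Heads up
Sanic reformatted its codebase with Black's default line length.
:::"""

match = NOTIFICATION_PATTERN.search(sample)
if match:
    kind, title, body = match.groups()
    print(kind)           # tip
    print(title)          # Heads up
    print(body.strip())   # the admonition body
```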
@ -13,9 +13,7 @@ def do_footer(builder: Builder, request: Request) -> None:
|
|||
|
||||
|
||||
def _pagination(request: Request) -> Builder:
|
||||
return E.div(
|
||||
_pagination_left(request), _pagination_right(request), class_="level"
|
||||
)
|
||||
return E.div(_pagination_left(request), _pagination_right(request), class_="level")
|
||||
|
||||
|
||||
def _pagination_left(request: Request) -> Builder:
|
||||
|
@ -64,9 +62,7 @@ def _content() -> Builder:
|
|||
href="https://github.com/sanic-org/sanic/blob/master/LICENSE",
|
||||
target="_blank",
|
||||
rel="nofollow noopener noreferrer",
|
||||
).br()(
|
||||
E.small(f"Copyright © 2018-{year} Sanic Community Organization")
|
||||
),
|
||||
).br()(E.small(f"Copyright © 2018-{year} Sanic Community Organization")),
|
||||
)
|
||||
return E.div(
|
||||
inner,
|
||||
|
|
|
@ -6,12 +6,9 @@ from webapp.display.layouts.models import MenuItem
|
|||
|
||||
def do_navbar(builder: Builder, request: Request) -> None:
|
||||
navbar_items = [
|
||||
_render_navbar_item(item, request)
|
||||
for item in request.app.config.NAVBAR
|
||||
_render_navbar_item(item, request) for item in request.app.config.NAVBAR
|
||||
]
|
||||
container = E.div(
|
||||
_search_form(request), *navbar_items, class_="navbar-end"
|
||||
)
|
||||
container = E.div(_search_form(request), *navbar_items, class_="navbar-end")
|
||||
|
||||
builder.nav(
|
||||
E.div(container, class_="navbar-menu"),
|
||||
|
@ -46,10 +43,7 @@ def _render_navbar_item(item: MenuItem, request: Request) -> Builder:
|
|||
return E.div(
|
||||
E.a(item.label, class_="navbar-link"),
|
||||
E.div(
|
||||
*(
|
||||
_render_navbar_item(subitem, request)
|
||||
for subitem in item.items
|
||||
),
|
||||
*(_render_navbar_item(subitem, request) for subitem in item.items),
|
||||
class_="navbar-dropdown",
|
||||
),
|
||||
class_="navbar-item has-dropdown is-hoverable",
|
||||
|
|
|
@ -15,9 +15,7 @@ def _menu_items(request: Request) -> list[Builder]:
|
|||
_sanic_logo(request),
|
||||
*_sidebar_items(request),
|
||||
E.hr(),
|
||||
E.p("Current with version ").strong(
|
||||
request.app.config.GENERAL.current_version
|
||||
),
|
||||
E.p("Current with version ").strong(request.app.config.GENERAL.current_version),
|
||||
E.hr(),
|
||||
E.p("Want more? ").a(
|
||||
"sanicbook.com", href="https://sanicbook.com", target="_blank"
|
||||
|
@ -73,9 +71,7 @@ def _single_sidebar_item(item: MenuItem, request: Request) -> Builder:
|
|||
kwargs = {}
|
||||
classes: list[str] = []
|
||||
li_classes = "menu-item"
|
||||
_, page, _ = request.app.ctx.get_page(
|
||||
request.ctx.language, item.path or ""
|
||||
)
|
||||
_, page, _ = request.app.ctx.get_page(request.ctx.language, item.path or "")
|
||||
if request.path == path:
|
||||
classes.append("is-active")
|
||||
if item.href:
|
||||
|
|
|
@ -36,9 +36,9 @@ class DocsRenderer(HTMLRenderer):
|
|||
class_="code-block__copy",
|
||||
onclick="copyCode(this)",
|
||||
):
|
||||
builder.div(
|
||||
class_="code-block__rectangle code-block__filled"
|
||||
).div(class_="code-block__rectangle code-block__outlined")
|
||||
builder.div(class_="code-block__rectangle code-block__filled").div(
|
||||
class_="code-block__rectangle code-block__outlined"
|
||||
)
|
||||
else:
|
||||
builder.pre(E.code(escape(code)))
|
||||
return str(builder)
|
||||
|
@ -46,9 +46,7 @@ class DocsRenderer(HTMLRenderer):
|
|||
def heading(self, text: str, level: int, **attrs) -> str:
|
||||
ident = slugify(text)
|
||||
if level > 1:
|
||||
text += self._make_tag(
|
||||
"a", {"href": f"#{ident}", "class": "anchor"}, "#"
|
||||
)
|
||||
text += self._make_tag("a", {"href": f"#{ident}", "class": "anchor"}, "#")
|
||||
return self._make_tag(
|
||||
f"h{level}", {"id": ident, "class": f"is-size-{level}"}, text
|
||||
)
|
||||
|
@ -92,9 +90,7 @@ class DocsRenderer(HTMLRenderer):
|
|||
def _make_tag(
|
||||
self, tag: str, attributes: dict[str, str], text: str | None = None
|
||||
) -> str:
|
||||
attrs = " ".join(
|
||||
f'{key}="{value}"' for key, value in attributes.items()
|
||||
)
|
||||
attrs = " ".join(f'{key}="{value}"' for key, value in attributes.items())
|
||||
if text is None:
|
||||
return f"<{tag} {attrs} />"
|
||||
return f"<{tag} {attrs}>{text}</{tag}>"
|
||||
|
|
|
@ -119,9 +119,7 @@ def _extract_docobjects(package_name: str) -> dict[str, DocObject]:
|
|||
docstrings = {}
|
||||
package = importlib.import_module(package_name)
|
||||
|
||||
for _, name, _ in pkgutil.walk_packages(
|
||||
package.__path__, package_name + "."
|
||||
):
|
||||
for _, name, _ in pkgutil.walk_packages(package.__path__, package_name + "."):
|
||||
module = importlib.import_module(name)
|
||||
for obj_name, obj in inspect.getmembers(module):
|
||||
if (
|
||||
|
@ -155,9 +153,7 @@ def _docobject_to_html(
|
|||
) -> None:
|
||||
anchor_id = slugify(docobject.full_name.replace(".", "-"))
|
||||
anchor = E.a("#", class_="anchor", href=f"#{anchor_id}")
|
||||
class_name, heading = _define_heading_and_class(
|
||||
docobject, anchor, as_method
|
||||
)
|
||||
class_name, heading = _define_heading_and_class(docobject, anchor, as_method)
|
||||
|
||||
with builder.div(class_=class_name):
|
||||
builder(heading)
|
||||
|
@ -211,9 +207,7 @@ def _docobject_to_html(
|
|||
|
||||
if docobject.docstring.params:
|
||||
with builder.div(class_="box mt-5"):
|
||||
builder.h5(
|
||||
"Parameters", class_="is-size-5 has-text-weight-bold"
|
||||
)
|
||||
builder.h5("Parameters", class_="is-size-5 has-text-weight-bold")
|
||||
_render_params(builder, docobject.docstring.params)
|
||||
|
||||
if docobject.docstring.returns:
|
||||
|
@ -238,9 +232,7 @@ def _signature_to_html(
|
|||
parts = []
|
||||
parts.append("<span class='function-signature'>")
|
||||
for decorator in decorators:
|
||||
parts.append(
|
||||
f"<span class='function-decorator'>@{decorator}</span><br>"
|
||||
)
|
||||
parts.append(f"<span class='function-decorator'>@{decorator}</span><br>")
|
||||
parts.append(
|
||||
f"<span class='is-italic'>{object_type}</span> "
|
||||
f"<span class='has-text-weight-bold'>{name}</span>("
|
||||
|
@ -254,9 +246,7 @@ def _signature_to_html(
|
|||
annotation = ""
|
||||
if param.annotation != inspect.Parameter.empty:
|
||||
annotation = escape(str(param.annotation))
|
||||
parts.append(
|
||||
f": <span class='param-annotation'>{annotation}</span>"
|
||||
)
|
||||
parts.append(f": <span class='param-annotation'>{annotation}</span>")
|
||||
if param.default != inspect.Parameter.empty:
|
||||
default = escape(str(param.default))
|
||||
if annotation == "str":
|
||||
|
@ -267,9 +257,7 @@ def _signature_to_html(
|
|||
parts.append(")")
|
||||
if signature.return_annotation != inspect.Signature.empty:
|
||||
return_annotation = escape(str(signature.return_annotation))
|
||||
parts.append(
|
||||
f": -> <span class='return-annotation'>{return_annotation}</span>"
|
||||
)
|
||||
parts.append(f": -> <span class='return-annotation'>{return_annotation}</span>")
|
||||
parts.append("</span>")
|
||||
return "".join(parts)
|
||||
|
||||
|
@ -317,10 +305,7 @@ def _render_params(builder: Builder, params: list[DocstringParam]) -> None:
|
|||
builder.dd(
|
||||
HTML(
|
||||
render_markdown(
|
||||
param.description
|
||||
or param.arg_name
|
||||
or param.type_name
|
||||
or ""
|
||||
param.description or param.arg_name or param.type_name or ""
|
||||
)
|
||||
)
|
||||
)
|
||||
|
@ -333,11 +318,7 @@ def _render_raises(builder: Builder, raises: list[DocstringRaises]) -> None:
|
|||
with builder.dl(class_="mt-2"):
|
||||
builder.dt(raise_.type_name, class_="is-family-monospace")
|
||||
builder.dd(
|
||||
HTML(
|
||||
render_markdown(
|
||||
raise_.description or raise_.type_name or ""
|
||||
)
|
||||
)
|
||||
HTML(render_markdown(raise_.description or raise_.type_name or ""))
|
||||
)
|
||||
|
||||
|
||||
|
@ -353,11 +334,7 @@ def _render_returns(builder: Builder, docobject: DocObject) -> None:
|
|||
if not return_type or return_type == inspect.Signature.empty:
|
||||
return_type = "N/A"
|
||||
|
||||
term = (
|
||||
"Return"
|
||||
if not docobject.docstring.returns.is_generator
|
||||
else "Yields"
|
||||
)
|
||||
term = "Return" if not docobject.docstring.returns.is_generator else "Yields"
|
||||
builder.h5(term, class_="is-size-5 has-text-weight-bold")
|
||||
with builder.dl(class_="mt-2"):
|
||||
builder.dt(return_type, class_="is-family-monospace")
|
||||
|
@ -372,17 +349,11 @@ def _render_returns(builder: Builder, docobject: DocObject) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _render_examples(
|
||||
builder: Builder, examples: list[DocstringExample]
|
||||
) -> None:
|
||||
def _render_examples(builder: Builder, examples: list[DocstringExample]) -> None:
|
||||
with builder.div(class_="box mt-5"):
|
||||
builder.h5("Examples", class_="is-size-5 has-text-weight-bold")
|
||||
for example in examples:
|
||||
with builder.div(class_="mt-2"):
|
||||
builder(
|
||||
HTML(
|
||||
render_markdown(
|
||||
example.description or example.snippet or ""
|
||||
)
|
||||
)
|
||||
HTML(render_markdown(example.description or example.snippet or ""))
|
||||
)
|
||||
|
|
|
@ -11,9 +11,7 @@ from ..layouts.main import MainLayout
|
|||
from ..markdown import render_markdown
|
||||
from .docobject import organize_docobjects
|
||||
|
||||
_PAGE_CACHE: dict[
|
||||
str, dict[str, tuple[Page | None, Page | None, Page | None]]
|
||||
] = {}
|
||||
_PAGE_CACHE: dict[str, dict[str, tuple[Page | None, Page | None, Page | None]]] = {}
|
||||
_LAYOUTS_CACHE: dict[str, type[BaseLayout]] = {
|
||||
"home": HomeLayout,
|
||||
"main": MainLayout,
|
||||
|
|
|
@ -20,13 +20,9 @@ class PageRenderer(BaseRenderer):
|
|||
self._body(request, builder, language, path)
|
||||
return builder
|
||||
|
||||
def _body(
|
||||
self, request: Request, builder: Builder, language: str, path: str
|
||||
):
|
||||
def _body(self, request: Request, builder: Builder, language: str, path: str):
|
||||
prev_page, current_page, next_page = Page.get(language, path)
|
||||
request.ctx.language = (
|
||||
Page.DEFAULT_LANGUAGE if language == "api" else language
|
||||
)
|
||||
request.ctx.language = Page.DEFAULT_LANGUAGE if language == "api" else language
|
||||
request.ctx.current_page = current_page
|
||||
request.ctx.previous_page = prev_page
|
||||
request.ctx.next_page = next_page
|
||||
|
@ -38,9 +34,7 @@ class PageRenderer(BaseRenderer):
|
|||
|
||||
@contextmanager
|
||||
def _base(self, request: Request, builder: Builder, page: Page | None):
|
||||
layout_type: type[BaseLayout] = (
|
||||
page.get_layout() if page else BaseLayout
|
||||
)
|
||||
layout_type: type[BaseLayout] = page.get_layout() if page else BaseLayout
|
||||
layout = layout_type(builder)
|
||||
with layout(request, builder.full):
|
||||
yield
|
||||
|
|
|
@ -15,9 +15,7 @@ class Attributes(DirectivePlugin):
|
|||
if md.renderer.NAME == "html":
|
||||
md.renderer.register("attrs", self._render)
|
||||
|
||||
def parse(
|
||||
self, block: BlockParser, m: Match, state: BlockState
|
||||
) -> dict[str, Any]:
|
||||
def parse(self, block: BlockParser, m: Match, state: BlockState) -> dict[str, Any]:
|
||||
info = m.groupdict()
|
||||
options = dict(self.parse_options(m))
|
||||
new_state = block.state_cls()
|
||||
|
|
|
@ -10,9 +10,7 @@ from mistune.markdown import Markdown
|
|||
|
||||
|
||||
class Column(DirectivePlugin):
|
||||
def parse(
|
||||
self, block: BlockParser, m: Match, state: BlockState
|
||||
) -> dict[str, Any]:
|
||||
def parse(self, block: BlockParser, m: Match, state: BlockState) -> dict[str, Any]:
|
||||
info = m.groupdict()
|
||||
|
||||
new_state = block.state_cls()
|
||||
|
@ -36,9 +34,7 @@ class Column(DirectivePlugin):
|
|||
|
||||
def _render_column(self, renderer: HTMLRenderer, text: str, **attrs):
|
||||
start = (
|
||||
'<div class="columns mt-3 is-multiline">\n'
|
||||
if attrs.get("first")
|
||||
else ""
|
||||
'<div class="columns mt-3 is-multiline">\n' if attrs.get("first") else ""
|
||||
)
|
||||
end = "</div>\n" if attrs.get("last") else ""
|
||||
col = f'<div class="column is-half">{text}</div>\n'
|
||||
|
|
|
@ -16,16 +16,12 @@ class Hook(DirectivePlugin):
|
|||
for type_ in ("column", "tab"):
|
||||
if token["type"] == type_:
|
||||
maybe_next = (
|
||||
state.tokens[idx + 1]
|
||||
if idx + 1 < len(state.tokens)
|
||||
else None
|
||||
state.tokens[idx + 1] if idx + 1 < len(state.tokens) else None
|
||||
)
|
||||
token.setdefault("attrs", {})
|
||||
if prev and prev["type"] != type_:
|
||||
token["attrs"]["first"] = True
|
||||
if (
|
||||
maybe_next and maybe_next["type"] != type_
|
||||
) or not maybe_next:
|
||||
if (maybe_next and maybe_next["type"] != type_) or not maybe_next:
|
||||
token["attrs"]["last"] = True
|
||||
|
||||
prev = token
|
||||
|
|
|
@ -12,9 +12,7 @@ from mistune.markdown import Markdown
|
|||
|
||||
|
||||
class Mermaid(DirectivePlugin):
|
||||
def parse(
|
||||
self, block: BlockParser, m: Match, state: BlockState
|
||||
) -> dict[str, Any]:
|
||||
def parse(self, block: BlockParser, m: Match, state: BlockState) -> dict[str, Any]:
|
||||
info = m.groupdict()
|
||||
|
||||
new_state = block.state_cls()
|
||||
|
|
|
@ -19,12 +19,8 @@ class Notification(Admonition):
|
|||
|
||||
if md.renderer.NAME == "html":
|
||||
md.renderer.register("admonition", self._render_admonition)
|
||||
md.renderer.register(
|
||||
"admonition_title", self._render_admonition_title
|
||||
)
|
||||
md.renderer.register(
|
||||
"admonition_content", self._render_admonition_content
|
||||
)
|
||||
md.renderer.register("admonition_title", self._render_admonition_title)
|
||||
md.renderer.register("admonition_content", self._render_admonition_content)
|
||||
|
||||
def _render_admonition(self, _, text, name, **attrs) -> str:
|
||||
return str(
|
||||
|
|
|
@ -10,9 +10,7 @@ from mistune.markdown import Markdown
|
|||
|
||||
|
||||
class Tabs(DirectivePlugin):
|
||||
def parse(
|
||||
self, block: BlockParser, m: Match, state: BlockState
|
||||
) -> dict[str, Any]:
|
||||
def parse(self, block: BlockParser, m: Match, state: BlockState) -> dict[str, Any]:
|
||||
info = m.groupdict()
|
||||
|
||||
new_state = block.state_cls()
|
||||
|
@ -41,9 +39,7 @@ class Tabs(DirectivePlugin):
|
|||
def _render_tab(self, renderer: HTMLRenderer, text: str, **attrs):
|
||||
start = '<div class="tabs mt-6"><ul>\n' if attrs.get("first") else ""
|
||||
end = (
|
||||
'</ul></div><div class="tab-display"></div>\n'
|
||||
if attrs.get("last")
|
||||
else ""
|
||||
'</ul></div><div class="tab-display"></div>\n' if attrs.get("last") else ""
|
||||
)
|
||||
content = f'<div class="tab-content">{text}</div>\n'
|
||||
tab = f'<li><a>{attrs["title"]}</a>{content}</li>\n'
|
||||
|
|
|
@@ -92,9 +92,7 @@ def _inverse_document_frequency(docs: list[Document]) -> dict[str, float]:
    return {word: num_docs / count for word, count in word_count.items()}


-def _tf_idf_vector(
-    document: Document, idf: dict[str, float]
-) -> dict[str, float]:
+def _tf_idf_vector(document: Document, idf: dict[str, float]) -> dict[str, float]:
    """Calculate the TF-IDF vector for a document."""
    return {
        word: tf * idf[word]
@@ -103,9 +101,7 @@ def _tf_idf_vector(
    }


-def _cosine_similarity(
-    vec1: dict[str, float], vec2: dict[str, float]
-) -> float:
+def _cosine_similarity(vec1: dict[str, float], vec2: dict[str, float]) -> float:
    """Calculate the cosine similarity between two vectors."""
    if not vec1 or not vec2:
        return 0.0
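The search helpers above operate on sparse word-weight dictionaries. A standalone sketch of the cosine-similarity computation over such dicts (assumed shapes only, not the repository's implementation):

```python
import math


def cosine_similarity(vec1: dict[str, float], vec2: dict[str, float]) -> float:
    # Dot product over the words the two sparse vectors share.
    dot = sum(weight * vec2.get(word, 0.0) for word, weight in vec1.items())
    norm1 = math.sqrt(sum(w * w for w in vec1.values()))
    norm2 = math.sqrt(sum(w * w for w in vec2.values()))
    if not norm1 or not norm2:
        return 0.0
    return dot / (norm1 * norm2)


print(cosine_similarity({"sanic": 1.0, "black": 2.0}, {"black": 2.0}))  # ~0.894
```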
@ -127,9 +123,7 @@ def _search(
|
|||
tf_idf_query = _tf_idf_vector(
|
||||
Document(page=dummy_page, language=language).process(stemmer), idf
|
||||
)
|
||||
similarities = [
|
||||
_cosine_similarity(tf_idf_query, vector) for vector in vectors
|
||||
]
|
||||
similarities = [_cosine_similarity(tf_idf_query, vector) for vector in vectors]
|
||||
return [
|
||||
(similarity, document)
|
||||
for similarity, document in sorted(
|
||||
|
@ -156,16 +150,13 @@ class Searcher:
|
|||
}
|
||||
self._vectors = {
|
||||
language: [
|
||||
_tf_idf_vector(document, self._idf[language])
|
||||
for document in documents
|
||||
_tf_idf_vector(document, self._idf[language]) for document in documents
|
||||
]
|
||||
for language, documents in self._documents.items()
|
||||
}
|
||||
self._stemmer = stemmer
|
||||
|
||||
def search(
|
||||
self, query: str, language: str
|
||||
) -> list[tuple[float, Document]]:
|
||||
def search(self, query: str, language: str) -> list[tuple[float, Document]]:
|
||||
return _search(
|
||||
query,
|
||||
language,
|
||||
|
|
|
@ -28,13 +28,9 @@ def create_app(root: Path) -> Sanic:
|
|||
app.config.STYLE_DIR = root / "style"
|
||||
app.config.NODE_MODULES_DIR = root / "node_modules"
|
||||
app.config.LANGUAGES = ["en"]
|
||||
app.config.SIDEBAR = load_menu(
|
||||
app.config.CONFIG_DIR / "en" / "sidebar.yaml"
|
||||
)
|
||||
app.config.SIDEBAR = load_menu(app.config.CONFIG_DIR / "en" / "sidebar.yaml")
|
||||
app.config.NAVBAR = load_menu(app.config.CONFIG_DIR / "en" / "navbar.yaml")
|
||||
app.config.GENERAL = load_config(
|
||||
app.config.CONFIG_DIR / "en" / "general.yaml"
|
||||
)
|
||||
app.config.GENERAL = load_config(app.config.CONFIG_DIR / "en" / "general.yaml")
|
||||
|
||||
setup_livereload(app)
|
||||
setup_style(app)
|
||||
|
@ -66,8 +62,6 @@ def create_app(root: Path) -> Sanic:
|
|||
|
||||
@app.on_request
|
||||
async def set_language(request: Request):
|
||||
request.ctx.language = request.match_info.get(
|
||||
"language", Page.DEFAULT_LANGUAGE
|
||||
)
|
||||
request.ctx.language = request.match_info.get("language", Page.DEFAULT_LANGUAGE)
|
||||
|
||||
return app
|
||||
|
|
|
@ -53,16 +53,12 @@ class Livereload:
|
|||
"serverName": SERVER_NAME,
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self, reload_queue: Queue, debug: bool, state: dict[str, Any]
|
||||
):
|
||||
def __init__(self, reload_queue: Queue, debug: bool, state: dict[str, Any]):
|
||||
self.reload_queue = reload_queue
|
||||
self.app = Sanic(self.SERVER_NAME)
|
||||
self.debug = debug
|
||||
self.state = state
|
||||
self.app.static(
|
||||
"/livereload.js", Path(__file__).parent / "livereload.js"
|
||||
)
|
||||
self.app.static("/livereload.js", Path(__file__).parent / "livereload.js")
|
||||
self.app.add_websocket_route(
|
||||
self.livereload_handler, "/livereload", name="livereload"
|
||||
)
|
||||
|
@ -108,7 +104,5 @@ class Livereload:
|
|||
break
|
||||
|
||||
|
||||
def _run_reload_server(
|
||||
reload_queue: Queue, debug: bool, state: dict[str, Any]
|
||||
):
|
||||
def _run_reload_server(reload_queue: Queue, debug: bool, state: dict[str, Any]):
|
||||
Livereload(reload_queue, debug, state).run()
|
||||
|
|
sanic/app.py
@@ -309,9 +309,7 @@ class Sanic(
        self.asgi = False
        self.auto_reload = False
        self.blueprints: dict[str, Blueprint] = {}
-        self.certloader_class: type[CertLoader] = (
-            certloader_class or CertLoader
-        )
+        self.certloader_class: type[CertLoader] = certloader_class or CertLoader
        self.configure_logging: bool = configure_logging
        self.ctx: ctx_type = cast(ctx_type, ctx or SimpleNamespace())
        self.error_handler: ErrorHandler = error_handler or ErrorHandler()
@@ -387,15 +385,11 @@ class Sanic(
        try:
            _event = ListenerEvent[event.upper()]
        except (ValueError, AttributeError):
-            valid = ", ".join(
-                x.lower() for x in ListenerEvent.__members__.keys()
-            )
+            valid = ", ".join(x.lower() for x in ListenerEvent.__members__.keys())
            raise BadRequest(f"Invalid event: {event}. Use one of: {valid}")

        if "." in _event:
-            self.signal(_event.value)(
-                partial(self._listener, listener=listener)
-            )
+            self.signal(_event.value)(partial(self._listener, listener=listener))
        else:
            self.listeners[_event.value].append(listener)

@@ -522,9 +516,7 @@ class Sanic(
    def _apply_listener(self, listener: FutureListener):
        return self.register_listener(listener.listener, listener.event)

-    def _apply_route(
-        self, route: FutureRoute, overwrite: bool = False
-    ) -> list[Route]:
+    def _apply_route(self, route: FutureRoute, overwrite: bool = False) -> list[Route]:
        params = route._asdict()
        params["overwrite"] = overwrite
        websocket = params.pop("websocket", False)
@ -653,9 +645,7 @@ class Sanic(
|
|||
fail_not_found=fail_not_found,
|
||||
)
|
||||
|
||||
async def event(
|
||||
self, event: str, timeout: int | float | None = None
|
||||
) -> None:
|
||||
async def event(self, event: str, timeout: int | float | None = None) -> None:
|
||||
"""Wait for a specific event to be triggered.
|
||||
|
||||
This method waits for a named event to be triggered and can be used
|
||||
|
@ -740,9 +730,7 @@ class Sanic(
|
|||
async def report(exception: Exception) -> None:
|
||||
await handler(self, exception)
|
||||
|
||||
self.add_signal(
|
||||
handler=report, event=Event.SERVER_EXCEPTION_REPORT.value
|
||||
)
|
||||
self.add_signal(handler=report, event=Event.SERVER_EXCEPTION_REPORT.value)
|
||||
|
||||
return report
|
||||
|
||||
|
@ -831,14 +819,12 @@ class Sanic(
|
|||
|
||||
for _attr in ["version", "strict_slashes"]:
|
||||
if getattr(item, _attr) is None:
|
||||
params[_attr] = getattr(
|
||||
blueprint, _attr
|
||||
) or options.get(_attr)
|
||||
params[_attr] = getattr(blueprint, _attr) or options.get(
|
||||
_attr
|
||||
)
|
||||
if item.version_prefix == "/v":
|
||||
if blueprint.version_prefix == "/v":
|
||||
params["version_prefix"] = options.get(
|
||||
"version_prefix"
|
||||
)
|
||||
params["version_prefix"] = options.get("version_prefix")
|
||||
else:
|
||||
params["version_prefix"] = blueprint.version_prefix
|
||||
name_prefix = getattr(blueprint, "name_prefix", None)
|
||||
|
@ -855,10 +841,7 @@ class Sanic(
|
|||
self.blueprints[blueprint.name] = blueprint
|
||||
self._blueprint_order.append(blueprint)
|
||||
|
||||
if (
|
||||
self.strict_slashes is not None
|
||||
and blueprint.strict_slashes is None
|
||||
):
|
||||
if self.strict_slashes is not None and blueprint.strict_slashes is None:
|
||||
blueprint.strict_slashes = self.strict_slashes
|
||||
blueprint.register(self, options)
|
||||
|
||||
|
@ -928,9 +911,7 @@ class Sanic(
|
|||
|
||||
route = self.router.find_route_by_view_name(view_name, **kw)
|
||||
if not route:
|
||||
raise URLBuildError(
|
||||
f"Endpoint with name `{view_name}` was not found"
|
||||
)
|
||||
raise URLBuildError(f"Endpoint with name `{view_name}` was not found")
|
||||
|
||||
uri = route.path
|
||||
|
||||
|
@ -969,9 +950,7 @@ class Sanic(
|
|||
scheme = kwargs.pop("_scheme", "")
|
||||
if route.extra.hosts and external:
|
||||
if not host and len(route.extra.hosts) > 1:
|
||||
raise ValueError(
|
||||
f"Host is ambiguous: {', '.join(route.extra.hosts)}"
|
||||
)
|
||||
raise ValueError(f"Host is ambiguous: {', '.join(route.extra.hosts)}")
|
||||
elif host and host not in route.extra.hosts:
|
||||
raise ValueError(
|
||||
f"Requested host ({host}) is not available for this "
|
||||
|
@ -1087,10 +1066,7 @@ class Sanic(
|
|||
context={"request": request, "exception": exception},
|
||||
)
|
||||
|
||||
if (
|
||||
request.stream is not None
|
||||
and request.stream.stage is not Stage.HANDLER
|
||||
):
|
||||
if request.stream is not None and request.stream.stage is not Stage.HANDLER:
|
||||
error_logger.exception(exception, exc_info=True)
|
||||
logger.error(
|
||||
"The error response will not be sent to the client for "
|
||||
|
@ -1137,10 +1113,7 @@ class Sanic(
|
|||
response = self.error_handler.default(request, e)
|
||||
elif self.debug:
|
||||
response = HTTPResponse(
|
||||
(
|
||||
f"Error while handling error: {e}\n"
|
||||
f"Stack: {format_exc()}"
|
||||
),
|
||||
(f"Error while handling error: {e}\n" f"Stack: {format_exc()}"),
|
||||
status=500,
|
||||
)
|
||||
else:
|
||||
|
@ -1185,9 +1158,7 @@ class Sanic(
|
|||
)
|
||||
await response.eof()
|
||||
else:
|
||||
raise ServerError(
|
||||
f"Invalid response type {response!r} (need HTTPResponse)"
|
||||
)
|
||||
raise ServerError(f"Invalid response type {response!r} (need HTTPResponse)")
|
||||
|
||||
async def handle_request(self, request: Request) -> None: # no cov
|
||||
"""Handles a request by dispatching it to the appropriate handler.
|
||||
|
@ -1334,17 +1305,14 @@ class Sanic(
|
|||
else:
|
||||
if not hasattr(handler, "is_websocket"):
|
||||
raise ServerError(
|
||||
f"Invalid response type {response!r} "
|
||||
"(need HTTPResponse)"
|
||||
f"Invalid response type {response!r} " "(need HTTPResponse)"
|
||||
)
|
||||
|
||||
except CancelledError: # type: ignore
|
||||
raise
|
||||
except Exception as e:
|
||||
# Response Generation Failed
|
||||
await self.handle_exception(
|
||||
request, e, run_middleware=run_middleware
|
||||
)
|
||||
await self.handle_exception(request, e, run_middleware=run_middleware)
|
||||
|
||||
async def _websocket_handler(
|
||||
self, handler, request, *args, subprotocols=None, **kwargs
|
||||
|
@ -1423,9 +1391,7 @@ class Sanic(
|
|||
# Execution
|
||||
# -------------------------------------------------------------------- #
|
||||
|
||||
async def _run_request_middleware(
|
||||
self, request, middleware_collection
|
||||
): # no cov
|
||||
async def _run_request_middleware(self, request, middleware_collection): # no cov
|
||||
request._request_middleware_started = True
|
||||
|
||||
for middleware in middleware_collection:
|
||||
|
@ -1502,9 +1468,7 @@ class Sanic(
|
|||
task.cancel()
|
||||
|
||||
@staticmethod
|
||||
async def _listener(
|
||||
app: Sanic, loop: AbstractEventLoop, listener: ListenerType
|
||||
):
|
||||
async def _listener(app: Sanic, loop: AbstractEventLoop, listener: ListenerType):
|
||||
try:
|
||||
maybe_coro = listener(app) # type: ignore
|
||||
except TypeError:
|
||||
|
@ -1533,9 +1497,7 @@ class Sanic(
|
|||
if isawaitable(task):
|
||||
await task
|
||||
except CancelledError:
|
||||
error_logger.warning(
|
||||
f"Task {task} was cancelled before it completed."
|
||||
)
|
||||
error_logger.warning(f"Task {task} was cancelled before it completed.")
|
||||
raise
|
||||
except Exception as e:
|
||||
await app.dispatch(
|
||||
|
@ -1644,18 +1606,14 @@ class Sanic(
|
|||
""" # noqa: E501
|
||||
try:
|
||||
loop = self.loop # Will raise SanicError if loop is not started
|
||||
return self._loop_add_task(
|
||||
task, self, loop, name=name, register=register
|
||||
)
|
||||
return self._loop_add_task(task, self, loop, name=name, register=register)
|
||||
except SanicException:
|
||||
task_name = f"sanic.delayed_task.{hash(task)}"
|
||||
if not self._delayed_tasks:
|
||||
self.after_server_start(partial(self.dispatch_delayed_tasks))
|
||||
|
||||
if name:
|
||||
raise RuntimeError(
|
||||
"Cannot name task outside of a running application"
|
||||
)
|
||||
raise RuntimeError("Cannot name task outside of a running application")
|
||||
|
||||
self.signal(task_name)(partial(self.run_delayed_task, task=task))
|
||||
self._delayed_tasks.append(task_name)
|
||||
|
@@ -1666,18 +1624,14 @@ class Sanic(
        ...

    @overload
-    def get_task(
-        self, name: str, *, raise_exception: Literal[False]
-    ) -> Task | None:
+    def get_task(self, name: str, *, raise_exception: Literal[False]) -> Task | None:
        ...

    @overload
    def get_task(self, name: str, *, raise_exception: bool) -> Task | None:
        ...

-    def get_task(
-        self, name: str, *, raise_exception: bool = True
-    ) -> Task | None:
+    def get_task(self, name: str, *, raise_exception: bool = True) -> Task | None:
        """Get a named task.

        This method is used to get a task by its name. Optionally, you can
@@ -1695,9 +1649,7 @@ class Sanic(
            return self._task_registry[name]
        except KeyError:
            if raise_exception:
-                raise SanicException(
-                    f'Registered task named "{name}" not found.'
-                )
+                raise SanicException(f'Registered task named "{name}" not found.')
            return None

    async def cancel_task(
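The signatures above belong to Sanic's named-task API. A short hedged usage sketch (the task body and the names are invented for illustration):

```python
import asyncio

from sanic import Sanic

app = Sanic("TaskDemo")


async def heartbeat():
    while True:
        await asyncio.sleep(5)


@app.before_server_start
async def setup(app, _):
    # Register a named background task once the loop exists...
    app.add_task(heartbeat(), name="heartbeat")
    # ...and fetch it back later by that name.
    task = app.get_task("heartbeat")
    assert task is not None
```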
@@ -1809,11 +1761,7 @@ class Sanic(
            Iterable[Task[Any]]: The tasks that are currently registered with
                the application.
        """
-        return (
-            task
-            for task in iter(self._task_registry.values())
-            if task is not None
-        )
+        return (task for task in iter(self._task_registry.values()) if task is not None)

    # -------------------------------------------------------------------- #
    # ASGI
@@ -2055,9 +2003,7 @@ class Sanic(
        del cls._app_registry[name]

    @classmethod
-    def get_app(
-        cls, name: str | None = None, *, force_create: bool = False
-    ) -> Sanic:
+    def get_app(cls, name: str | None = None, *, force_create: bool = False) -> Sanic:
        """Retrieve an instantiated Sanic instance by name.

        This method is best used when needing to get access to an already
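A short usage sketch for the classmethod above (the application names are illustrative):

```python
from sanic import Sanic

app = Sanic("MyApp")

# Elsewhere in a codebase, look the instance up by name instead of importing it.
same_app = Sanic.get_app("MyApp")
assert same_app is app

# force_create=True returns a fresh instance when no app with that name exists yet.
lazy_app = Sanic.get_app("NotCreatedYet", force_create=True)
```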
@ -2264,9 +2210,7 @@ class Sanic(
|
|||
self.finalize()
|
||||
|
||||
route_names = [route.extra.ident for route in self.router.routes]
|
||||
duplicates = {
|
||||
name for name in route_names if route_names.count(name) > 1
|
||||
}
|
||||
duplicates = {name for name in route_names if route_names.count(name) > 1}
|
||||
if duplicates:
|
||||
names = ", ".join(duplicates)
|
||||
message = (
|
||||
|
@ -2311,9 +2255,7 @@ class Sanic(
|
|||
"shutdown",
|
||||
):
|
||||
raise SanicException(f"Invalid server event: {event}")
|
||||
logger.debug(
|
||||
f"Triggering server events: {event}", extra={"verbosity": 1}
|
||||
)
|
||||
logger.debug(f"Triggering server events: {event}", extra={"verbosity": 1})
|
||||
reverse = concern == "shutdown"
|
||||
if loop is None:
|
||||
loop = self.loop
|
||||
|
@ -2379,9 +2321,7 @@ class Sanic(
|
|||
Inspector: An instance of Inspector.
|
||||
"""
|
||||
if environ.get("SANIC_WORKER_PROCESS") or not self._inspector:
|
||||
raise SanicException(
|
||||
"Can only access the inspector from the main process"
|
||||
)
|
||||
raise SanicException("Can only access the inspector from the main process")
|
||||
return self._inspector
|
||||
|
||||
@property
|
||||
|
@ -2414,7 +2354,5 @@ class Sanic(
|
|||
"""
|
||||
|
||||
if environ.get("SANIC_WORKER_PROCESS") or not self._manager:
|
||||
raise SanicException(
|
||||
"Can only access the manager from the main process"
|
||||
)
|
||||
raise SanicException("Can only access the manager from the main process")
|
||||
return self._manager
|
||||
|
|
|
@ -61,10 +61,7 @@ def get_logo(full: bool = False, coffee: bool = False) -> str:
|
|||
else BASE_LOGO
|
||||
)
|
||||
|
||||
if (
|
||||
sys.platform == "darwin"
|
||||
and environ.get("TERM_PROGRAM") == "Apple_Terminal"
|
||||
):
|
||||
if sys.platform == "darwin" and environ.get("TERM_PROGRAM") == "Apple_Terminal":
|
||||
logo = ansi_pattern.sub("", logo)
|
||||
|
||||
return logo
|
||||
|
|
|
@ -79,9 +79,7 @@ class MOTDTTY(MOTD):
|
|||
def set_variables(self): # no cov
|
||||
"""Set the variables used for display."""
|
||||
fallback = (108, 24)
|
||||
terminal_width = max(
|
||||
get_terminal_size(fallback=fallback).columns, fallback[0]
|
||||
)
|
||||
terminal_width = max(get_terminal_size(fallback=fallback).columns, fallback[0])
|
||||
self.max_value_width = terminal_width - fallback[0] + 36
|
||||
|
||||
self.key_width = 4
|
||||
|
|
|
@ -52,13 +52,9 @@ class Spinner: # noqa
|
|||
if os.name == "nt":
|
||||
ci = _CursorInfo()
|
||||
handle = ctypes.windll.kernel32.GetStdHandle(-11)
|
||||
ctypes.windll.kernel32.GetConsoleCursorInfo(
|
||||
handle, ctypes.byref(ci)
|
||||
)
|
||||
ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci))
|
||||
ci.visible = False
|
||||
ctypes.windll.kernel32.SetConsoleCursorInfo(
|
||||
handle, ctypes.byref(ci)
|
||||
)
|
||||
ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci))
|
||||
elif os.name == "posix":
|
||||
sys.stdout.write("\033[?25l")
|
||||
sys.stdout.flush()
|
||||
|
@ -68,13 +64,9 @@ class Spinner: # noqa
|
|||
if os.name == "nt":
|
||||
ci = _CursorInfo()
|
||||
handle = ctypes.windll.kernel32.GetStdHandle(-11)
|
||||
ctypes.windll.kernel32.GetConsoleCursorInfo(
|
||||
handle, ctypes.byref(ci)
|
||||
)
|
||||
ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci))
|
||||
ci.visible = True
|
||||
ctypes.windll.kernel32.SetConsoleCursorInfo(
|
||||
handle, ctypes.byref(ci)
|
||||
)
|
||||
ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci))
|
||||
elif os.name == "posix":
|
||||
sys.stdout.write("\033[?25h")
|
||||
sys.stdout.flush()
|
||||
|
|
|
@ -62,9 +62,7 @@ class ApplicationState:
|
|||
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
if self._init and name == "_init":
|
||||
raise RuntimeError(
|
||||
"Cannot change the value of _init after instantiation"
|
||||
)
|
||||
raise RuntimeError("Cannot change the value of _init after instantiation")
|
||||
super().__setattr__(name, value)
|
||||
if self._init and hasattr(self, f"set_{name}"):
|
||||
getattr(self, f"set_{name}")(value)
|
||||
|
@ -105,9 +103,7 @@ class ApplicationState:
|
|||
|
||||
if all(info.stage is ServerStage.SERVING for info in self.server_info):
|
||||
return ServerStage.SERVING
|
||||
elif any(
|
||||
info.stage is ServerStage.SERVING for info in self.server_info
|
||||
):
|
||||
elif any(info.stage is ServerStage.SERVING for info in self.server_info):
|
||||
return ServerStage.PARTIAL
|
||||
|
||||
return ServerStage.STOPPED
|
||||
|
|
|
@ -140,9 +140,7 @@ class ASGIApp:
|
|||
]
|
||||
)
|
||||
except UnicodeDecodeError:
|
||||
raise BadRequest(
|
||||
"Header names can only contain US-ASCII characters"
|
||||
)
|
||||
raise BadRequest("Header names can only contain US-ASCII characters")
|
||||
|
||||
if scope["type"] == "http":
|
||||
version = scope["http_version"]
|
||||
|
@ -151,9 +149,7 @@ class ASGIApp:
|
|||
version = "1.1"
|
||||
method = "GET"
|
||||
|
||||
instance.ws = instance.transport.create_websocket_connection(
|
||||
send, receive
|
||||
)
|
||||
instance.ws = instance.transport.create_websocket_connection(send, receive)
|
||||
else:
|
||||
raise ServerError("Received unknown ASGI scope")
|
||||
|
||||
|
|
|
@ -24,9 +24,7 @@ class BaseSanic(
|
|||
):
|
||||
__slots__ = ("name",)
|
||||
|
||||
def __init__(
|
||||
self, name: Optional[str] = None, *args: Any, **kwargs: Any
|
||||
) -> None:
|
||||
def __init__(self, name: Optional[str] = None, *args: Any, **kwargs: Any) -> None:
|
||||
class_name = self.__class__.__name__
|
||||
|
||||
if name is None:
|
||||
|
|
|
@ -128,9 +128,7 @@ class Blueprint(BaseSanic):
|
|||
self.host = host
|
||||
self.strict_slashes = strict_slashes
|
||||
self.url_prefix = (
|
||||
url_prefix[:-1]
|
||||
if url_prefix and url_prefix.endswith("/")
|
||||
else url_prefix
|
||||
url_prefix[:-1] if url_prefix and url_prefix.endswith("/") else url_prefix
|
||||
)
|
||||
self.version = version
|
||||
self.version_prefix = version_prefix
|
||||
|
@ -164,9 +162,7 @@ class Blueprint(BaseSanic):
|
|||
an app.
|
||||
"""
|
||||
if not self._apps:
|
||||
raise SanicException(
|
||||
f"{self} has not yet been registered to an app"
|
||||
)
|
||||
raise SanicException(f"{self} has not yet been registered to an app")
|
||||
return self._apps
|
||||
|
||||
@property
|
||||
|
@ -345,9 +341,7 @@ class Blueprint(BaseSanic):
|
|||
opt_strict_slashes = options.get("strict_slashes", None)
|
||||
opt_version_prefix = options.get("version_prefix", self.version_prefix)
|
||||
opt_name_prefix = options.get("name_prefix", None)
|
||||
error_format = options.get(
|
||||
"error_format", app.config.FALLBACK_ERROR_FORMAT
|
||||
)
|
||||
error_format = options.get("error_format", app.config.FALLBACK_ERROR_FORMAT)
|
||||
|
||||
routes = []
|
||||
middleware = []
|
||||
|
@ -373,9 +367,7 @@ class Blueprint(BaseSanic):
|
|||
version_prefix = prefix
|
||||
break
|
||||
|
||||
version = self._extract_value(
|
||||
future.version, opt_version, self.version
|
||||
)
|
||||
version = self._extract_value(future.version, opt_version, self.version)
|
||||
strict_slashes = self._extract_value(
|
||||
future.strict_slashes, opt_strict_slashes, self.strict_slashes
|
||||
)
|
||||
|
@ -411,22 +403,16 @@ class Blueprint(BaseSanic):
|
|||
continue
|
||||
|
||||
registered.add(apply_route)
|
||||
route = app._apply_route(
|
||||
apply_route, overwrite=self._allow_route_overwrite
|
||||
)
|
||||
route = app._apply_route(apply_route, overwrite=self._allow_route_overwrite)
|
||||
|
||||
# If it is a copied BP, then make sure all of the names of routes
|
||||
# matchup with the new BP name
|
||||
if self.copied_from:
|
||||
for r in route:
|
||||
r.name = r.name.replace(self.copied_from, self.name)
|
||||
r.extra.ident = r.extra.ident.replace(
|
||||
self.copied_from, self.name
|
||||
)
|
||||
r.extra.ident = r.extra.ident.replace(self.copied_from, self.name)
|
||||
|
||||
operation = (
|
||||
routes.extend if isinstance(route, list) else routes.append
|
||||
)
|
||||
operation = routes.extend if isinstance(route, list) else routes.append
|
||||
operation(route)
|
||||
|
||||
# Static Files
|
||||
|
@ -504,9 +490,7 @@ class Blueprint(BaseSanic):
|
|||
condition = kwargs.pop("condition", {})
|
||||
condition.update({"__blueprint__": self.name})
|
||||
kwargs["condition"] = condition
|
||||
await asyncio.gather(
|
||||
*[app.dispatch(*args, **kwargs) for app in self.apps]
|
||||
)
|
||||
await asyncio.gather(*[app.dispatch(*args, **kwargs) for app in self.apps])
|
||||
|
||||
def event(self, event: str, timeout: int | float | None = None):
|
||||
"""Wait for a signal event to be dispatched.
|
||||
|
@ -747,9 +731,7 @@ class BlueprintGroup(bpg_base):
|
|||
def __getitem__(self, item: slice) -> MutableSequence[Blueprint]:
|
||||
...
|
||||
|
||||
def __getitem__(
|
||||
self, item: int | slice
|
||||
) -> Blueprint | MutableSequence[Blueprint]:
|
||||
def __getitem__(self, item: int | slice) -> Blueprint | MutableSequence[Blueprint]:
|
||||
"""Get the Blueprint object at the specified index.
|
||||
|
||||
This method returns a blueprint inside the group specified by
|
||||
|
|
|
@ -56,9 +56,7 @@ Or, a path to a directory to run as a simple HTTP server:
|
|||
)
|
||||
self.parser._positionals.title = "Required\n========\n Positional"
|
||||
self.parser._optionals.title = "Optional\n========\n General"
|
||||
self.main_process = (
|
||||
os.environ.get("SANIC_RELOADER_PROCESS", "") != "true"
|
||||
)
|
||||
self.main_process = os.environ.get("SANIC_RELOADER_PROCESS", "") != "true"
|
||||
self.args: Namespace = Namespace()
|
||||
self.groups: List[Group] = []
|
||||
self.inspecting = False
|
||||
|
@ -126,11 +124,7 @@ Or, a path to a directory to run as a simple HTTP server:
|
|||
key = key.lstrip("-")
|
||||
except ValueError:
|
||||
value = False if arg.startswith("--no-") else True
|
||||
key = (
|
||||
arg.replace("--no-", "")
|
||||
.lstrip("-")
|
||||
.replace("-", "_")
|
||||
)
|
||||
key = arg.replace("--no-", "").lstrip("-").replace("-", "_")
|
||||
setattr(self.args, key, value)
|
||||
|
||||
kwargs = {**self.args.__dict__}
|
||||
|
@ -180,8 +174,7 @@ Or, a path to a directory to run as a simple HTTP server:
|
|||
" Example Module: project.sanic_server.app"
|
||||
)
|
||||
error_logger.error(
|
||||
"\nThe error below might have caused the above one:\n"
|
||||
f"{e.msg}"
|
||||
"\nThe error below might have caused the above one:\n" f"{e.msg}"
|
||||
)
|
||||
sys.exit(1)
|
||||
else:
|
||||
|
|
|
@ -244,10 +244,7 @@ class DevelopmentGroup(Group):
|
|||
"--auto-reload",
|
||||
dest="auto_reload",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Watch source directory for file changes and reload on "
|
||||
"changes"
|
||||
),
|
||||
help=("Watch source directory for file changes and reload on " "changes"),
|
||||
)
|
||||
self.container.add_argument(
|
||||
"-R",
|
||||
|
|
|
@ -45,11 +45,7 @@ class InspectorClient:
|
|||
return
|
||||
result = self.request(action, **kwargs).get("result")
|
||||
if result:
|
||||
out = (
|
||||
dumps(result)
|
||||
if isinstance(result, (list, dict))
|
||||
else str(result)
|
||||
)
|
||||
out = dumps(result) if isinstance(result, (list, dict)) else str(result)
|
||||
sys.stdout.write(out + "\n")
|
||||
|
||||
def info(self) -> None:
|
||||
|
|
|
@ -12,9 +12,7 @@ from multidict import CIMultiDict # type: ignore
|
|||
from sanic.helpers import Default
|
||||
from sanic.log import error_logger
|
||||
|
||||
StartMethod = Union[
|
||||
Default, Literal["fork"], Literal["forkserver"], Literal["spawn"]
|
||||
]
|
||||
StartMethod = Union[Default, Literal["fork"], Literal["forkserver"], Literal["spawn"]]
|
||||
|
||||
OS_IS_WINDOWS = os.name == "nt"
|
||||
PYPY_IMPLEMENTATION = platform.python_implementation() == "PyPy"
|
||||
|
|
|
@ -227,9 +227,7 @@ class Config(dict, metaclass=DescriptorMeta):
|
|||
if attr == "LOCAL_CERT_CREATOR" and not isinstance(
|
||||
self.LOCAL_CERT_CREATOR, LocalCertCreator
|
||||
):
|
||||
self.LOCAL_CERT_CREATOR = LocalCertCreator[
|
||||
self.LOCAL_CERT_CREATOR.upper()
|
||||
]
|
||||
self.LOCAL_CERT_CREATOR = LocalCertCreator[self.LOCAL_CERT_CREATOR.upper()]
|
||||
elif attr == "DEPRECATION_FILTER":
|
||||
self._configure_warnings()
|
||||
|
||||
|
|
|
@ -147,9 +147,7 @@ class CookieRequestParameters(RequestParameters):
|
|||
except KeyError:
|
||||
return super().get(name, default)
|
||||
|
||||
def getlist(
|
||||
self, name: str, default: Optional[Any] = None
|
||||
) -> Optional[Any]:
|
||||
def getlist(self, name: str, default: Optional[Any] = None) -> Optional[Any]:
|
||||
try:
|
||||
return self._get_prefixed_cookie(name)
|
||||
except KeyError:
|
||||
|
|
|
@ -496,9 +496,7 @@ class Cookie(dict):
|
|||
"Cannot set host_prefix on a cookie without secure=True"
|
||||
)
|
||||
if path != "/":
|
||||
raise ServerError(
|
||||
"Cannot set host_prefix on a cookie unless path='/'"
|
||||
)
|
||||
raise ServerError("Cannot set host_prefix on a cookie unless path='/'")
|
||||
if domain:
|
||||
raise ServerError(
|
||||
"Cannot set host_prefix on a cookie with a defined domain"
|
||||
|
@ -600,9 +598,7 @@ class Cookie(dict):
|
|||
"""Format as a Set-Cookie header value."""
|
||||
output = [f"{self.key}={_quote(self.value)}"]
|
||||
key_index = list(self._keys)
|
||||
for key, value in sorted(
|
||||
self.items(), key=lambda x: key_index.index(x[0])
|
||||
):
|
||||
for key, value in sorted(self.items(), key=lambda x: key_index.index(x[0])):
|
||||
if value is not None and value is not False:
|
||||
if key == "max-age":
|
||||
try:
|
||||
|
|
|
@ -190,8 +190,7 @@ class TextRenderer(BaseRenderer):
|
|||
lines += [
|
||||
f"{self.exception.__class__.__name__}: {self.exception} while "
|
||||
f"handling path {self.request.path}",
|
||||
f"Traceback of {self.request.app.name} "
|
||||
"(most recent call last):\n",
|
||||
f"Traceback of {self.request.app.name} " "(most recent call last):\n",
|
||||
]
|
||||
|
||||
while exc_value:
|
||||
|
@ -388,9 +387,7 @@ def guess_mime(req: Request, fallback: str) -> str:
|
|||
if m:
|
||||
format = CONFIG_BY_MIME[m.mime]
|
||||
source = formats[format]
|
||||
logger.debug(
|
||||
f"The client accepts {m.header}, using '{format}' from {source}"
|
||||
)
|
||||
logger.debug(f"The client accepts {m.header}, using '{format}' from {source}")
|
||||
else:
|
||||
logger.debug(f"No format found, the client accepts {req.accept!r}")
|
||||
return m.mime
|
||||
|
|
|
@ -69,9 +69,7 @@ class SanicException(Exception):
|
|||
) -> None:
|
||||
self.context = context
|
||||
self.extra = extra
|
||||
status_code = status_code or getattr(
|
||||
self.__class__, "status_code", None
|
||||
)
|
||||
status_code = status_code or getattr(self.__class__, "status_code", None)
|
||||
quiet = quiet or getattr(self.__class__, "quiet", None)
|
||||
headers = headers or getattr(self.__class__, "headers", {})
|
||||
if message is None:
|
||||
|
|
|
@@ -36,20 +36,14 @@ class ContentRangeHandler(Range):
        try:
            self.start = int(start_b) if start_b else None
        except ValueError:
-            raise RangeNotSatisfiable(
-                f"'{start_b}' is invalid for Content Range", self
-            )
+            raise RangeNotSatisfiable(f"'{start_b}' is invalid for Content Range", self)
        try:
            self.end = int(end_b) if end_b else None
        except ValueError:
-            raise RangeNotSatisfiable(
-                f"'{end_b}' is invalid for Content Range", self
-            )
+            raise RangeNotSatisfiable(f"'{end_b}' is invalid for Content Range", self)
        if self.end is None:
            if self.start is None:
-                raise RangeNotSatisfiable(
-                    "Invalid for Content Range parameters", self
-                )
+                raise RangeNotSatisfiable("Invalid for Content Range parameters", self)
            else:
                # this case represents `Content-Range: bytes 5-`
                self.end = self.total - 1
@@ -59,13 +53,9 @@ class ContentRangeHandler(Range):
                self.start = self.total - self.end
                self.end = self.total - 1
        if self.start >= self.end:
-            raise RangeNotSatisfiable(
-                "Invalid for Content Range parameters", self
-            )
+            raise RangeNotSatisfiable("Invalid for Content Range parameters", self)
        self.size = self.end - self.start + 1
-        self.headers = {
-            "Content-Range": f"bytes {self.start}-{self.end}/{self.total}"
-        }
+        self.headers = {"Content-Range": f"bytes {self.start}-{self.end}/{self.total}"}

    def __bool__(self):
        return hasattr(self, "size") and self.size > 0
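The handler above answers HTTP range requests; the arithmetic it encodes is small enough to show directly (a standalone sketch, not the class itself):

```python
def content_range_header(start: int, end: int, total: int) -> tuple[int, str]:
    # A satisfied range is inclusive on both ends, so its size is end - start + 1.
    size = end - start + 1
    return size, f"bytes {start}-{end}/{total}"


# e.g. the first kilobyte of a 10 000-byte file
print(content_range_header(0, 1023, 10000))  # (1024, 'bytes 0-1023/10000')
```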
@ -60,9 +60,7 @@ class DirectoryHandler:
|
|||
return await file(index_file)
|
||||
|
||||
if self.directory_view:
|
||||
return self._index(
|
||||
self.directory / current, path, request.app.debug
|
||||
)
|
||||
return self._index(self.directory / current, path, request.app.debug)
|
||||
|
||||
if self.index:
|
||||
raise NotFound("File not found")
|
||||
|
@ -72,9 +70,7 @@ class DirectoryHandler:
|
|||
def _index(self, location: Path, path: str, debug: bool):
|
||||
# Remove empty path elements, append slash
|
||||
if "//" in path or not path.endswith("/"):
|
||||
return redirect(
|
||||
"/" + "".join([f"{p}/" for p in path.split("/") if p])
|
||||
)
|
||||
return redirect("/" + "".join([f"{p}/" for p in path.split("/") if p]))
|
||||
|
||||
# Render file browser
|
||||
page = DirectoryPage(self._iter_files(location), path, debug)
|
||||
|
@ -83,9 +79,7 @@ class DirectoryHandler:
|
|||
def _prepare_file(self, path: Path) -> dict[str, int | str]:
|
||||
stat = path.stat()
|
||||
modified = (
|
||||
datetime.fromtimestamp(stat.st_mtime)
|
||||
.isoformat()[:19]
|
||||
.replace("T", " ")
|
||||
datetime.fromtimestamp(stat.st_mtime).isoformat()[:19].replace("T", " ")
|
||||
)
|
||||
is_dir = S_ISDIR(stat.st_mode)
|
||||
icon = "📁" if is_dir else "📄"
|
||||
|
|
|
@ -96,9 +96,7 @@ class ErrorHandler:
|
|||
exception_key = (ancestor, name)
|
||||
if exception_key in self.cached_handlers:
|
||||
handler = self.cached_handlers[exception_key]
|
||||
self.cached_handlers[
|
||||
(exception_class, route_name)
|
||||
] = handler
|
||||
self.cached_handlers[(exception_class, route_name)] = handler
|
||||
return handler
|
||||
|
||||
if ancestor is BaseException:
|
||||
|
@ -196,6 +194,4 @@ class ErrorHandler:
|
|||
except AttributeError: # no cov
|
||||
url = "unknown"
|
||||
|
||||
error_logger.exception(
|
||||
"Exception occurred while handling uri: %s", url
|
||||
)
|
||||
error_logger.exception("Exception occurred while handling uri: %s", url)
|
||||
|
|
|
@ -19,9 +19,7 @@ _token, _quoted = r"([\w!#$%&'*+\-.^_`|~]+)", r'"([^"]*)"'
|
|||
_param = re.compile(rf";\s*{_token}=(?:{_token}|{_quoted})", re.ASCII)
|
||||
_ipv6 = "(?:[0-9A-Fa-f]{0,4}:){2,7}[0-9A-Fa-f]{0,4}"
|
||||
_ipv6_re = re.compile(_ipv6)
|
||||
_host_re = re.compile(
|
||||
r"((?:\[" + _ipv6 + r"\])|[a-zA-Z0-9.\-]{1,253})(?::(\d{1,5}))?"
|
||||
)
|
||||
_host_re = re.compile(r"((?:\[" + _ipv6 + r"\])|[a-zA-Z0-9.\-]{1,253})(?::(\d{1,5}))?")
|
||||
|
||||
# RFC's quoted-pair escapes are mostly ignored by browsers. Chrome, Firefox and
|
||||
# curl all have different escaping, that we try to handle as well as possible,
|
||||
|
@ -122,9 +120,7 @@ class MediaType:
|
|||
or mt.subtype == "*"
|
||||
)
|
||||
# Type match
|
||||
and (
|
||||
self.type == mt.type or self.type == "*" or mt.type == "*"
|
||||
)
|
||||
and (self.type == mt.type or self.type == "*" or mt.type == "*")
|
||||
)
|
||||
else None
|
||||
)
|
||||
|
@@ -312,9 +308,7 @@ def parse_accept(accept: str | None) -> AcceptList:
        accept = "*/*"  # No header means that all types are accepted
    try:
        a = [
-            mt
-            for mt in [MediaType._parse(mtype) for mtype in accept.split(",")]
-            if mt
+            mt for mt in [MediaType._parse(mtype) for mtype in accept.split(",")] if mt
        ]
        if not a:
            raise ValueError
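A brief illustration of what this helper parses; the exact objects returned depend on the AcceptList and MediaType classes defined earlier in the module, and the header string here is invented:

```python
from sanic.headers import parse_accept

accepted = parse_accept("text/html, application/json;q=0.9, */*;q=0.1")
for media_type in accepted:
    # Entries come back with their q-values taken into account.
    print(media_type)
```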
@ -411,11 +405,7 @@ def parse_xforwarded(headers, config) -> Options | None:
|
|||
# Combine, split and filter multiple headers' entries
|
||||
forwarded_for = headers.getall(config.FORWARDED_FOR_HEADER)
|
||||
proxies = [
|
||||
p
|
||||
for p in (
|
||||
p.strip() for h in forwarded_for for p in h.split(",")
|
||||
)
|
||||
if p
|
||||
p for p in (p.strip() for h in forwarded_for for p in h.split(",")) if p
|
||||
]
|
||||
addr = proxies[-proxies_count]
|
||||
except (KeyError, IndexError):
|
||||
|
|
|
@@ -361,26 +361,20 @@ class Http(Stream, metaclass=TouchUpMeta):
        self.response_func = None
        self.stage = Stage.IDLE

-    async def http1_response_chunked(
-        self, data: bytes, end_stream: bool
-    ) -> None:
+    async def http1_response_chunked(self, data: bytes, end_stream: bool) -> None:
        """Format a part of response body in chunked encoding."""
        # Chunked encoding
        size = len(data)
        if end_stream:
            await self._send(
-                b"%x\r\n%b\r\n0\r\n\r\n" % (size, data)
-                if size
-                else b"0\r\n\r\n"
+                b"%x\r\n%b\r\n0\r\n\r\n" % (size, data) if size else b"0\r\n\r\n"
            )
            self.response_func = None
            self.stage = Stage.IDLE
        elif size:
            await self._send(b"%x\r\n%b\r\n" % (size, data))

-    async def http1_response_normal(
-        self, data: bytes, end_stream: bool
-    ) -> None:
+    async def http1_response_normal(self, data: bytes, end_stream: bool) -> None:
        """Format / keep track of non-chunked response."""
        bytes_left = self.response_bytes_left - len(data)
        if bytes_left <= 0:
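The chunked branch above writes HTTP/1.1 chunked-transfer frames. A standalone sketch of the framing it produces (the payloads are arbitrary, and the empty non-final case is simplified here):

```python
def chunk(data: bytes, end_stream: bool) -> bytes:
    size = len(data)
    if end_stream:
        # Final call: emit the last data chunk (if any) plus the zero-length terminator.
        return b"%x\r\n%b\r\n0\r\n\r\n" % (size, data) if size else b"0\r\n\r\n"
    return b"%x\r\n%b\r\n" % (size, data) if size else b""


print(chunk(b"hello", end_stream=False))  # b'5\r\nhello\r\n'
print(chunk(b"", end_stream=True))        # b'0\r\n\r\n'
```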
@ -418,9 +412,7 @@ class Http(Stream, metaclass=TouchUpMeta):
|
|||
exception, (ServiceUnavailable, RequestCancelled)
|
||||
)
|
||||
try:
|
||||
await app.handle_exception(
|
||||
self.request, exception, request_middleware
|
||||
)
|
||||
await app.handle_exception(self.request, exception, request_middleware)
|
||||
except Exception as e:
|
||||
await app.handle_exception(self.request, e, False)
|
||||
|
||||
|
|
|
@ -65,10 +65,7 @@ class HTTP3Transport(TransportProtocol):
|
|||
return self._protocol
|
||||
|
||||
def get_extra_info(self, info: str, default: Any = None) -> Any:
|
||||
if (
|
||||
info in ("socket", "sockname", "peername")
|
||||
and self._protocol._transport
|
||||
):
|
||||
if info in ("socket", "sockname", "peername") and self._protocol._transport:
|
||||
return self._protocol._transport.get_extra_info(info, default)
|
||||
elif info == "network_paths":
|
||||
return self._protocol._quic._network_paths
|
||||
|
@ -114,8 +111,7 @@ class HTTPReceiver(Receiver, Stream):
|
|||
|
||||
if exception:
|
||||
logger.info( # no cov
|
||||
f"{Colors.BLUE}[exception]: "
|
||||
f"{Colors.RED}{exception}{Colors.END}",
|
||||
f"{Colors.BLUE}[exception]: " f"{Colors.RED}{exception}{Colors.END}",
|
||||
exc_info=True,
|
||||
extra={"verbosity": 1},
|
||||
)
|
||||
|
@ -140,17 +136,13 @@ class HTTPReceiver(Receiver, Stream):
|
|||
|
||||
await app.handle_exception(self.request, exception)
|
||||
|
||||
def _prepare_headers(
|
||||
self, response: BaseHTTPResponse
|
||||
) -> list[tuple[bytes, bytes]]:
|
||||
def _prepare_headers(self, response: BaseHTTPResponse) -> list[tuple[bytes, bytes]]:
|
||||
size = len(response.body) if response.body else 0
|
||||
headers = response.headers
|
||||
status = response.status
|
||||
|
||||
if not has_message_body(status) and (
|
||||
size
|
||||
or "content-length" in headers
|
||||
or "transfer-encoding" in headers
|
||||
size or "content-length" in headers or "transfer-encoding" in headers
|
||||
):
|
||||
headers.pop("content-length", None)
|
||||
headers.pop("transfer-encoding", None)
|
||||
|
@ -243,11 +235,7 @@ class HTTPReceiver(Receiver, Stream):
|
|||
):
|
||||
size = len(data)
|
||||
if end_stream:
|
||||
data = (
|
||||
b"%x\r\n%b\r\n0\r\n\r\n" % (size, data)
|
||||
if size
|
||||
else b"0\r\n\r\n"
|
||||
)
|
||||
data = b"%x\r\n%b\r\n0\r\n\r\n" % (size, data) if size else b"0\r\n\r\n"
|
||||
elif size:
|
||||
data = b"%x\r\n%b\r\n" % (size, data)
|
||||
|
||||
|
@ -325,10 +313,7 @@ class Http3:
)

def get_or_make_receiver(self, event: H3Event) -> tuple[Receiver, bool]:
if (
isinstance(event, HeadersReceived)
and event.stream_id not in self.receivers
):
if isinstance(event, HeadersReceived) and event.stream_id not in self.receivers:
request = self._make_request(event)
receiver = HTTPReceiver(self.transmit, self.protocol, request)
request.stream = receiver
@ -351,9 +336,7 @@ class Http3:
)
)
except UnicodeDecodeError:
raise BadRequest(
"Header names may only contain US-ASCII characters."
)
raise BadRequest("Header names may only contain US-ASCII characters.")
method = headers[":method"]
path = headers[":path"]
scheme = headers.pop(":scheme", "")
@ -422,8 +405,6 @@ def get_config(app: Sanic, ssl: SanicSSLContext | CertSelector | SSLContext):
)
password = app.config.TLS_CERT_PASSWORD or None

config.load_cert_chain(
ssl.sanic["cert"], ssl.sanic["key"], password=password
)
config.load_cert_chain(ssl.sanic["cert"], ssl.sanic["key"], password=password)

return config

@ -20,7 +20,5 @@ class Stream:
__touchup__: tuple[str, ...] = ()
__slots__ = ("request_max_size",)

def respond(
self, response: BaseHTTPResponse
) -> BaseHTTPResponse: # no cov
def respond(self, response: BaseHTTPResponse) -> BaseHTTPResponse: # no cov
raise NotImplementedError("Not implemented")

@ -69,13 +69,9 @@ def load_cert_dir(p: str) -> ssl.SSLContext:
keyfile = os.path.join(p, "privkey.pem")
certfile = os.path.join(p, "fullchain.pem")
if not os.access(keyfile, os.R_OK):
raise ValueError(
f"Certificate not found or permission denied {keyfile}"
)
raise ValueError(f"Certificate not found or permission denied {keyfile}")
if not os.access(certfile, os.R_OK):
raise ValueError(
f"Certificate not found or permission denied {certfile}"
)
raise ValueError(f"Certificate not found or permission denied {certfile}")
return CertSimple(certfile, keyfile)

@ -87,9 +83,7 @@ def find_cert(self: CertSelector, server_name: str):
if not server_name:
if self.sanic_fallback:
return self.sanic_fallback
raise ValueError(
"The client provided no SNI to match for certificate."
)
raise ValueError("The client provided no SNI to match for certificate.")
for ctx in self.sanic_select:
if match_hostname(ctx, server_name):
return ctx
@ -162,9 +156,7 @@ class CertSimple(SanicSSLContext):
if "names" not in kw:
cert = ssl._ssl._test_decode_cert(certfile) # type: ignore
kw["names"] = [
name
for t, name in cert["subjectAltName"]
if t in ["DNS", "IP Address"]
name for t, name in cert["subjectAltName"] if t in ["DNS", "IP Address"]
]
subject = {k: v for item in cert["subject"] for k, v in item}
self = create_context(certfile, keyfile, password)
@ -201,7 +193,5 @@ class CertSelector(ssl.SSLContext):
if i == 0:
self.sanic_fallback = ctx
if not all_names:
raise ValueError(
"No certificates with SubjectAlternativeNames found."
)
raise ValueError("No certificates with SubjectAlternativeNames found.")
logger.info(f"Certificate vhosts: {', '.join(all_names)}")

@ -92,16 +92,8 @@ class CertCreator(ABC):
if isinstance(self.key, Default) or isinstance(self.cert, Default):
self.tmpdir = Path(mkdtemp())

key = (
DEFAULT_LOCAL_TLS_KEY
if isinstance(self.key, Default)
else self.key
)
cert = (
DEFAULT_LOCAL_TLS_CERT
if isinstance(self.cert, Default)
else self.cert
)
key = DEFAULT_LOCAL_TLS_KEY if isinstance(self.key, Default) else self.key
cert = DEFAULT_LOCAL_TLS_CERT if isinstance(self.cert, Default) else self.cert

self.key_path = _make_path(key, self.tmpdir)
self.cert_path = _make_path(cert, self.tmpdir)
@ -124,9 +116,7 @@ class CertCreator(ABC):
) -> CertCreator:
creator: CertCreator | None = None

cert_creator_options: tuple[
tuple[type[CertCreator], LocalCertCreator], ...
] = (
cert_creator_options: tuple[tuple[type[CertCreator], LocalCertCreator], ...] = (
(MkcertCreator, LocalCertCreator.MKCERT),
(TrustmeCreator, LocalCertCreator.TRUSTME),
)

@ -79,9 +79,7 @@ class ExceptionMixin(metaclass=SanicMeta):

return decorator

def all_exceptions(
self, handler: Callable[..., Any]
) -> Callable[..., Any]:
def all_exceptions(self, handler: Callable[..., Any]) -> Callable[..., Any]:
"""Enables the process of creating a global exception handler as a convenience.

This following two examples are equivalent:

@ -120,16 +120,12 @@ class ListenerMixin(metaclass=SanicMeta):

if callable(listener_or_event):
if event_or_none is None:
raise BadRequest(
"Invalid event registration: Missing event name."
)
raise BadRequest("Invalid event registration: Missing event name.")
return register_listener(listener_or_event, event_or_none)
else:
return partial(register_listener, event=listener_or_event)

def main_process_start(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def main_process_start(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the main_process_start event.

This event is fired only on the main process and **NOT** on any
@ -151,9 +147,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "main_process_start")

def main_process_ready(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def main_process_ready(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the main_process_ready event.

This event is fired only on the main process and **NOT** on any
@ -176,9 +170,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "main_process_ready")

def main_process_stop(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def main_process_stop(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the main_process_stop event.

This event is fired only on the main process and **NOT** on any
@ -222,9 +214,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "reload_process_start")

def reload_process_stop(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def reload_process_stop(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the reload_process_stop event.

This event is fired only on the reload process and **NOT** on any
@ -293,9 +283,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "after_reload_trigger")

def before_server_start(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def before_server_start(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the before_server_start event.

This event is fired on all worker processes. You should typically
@ -319,9 +307,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "before_server_start")

def after_server_start(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def after_server_start(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the after_server_start event.

This event is fired on all worker processes. You should typically
@ -349,9 +335,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "after_server_start")

def before_server_stop(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def before_server_stop(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the before_server_stop event.

This event is fired on all worker processes. This event is fired
@ -376,9 +360,7 @@ class ListenerMixin(metaclass=SanicMeta):
""" # noqa: E501
return self.listener(listener, "before_server_stop")

def after_server_stop(
self, listener: ListenerType[Sanic]
) -> ListenerType[Sanic]:
def after_server_stop(self, listener: ListenerType[Sanic]) -> ListenerType[Sanic]:
"""Decorator for registering a listener for the after_server_stop event.

This event is fired on all worker processes. This event is fired

@ -99,13 +99,9 @@ class MiddlewareMixin(metaclass=SanicMeta):
|
|||
|
||||
# Detect which way this was called, @middleware or @middleware('AT')
|
||||
if callable(middleware_or_request):
|
||||
return register_middleware(
|
||||
middleware_or_request, attach_to=attach_to
|
||||
)
|
||||
return register_middleware(middleware_or_request, attach_to=attach_to)
|
||||
else:
|
||||
return partial(
|
||||
register_middleware, attach_to=middleware_or_request
|
||||
)
|
||||
return partial(register_middleware, attach_to=middleware_or_request)
|
||||
|
||||
def on_request(self, middleware=None, *, priority=0) -> MiddlewareType:
|
||||
"""Register a middleware to be called before a request is handled.
|
||||
|
@ -157,9 +153,7 @@ class MiddlewareMixin(metaclass=SanicMeta):
|
|||
if callable(middleware):
|
||||
return self.middleware(middleware, "response", priority=priority)
|
||||
else:
|
||||
return partial(
|
||||
self.middleware, attach_to="response", priority=priority
|
||||
)
|
||||
return partial(self.middleware, attach_to="response", priority=priority)
|
||||
|
||||
def finalize_middleware(self) -> None:
|
||||
"""Finalize the middleware configuration for the Sanic application.
|
||||
|
|
|
@ -25,9 +25,7 @@ from sanic.models.futures import FutureRoute, FutureStatic
|
|||
from sanic.models.handler_types import RouteHandler
|
||||
from sanic.types import HashableDict
|
||||
|
||||
RouteWrapper = Callable[
|
||||
[RouteHandler], Union[RouteHandler, Tuple[Route, RouteHandler]]
|
||||
]
|
||||
RouteWrapper = Callable[[RouteHandler], Union[RouteHandler, Tuple[Route, RouteHandler]]]
|
||||
|
||||
|
||||
class RouteMixin(BaseMixin, metaclass=SanicMeta):
|
||||
|
@ -814,7 +812,5 @@ class RouteMixin(BaseMixin, metaclass=SanicMeta):
|
|||
}
|
||||
if raw:
|
||||
unexpected_arguments = ", ".join(raw.keys())
|
||||
raise TypeError(
|
||||
f"Unexpected keyword arguments: {unexpected_arguments}"
|
||||
)
|
||||
raise TypeError(f"Unexpected keyword arguments: {unexpected_arguments}")
|
||||
return HashableDict(ctx_kwargs)
|
||||
|
|
|
@ -92,9 +92,7 @@ class SignalMixin(metaclass=SanicMeta):
|
|||
...
|
||||
|
||||
handler = noop
|
||||
self.signal(event=event, condition=condition, exclusive=exclusive)(
|
||||
handler
|
||||
)
|
||||
self.signal(event=event, condition=condition, exclusive=exclusive)(handler)
|
||||
return handler
|
||||
|
||||
def event(self, event: str):
|
||||
|
|
|
@ -100,8 +100,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
"""
|
||||
if not self.asgi:
|
||||
if self.config.USE_UVLOOP is True or (
|
||||
isinstance(self.config.USE_UVLOOP, Default)
|
||||
and not OS_IS_WINDOWS
|
||||
isinstance(self.config.USE_UVLOOP, Default) and not OS_IS_WINDOWS
|
||||
):
|
||||
try_use_uvloop()
|
||||
elif OS_IS_WINDOWS:
|
||||
|
@ -373,8 +372,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
|
||||
if single_process and (fast or (workers > 1) or auto_reload):
|
||||
raise RuntimeError(
|
||||
"Single process cannot be run with multiple workers "
|
||||
"or auto-reload"
|
||||
"Single process cannot be run with multiple workers " "or auto-reload"
|
||||
)
|
||||
|
||||
if register_sys_signals is False and not single_process:
|
||||
|
@ -393,9 +391,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
for directory in reload_dir:
|
||||
direc = Path(directory)
|
||||
if not direc.is_dir():
|
||||
logger.warning(
|
||||
f"Directory {directory} could not be located"
|
||||
)
|
||||
logger.warning(f"Directory {directory} could not be located")
|
||||
self.state.reload_dirs.add(Path(directory))
|
||||
|
||||
if loop is not None:
|
||||
|
@ -410,9 +406,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
host, port = self.get_address(host, port, version, auto_tls)
|
||||
|
||||
if protocol is None:
|
||||
protocol = (
|
||||
WebSocketProtocol if self.websocket_enabled else HttpProtocol
|
||||
)
|
||||
protocol = WebSocketProtocol if self.websocket_enabled else HttpProtocol
|
||||
|
||||
# Set explicitly passed configuration values
|
||||
for attribute, value in {
|
||||
|
@ -448,9 +442,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
register_sys_signals=register_sys_signals,
|
||||
auto_tls=auto_tls,
|
||||
)
|
||||
self.state.server_info.append(
|
||||
ApplicationServerInfo(settings=server_settings)
|
||||
)
|
||||
self.state.server_info.append(ApplicationServerInfo(settings=server_settings))
|
||||
|
||||
# if self.config.USE_UVLOOP is True or (
|
||||
# self.config.USE_UVLOOP is _default and not OS_IS_WINDOWS
|
||||
|
@ -546,9 +538,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
host, port = host, port = self.get_address(host, port)
|
||||
|
||||
if protocol is None:
|
||||
protocol = (
|
||||
WebSocketProtocol if self.websocket_enabled else HttpProtocol
|
||||
)
|
||||
protocol = WebSocketProtocol if self.websocket_enabled else HttpProtocol
|
||||
|
||||
# Set explicitly passed configuration values
|
||||
for attribute, value in {
|
||||
|
@ -790,10 +780,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
reload_display += ", ".join(
|
||||
[
|
||||
"",
|
||||
*(
|
||||
str(path.absolute())
|
||||
for path in self.state.reload_dirs
|
||||
),
|
||||
*(str(path.absolute()) for path in self.state.reload_dirs),
|
||||
]
|
||||
)
|
||||
display["auto-reload"] = reload_display
|
||||
|
@ -832,9 +819,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
return f"http://<{location}>"
|
||||
|
||||
@staticmethod
|
||||
def get_server_location(
|
||||
server_settings: dict[str, Any] | None = None
|
||||
) -> str:
|
||||
def get_server_location(server_settings: dict[str, Any] | None = None) -> str:
|
||||
"""Using the server settings, retrieve the server location.
|
||||
|
||||
Args:
|
||||
|
@ -901,9 +886,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
@classmethod
|
||||
def _get_startup_method(cls) -> str:
|
||||
return (
|
||||
cls.start_method
|
||||
if not isinstance(cls.start_method, Default)
|
||||
else "spawn"
|
||||
cls.start_method if not isinstance(cls.start_method, Default) else "spawn"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -984,9 +967,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
try:
|
||||
primary = apps[0]
|
||||
except IndexError:
|
||||
raise RuntimeError(
|
||||
"Did not find any applications."
|
||||
) from None
|
||||
raise RuntimeError("Did not find any applications.") from None
|
||||
|
||||
# This exists primarily for unit testing
|
||||
if not primary.state.server_info: # no cov
|
||||
|
@ -1089,9 +1070,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
inspector = None
|
||||
if primary.config.INSPECTOR:
|
||||
display, extra = primary.get_motd_data()
|
||||
packages = [
|
||||
pkg.strip() for pkg in display["packages"].split(",")
|
||||
]
|
||||
packages = [pkg.strip() for pkg in display["packages"].split(",")]
|
||||
module = import_module("sanic")
|
||||
sanic_version = f"sanic=={module.__version__}" # type: ignore
|
||||
app_info = {
|
||||
|
@ -1122,9 +1101,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
exit_code = 1
|
||||
except BaseException:
|
||||
kwargs = primary_server_info.settings
|
||||
error_logger.exception(
|
||||
"Experienced exception while trying to serve"
|
||||
)
|
||||
error_logger.exception("Experienced exception while trying to serve")
|
||||
raise
|
||||
finally:
|
||||
logger.info("Server Stopped")
|
||||
|
@ -1230,9 +1207,7 @@ class StartupMixin(metaclass=SanicMeta):
|
|||
try:
|
||||
worker_serve(monitor_publisher=None, **kwargs)
|
||||
except BaseException:
|
||||
error_logger.exception(
|
||||
"Experienced exception while trying to serve"
|
||||
)
|
||||
error_logger.exception("Experienced exception while trying to serve")
|
||||
raise
|
||||
finally:
|
||||
logger.info("Server Stopped")
|
||||
|
|
|
@ -205,17 +205,12 @@ class StaticHandleMixin(metaclass=SanicMeta):
|
|||
)
|
||||
uri = uri.rstrip("/")
|
||||
uri += "/<__file_uri__:path>"
|
||||
elif static.resource_type == "file" and not path.isfile(
|
||||
file_or_directory
|
||||
):
|
||||
elif static.resource_type == "file" and not path.isfile(file_or_directory):
|
||||
raise TypeError(
|
||||
"Resource type improperly identified as file. "
|
||||
f"'{file_or_directory}'"
|
||||
"Resource type improperly identified as file. " f"'{file_or_directory}'"
|
||||
)
|
||||
elif static.resource_type != "file":
|
||||
raise ValueError(
|
||||
"The resource_type should be set to 'file' or 'dir'"
|
||||
)
|
||||
raise ValueError("The resource_type should be set to 'file' or 'dir'")
|
||||
|
||||
# special prefix for static files
|
||||
# if not static.name.startswith("_static_"):
|
||||
|
@ -278,9 +273,7 @@ class StaticHandleMixin(metaclass=SanicMeta):
|
|||
response = await validate_file(request.headers, modified_since)
|
||||
if response:
|
||||
return response
|
||||
headers["Last-Modified"] = formatdate(
|
||||
modified_since, usegmt=True
|
||||
)
|
||||
headers["Last-Modified"] = formatdate(modified_since, usegmt=True)
|
||||
_range = None
|
||||
if use_content_range:
|
||||
_range = None
|
||||
|
|
|
@ -47,9 +47,7 @@ class MockProtocol: # no cov
|
|||
class MockTransport(TransportProtocol): # no cov
|
||||
_protocol: Optional[MockProtocol]
|
||||
|
||||
def __init__(
|
||||
self, scope: ASGIScope, receive: ASGIReceive, send: ASGISend
|
||||
) -> None:
|
||||
def __init__(self, scope: ASGIScope, receive: ASGIReceive, send: ASGISend) -> None:
|
||||
self.scope = scope
|
||||
self._receive = receive
|
||||
self._send = send
|
||||
|
@ -61,9 +59,7 @@ class MockTransport(TransportProtocol): # no cov
|
|||
self._protocol = MockProtocol(self, self.loop)
|
||||
return self._protocol
|
||||
|
||||
def get_extra_info(
|
||||
self, info: str, default=None
|
||||
) -> Optional[Union[str, bool]]:
|
||||
def get_extra_info(self, info: str, default=None) -> Optional[Union[str, bool]]:
|
||||
if info == "peername":
|
||||
return self.scope.get("client")
|
||||
elif info == "sslcontext":
|
||||
|
|
|
@ -12,9 +12,7 @@ MiddlewareResponse = Union[
|
|||
Optional[HTTPResponse], Coroutine[Any, Any, Optional[HTTPResponse]]
|
||||
]
|
||||
RequestMiddlewareType = Callable[[Request], MiddlewareResponse]
|
||||
ResponseMiddlewareType = Callable[
|
||||
[Request, BaseHTTPResponse], MiddlewareResponse
|
||||
]
|
||||
ResponseMiddlewareType = Callable[[Request, BaseHTTPResponse], MiddlewareResponse]
|
||||
ErrorMiddlewareType = Callable[
|
||||
[Request, BaseException], Optional[Coroutine[Any, Any, None]]
|
||||
]
|
||||
|
|
|
@ -27,8 +27,6 @@ class CSS(ABCMeta):
|
|||
Page.STYLE += attrs.get("STYLE_APPEND", "")
|
||||
# Combine with all ancestor styles
|
||||
Page.CSS = "".join(
|
||||
Class.STYLE
|
||||
for Class in reversed(Page.__mro__)
|
||||
if type(Class) is CSS
|
||||
Class.STYLE for Class in reversed(Page.__mro__) if type(Class) is CSS
|
||||
)
|
||||
return Page
|
||||
|
|
|
@ -19,9 +19,7 @@ class DirectoryPage(BasePage): # no cov
|
|||
|
||||
TITLE = "Directory Viewer"
|
||||
|
||||
def __init__(
|
||||
self, files: Iterable[FileInfo], url: str, debug: bool
|
||||
) -> None:
|
||||
def __init__(self, files: Iterable[FileInfo], url: str, debug: bool) -> None:
|
||||
super().__init__(debug)
|
||||
self.files = files
|
||||
self.url = url
|
||||
|
|
|
@ -37,9 +37,7 @@ class ErrorPage(BasePage):
|
|||
if name.islower():
|
||||
name = name.title()
|
||||
self.TITLE = f"Application {name} cannot handle your request"
|
||||
self.HEADING = E("Application ").strong(name)(
|
||||
" cannot handle your request"
|
||||
)
|
||||
self.HEADING = E("Application ").strong(name)(" cannot handle your request")
|
||||
self.title = title
|
||||
self.text = text
|
||||
self.request = request
|
||||
|
@ -58,9 +56,7 @@ class ErrorPage(BasePage):
|
|||
# Show context details if available on the exception
|
||||
context = getattr(self.exc, "context", None)
|
||||
if context:
|
||||
self._key_value_table(
|
||||
"Issue context", "exception-context", context
|
||||
)
|
||||
self._key_value_table("Issue context", "exception-context", context)
|
||||
|
||||
if not debug:
|
||||
with self.doc.div(id="enduser"):
|
||||
|
@ -72,19 +68,13 @@ class ErrorPage(BasePage):
|
|||
# Show extra details if available on the exception
|
||||
extra = getattr(self.exc, "extra", None)
|
||||
if extra:
|
||||
self._key_value_table(
|
||||
"Issue extra data", "exception-extra", extra
|
||||
)
|
||||
self._key_value_table("Issue extra data", "exception-extra", extra)
|
||||
|
||||
self.doc.summary(
|
||||
"Details for developers (Sanic debug mode only)"
|
||||
)
|
||||
self.doc.summary("Details for developers (Sanic debug mode only)")
|
||||
if self.exc:
|
||||
with self.doc.div(class_="exception-wrapper"):
|
||||
self.doc.h2(f"Exception in {route_name}:")
|
||||
self.doc(
|
||||
html_traceback(self.exc, include_js_css=False)
|
||||
)
|
||||
self.doc(html_traceback(self.exc, include_js_css=False))
|
||||
|
||||
self._key_value_table(
|
||||
f"{self.request.method} {self.request.path}",
|
||||
|
@ -104,6 +94,4 @@ class ErrorPage(BasePage):
|
|||
value = str(value)
|
||||
except Exception:
|
||||
value = E.em("Unable to display value")
|
||||
self.doc.dt.span(key, class_="nobr key").span(": ").dd(
|
||||
value
|
||||
)
|
||||
self.doc.dt.span(key, class_="nobr key").span(": ").dd(value)
|
||||
|
|
|
@ -60,9 +60,7 @@ def parse_multipart_form(body, boundary):
|
|||
colon_index = form_line.index(":")
|
||||
idx = colon_index + 2
|
||||
form_header_field = form_line[0:colon_index].lower()
|
||||
form_header_value, form_parameters = parse_content_header(
|
||||
form_line[idx:]
|
||||
)
|
||||
form_header_value, form_parameters = parse_content_header(form_line[idx:])
|
||||
|
||||
if form_header_field == "content-disposition":
|
||||
field_name = form_parameters.get("name")
|
||||
|
@ -97,9 +95,7 @@ def parse_multipart_form(body, boundary):
|
|||
else:
|
||||
fields[field_name] = [value]
|
||||
else:
|
||||
form_file = File(
|
||||
type=content_type, name=file_name, body=post_data
|
||||
)
|
||||
form_file = File(type=content_type, name=file_name, body=post_data)
|
||||
if field_name in files:
|
||||
files[field_name].append(form_file)
|
||||
else:
|
||||
|
|
|
@ -279,9 +279,7 @@ class Request(Generic[sanic_type, ctx_type]):
|
|||
int: The HTTP/3 stream ID.
|
||||
"""
|
||||
if self.protocol.version is not HTTP.VERSION_3:
|
||||
raise ServerError(
|
||||
"Stream ID is only a property of a HTTP/3 request"
|
||||
)
|
||||
raise ServerError("Stream ID is only a property of a HTTP/3 request")
|
||||
return self._stream_id
|
||||
|
||||
def reset_response(self) -> None:
|
||||
|
@ -296,10 +294,7 @@ class Request(Generic[sanic_type, ctx_type]):
|
|||
sent.
|
||||
"""
|
||||
try:
|
||||
if (
|
||||
self.stream is not None
|
||||
and self.stream.stage is not Stage.HANDLER
|
||||
):
|
||||
if self.stream is not None and self.stream.stage is not Stage.HANDLER:
|
||||
raise ServerError(
|
||||
"Cannot reset response because previous response was sent."
|
||||
)
|
||||
|
@ -622,9 +617,7 @@ class Request(Generic[sanic_type, ctx_type]):
|
|||
pass
|
||||
return self.parsed_credentials
|
||||
|
||||
def get_form(
|
||||
self, keep_blank_values: bool = False
|
||||
) -> RequestParameters | None:
|
||||
def get_form(self, keep_blank_values: bool = False) -> RequestParameters | None:
|
||||
"""Method to extract and parse the form data from a request.
|
||||
|
||||
Args:
|
||||
|
@ -635,9 +628,7 @@ class Request(Generic[sanic_type, ctx_type]):
|
|||
""" # noqa: E501
|
||||
self.parsed_form = RequestParameters()
|
||||
self.parsed_files = RequestParameters()
|
||||
content_type = self.headers.getone(
|
||||
"content-type", DEFAULT_HTTP_CONTENT_TYPE
|
||||
)
|
||||
content_type = self.headers.getone("content-type", DEFAULT_HTTP_CONTENT_TYPE)
|
||||
content_type, parameters = parse_content_header(content_type)
|
||||
try:
|
||||
if content_type == "application/x-www-form-urlencoded":
|
||||
|
@ -735,9 +726,7 @@ class Request(Generic[sanic_type, ctx_type]):
|
|||
)
|
||||
)
|
||||
|
||||
return self.parsed_args[
|
||||
(keep_blank_values, strict_parsing, encoding, errors)
|
||||
]
|
||||
return self.parsed_args[(keep_blank_values, strict_parsing, encoding, errors)]
|
||||
|
||||
args = property(get_args)
|
||||
"""Convenience property to access `Request.get_args` with default values.
|
||||
|
@ -990,9 +979,7 @@ class Request(Generic[sanic_type, ctx_type]):
|
|||
server_name = self.app.config.get("SERVER_NAME")
|
||||
if server_name:
|
||||
return server_name.split("//", 1)[-1].split("/", 1)[0]
|
||||
return str(
|
||||
self.forwarded.get("host") or self.headers.getone("host", "")
|
||||
)
|
||||
return str(self.forwarded.get("host") or self.headers.getone("host", ""))
|
||||
|
||||
@property
|
||||
def server_name(self) -> str:
|
||||
|
|
|
@ -18,9 +18,7 @@ from sanic.models.protocol_types import HTMLProtocol, Range
|
|||
from .types import HTTPResponse, JSONResponse, ResponseStream
|
||||
|
||||
|
||||
def empty(
|
||||
status: int = 204, headers: dict[str, str] | None = None
|
||||
) -> HTTPResponse:
|
||||
def empty(status: int = 204, headers: dict[str, str] | None = None) -> HTTPResponse:
|
||||
"""Returns an empty response to the client.
|
||||
|
||||
Args:
|
||||
|
@ -85,13 +83,9 @@ def text(
|
|||
TypeError: If the body is not a string.
|
||||
""" # noqa: E501
|
||||
if not isinstance(body, str):
|
||||
raise TypeError(
|
||||
f"Bad body type. Expected str, got {type(body).__name__})"
|
||||
)
|
||||
raise TypeError(f"Bad body type. Expected str, got {type(body).__name__})")
|
||||
|
||||
return HTTPResponse(
|
||||
body, status=status, headers=headers, content_type=content_type
|
||||
)
|
||||
return HTTPResponse(body, status=status, headers=headers, content_type=content_type)
|
||||
|
||||
|
||||
def raw(
|
||||
|
@ -178,18 +172,14 @@ async def validate_file(
|
|||
float(last_modified), tz=timezone.utc
|
||||
).replace(microsecond=0)
|
||||
|
||||
if (
|
||||
last_modified.utcoffset() is None
|
||||
and if_modified_since.utcoffset() is not None
|
||||
):
|
||||
if last_modified.utcoffset() is None and if_modified_since.utcoffset() is not None:
|
||||
logger.warning(
|
||||
"Cannot compare tz-aware and tz-naive datetimes. To avoid "
|
||||
"this conflict Sanic is converting last_modified to UTC."
|
||||
)
|
||||
last_modified.replace(tzinfo=timezone.utc)
|
||||
elif (
|
||||
last_modified.utcoffset() is not None
|
||||
and if_modified_since.utcoffset() is None
|
||||
last_modified.utcoffset() is not None and if_modified_since.utcoffset() is None
|
||||
):
|
||||
logger.warning(
|
||||
"Cannot compare tz-aware and tz-naive datetimes. To avoid "
|
||||
|
@ -240,25 +230,17 @@ async def file(
|
|||
stat = await stat_async(location)
|
||||
last_modified = stat.st_mtime
|
||||
|
||||
if (
|
||||
validate_when_requested
|
||||
and request_headers is not None
|
||||
and last_modified
|
||||
):
|
||||
if validate_when_requested and request_headers is not None and last_modified:
|
||||
response = await validate_file(request_headers, last_modified)
|
||||
if response:
|
||||
return response
|
||||
|
||||
headers = headers or {}
|
||||
if last_modified:
|
||||
headers.setdefault(
|
||||
"Last-Modified", formatdate(last_modified, usegmt=True)
|
||||
)
|
||||
headers.setdefault("Last-Modified", formatdate(last_modified, usegmt=True))
|
||||
|
||||
if filename:
|
||||
headers.setdefault(
|
||||
"Content-Disposition", f'attachment; filename="{filename}"'
|
||||
)
|
||||
headers.setdefault("Content-Disposition", f'attachment; filename="{filename}"')
|
||||
|
||||
if no_store:
|
||||
cache_control = "no-store"
|
||||
|
@ -323,9 +305,7 @@ def redirect(
|
|||
# According to RFC 7231, a relative URI is now permitted.
|
||||
headers["Location"] = safe_to
|
||||
|
||||
return HTTPResponse(
|
||||
status=status, headers=headers, content_type=content_type
|
||||
)
|
||||
return HTTPResponse(status=status, headers=headers, content_type=content_type)
|
||||
|
||||
|
||||
async def file_stream(
|
||||
|
@ -357,9 +337,7 @@ async def file_stream(
|
|||
""" # noqa: E501
|
||||
headers = headers or {}
|
||||
if filename:
|
||||
headers.setdefault(
|
||||
"Content-Disposition", f'attachment; filename="{filename}"'
|
||||
)
|
||||
headers.setdefault("Content-Disposition", f'attachment; filename="{filename}"')
|
||||
filename = filename or path.split(location)[-1]
|
||||
mime_type = mime_type or guess_type(filename)[0] or "text/plain"
|
||||
if _range:
|
||||
|
|
|
@ -350,9 +350,7 @@ class JSONResponse(HTTPResponse):
|
|||
@raw_body.setter
|
||||
def raw_body(self, value: Any):
|
||||
self._body_manually_set = False
|
||||
self._body = self._encode_body(
|
||||
self._use_dumps(value, **self._use_dumps_kwargs)
|
||||
)
|
||||
self._body = self._encode_body(self._use_dumps(value, **self._use_dumps_kwargs))
|
||||
self._raw_body = value
|
||||
|
||||
@property # type: ignore
|
||||
|
@ -484,9 +482,7 @@ class JSONResponse(HTTPResponse):
|
|||
self._check_body_not_manually_set()
|
||||
|
||||
if not isinstance(self._raw_body, (list, dict)):
|
||||
raise SanicException(
|
||||
"Cannot pop from a non-list and non-dict object."
|
||||
)
|
||||
raise SanicException("Cannot pop from a non-list and non-dict object.")
|
||||
|
||||
if isinstance(default, Default):
|
||||
value = self._raw_body.pop(key)
|
||||
|
|
|
@ -40,9 +40,7 @@ class Router(BaseRouter):
|
|||
raise MethodNotAllowed(
|
||||
f"Method {method} not allowed for URL {path}",
|
||||
method=method,
|
||||
allowed_methods=tuple(e.allowed_methods)
|
||||
if e.allowed_methods
|
||||
else None,
|
||||
allowed_methods=tuple(e.allowed_methods) if e.allowed_methods else None,
|
||||
) from None
|
||||
|
||||
@lru_cache(maxsize=ROUTER_CACHE_SIZE)
|
||||
|
@ -137,11 +135,7 @@ class Router(BaseRouter):
|
|||
|
||||
ident = name
|
||||
if len(hosts) > 1:
|
||||
ident = (
|
||||
f"{name}_{host.replace('.', '_')}"
|
||||
if name
|
||||
else "__unnamed__"
|
||||
)
|
||||
ident = f"{name}_{host.replace('.', '_')}" if name else "__unnamed__"
|
||||
|
||||
route = super().add(**params) # type: ignore
|
||||
route.extra.ident = ident
|
||||
|
|
|
@ -303,9 +303,7 @@ class Http3Protocol(HttpProtocolMixin, ConnectionProtocol): # type: ignore
|
|||
if isinstance(event, ProtocolNegotiated):
|
||||
self._setup_connection(transmit=self.transmit)
|
||||
if event.alpn_protocol in H3_ALPN:
|
||||
self._connection = H3Connection(
|
||||
self._quic, enable_webtransport=True
|
||||
)
|
||||
self._connection = H3Connection(self._quic, enable_webtransport=True)
|
||||
elif isinstance(event, DatagramFrameReceived):
|
||||
if event.data == b"quack":
|
||||
self._quic.send_datagram_frame(b"quack-ack")
|
||||
|
|
|
@ -140,8 +140,7 @@ class WebSocketProtocol(HttpProtocol):
|
|||
)
|
||||
loop = (
|
||||
request.transport.loop
|
||||
if hasattr(request, "transport")
|
||||
and hasattr(request.transport, "loop")
|
||||
if hasattr(request, "transport") and hasattr(request.transport, "loop")
|
||||
else None
|
||||
)
|
||||
await self.websocket.connection_made(self, loop=loop)
|
||||
|
|
|
@ -167,9 +167,7 @@ def _setup_system_signals(
|
|||
ctrlc_workaround_for_windows(app)
|
||||
else:
|
||||
for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]:
|
||||
loop.add_signal_handler(
|
||||
_signal, partial(app.stop, terminate=False)
|
||||
)
|
||||
loop.add_signal_handler(_signal, partial(app.stop, terminate=False))
|
||||
|
||||
|
||||
def _run_server_forever(loop, before_stop, after_stop, cleanup, unix):
|
||||
|
@ -223,9 +221,7 @@ def _serve_http_1(
|
|||
unix=unix,
|
||||
**protocol_kwargs,
|
||||
)
|
||||
asyncio_server_kwargs = (
|
||||
asyncio_server_kwargs if asyncio_server_kwargs else {}
|
||||
)
|
||||
asyncio_server_kwargs = asyncio_server_kwargs if asyncio_server_kwargs else {}
|
||||
if OS_IS_WINDOWS and sock:
|
||||
pid = os.getpid()
|
||||
sock = sock.share(pid)
|
||||
|
@ -314,9 +310,7 @@ def _serve_http_3(
|
|||
run_multiple: bool = False,
|
||||
):
|
||||
if not HTTP3_AVAILABLE:
|
||||
raise ServerError(
|
||||
"Cannot run HTTP/3 server without aioquic installed. "
|
||||
)
|
||||
raise ServerError("Cannot run HTTP/3 server without aioquic installed. ")
|
||||
protocol = partial(Http3Protocol, app=app)
|
||||
ticket_store = SessionTicketStore()
|
||||
ssl_context = get_ssl_context(app, ssl)
|
||||
|
@ -339,9 +333,7 @@ def _serve_http_3(
|
|||
|
||||
# TODO: Create connection cleanup and graceful shutdown
|
||||
cleanup = None
|
||||
_run_server_forever(
|
||||
loop, server.before_stop, server.after_stop, cleanup, None
|
||||
)
|
||||
_run_server_forever(loop, server.before_stop, server.after_stop, cleanup, None)
|
||||
|
||||
|
||||
def _build_protocol_kwargs(
|
||||
|
|
|
@ -23,9 +23,7 @@ def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket:
|
|||
try: # IP address: family must be specified for IPv6 at least
|
||||
ip = ip_address(host)
|
||||
host = str(ip)
|
||||
sock = socket.socket(
|
||||
socket.AF_INET6 if ip.version == 6 else socket.AF_INET
|
||||
)
|
||||
sock = socket.socket(socket.AF_INET6 if ip.version == 6 else socket.AF_INET)
|
||||
except ValueError: # Hostname, may become AF_INET or AF_INET6
|
||||
sock = socket.socket()
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
|
@ -92,9 +90,7 @@ def remove_unix_socket(path: str | None) -> None:
|
|||
pass
|
||||
|
||||
|
||||
def configure_socket(
|
||||
server_settings: dict[str, Any]
|
||||
) -> socket.SocketType | None:
|
||||
def configure_socket(server_settings: dict[str, Any]) -> socket.SocketType | None:
|
||||
# Create a listening socket or use the one in settings
|
||||
if server_settings.get("version") is HTTP.VERSION_3:
|
||||
return None
|
||||
|
|
|
@ -152,8 +152,7 @@ class WebsocketFrameAssembler:
|
|||
# and get_in_progress check, this exception is here
|
||||
# as a failsafe
|
||||
raise ServerError(
|
||||
"Websocket get() found a message when "
|
||||
"state was already fetched."
|
||||
"Websocket get() found a message when " "state was already fetched."
|
||||
)
|
||||
self.message_fetched.set()
|
||||
self.chunks = []
|
||||
|
|
|
@ -152,12 +152,8 @@ class WebsocketImplProtocol:
|
|||
self.data_finished_fut = asyncio.shield(self.loop.create_future())
|
||||
|
||||
if self.ping_interval:
|
||||
self.keepalive_ping_task = asyncio.create_task(
|
||||
self.keepalive_ping()
|
||||
)
|
||||
self.auto_closer_task = asyncio.create_task(
|
||||
self.auto_close_connection()
|
||||
)
|
||||
self.keepalive_ping_task = asyncio.create_task(self.keepalive_ping())
|
||||
self.auto_closer_task = asyncio.create_task(self.auto_close_connection())
|
||||
|
||||
async def wait_for_connection_lost(self, timeout=None) -> bool:
|
||||
"""
|
||||
|
@ -246,9 +242,7 @@ class WebsocketImplProtocol:
|
|||
try:
|
||||
await asyncio.wait_for(ping_waiter, self.ping_timeout)
|
||||
except asyncio.TimeoutError:
|
||||
error_logger.warning(
|
||||
"Websocket timed out waiting for pong"
|
||||
)
|
||||
error_logger.warning("Websocket timed out waiting for pong")
|
||||
self.fail_connection(1011)
|
||||
break
|
||||
except asyncio.CancelledError:
|
||||
|
@ -278,9 +272,7 @@ class WebsocketImplProtocol:
|
|||
self.keepalive_ping_task = None
|
||||
if self.loop and self.io_proto and self.io_proto.transport:
|
||||
self.io_proto.transport.close()
|
||||
self.loop.call_later(
|
||||
self.close_timeout, self.io_proto.transport.abort
|
||||
)
|
||||
self.loop.call_later(self.close_timeout, self.io_proto.transport.abort)
|
||||
# We were never open, or already closed
|
||||
return True
|
||||
|
||||
|
@ -318,9 +310,7 @@ class WebsocketImplProtocol:
|
|||
try:
|
||||
data_to_send = self.ws_proto.data_to_send()
|
||||
while (
|
||||
len(data_to_send)
|
||||
and self.io_proto
|
||||
and self.io_proto.transport
|
||||
len(data_to_send) and self.io_proto and self.io_proto.transport
|
||||
):
|
||||
frame_data = data_to_send.pop(0)
|
||||
self.io_proto.transport.write(frame_data)
|
||||
|
@ -356,11 +346,7 @@ class WebsocketImplProtocol:
|
|||
self.ws_proto.send_close(code, reason)
|
||||
data_to_send.extend(self.ws_proto.data_to_send())
|
||||
try:
|
||||
while (
|
||||
len(data_to_send)
|
||||
and self.io_proto
|
||||
and self.io_proto.transport
|
||||
):
|
||||
while len(data_to_send) and self.io_proto and self.io_proto.transport:
|
||||
frame_data = data_to_send.pop(0)
|
||||
self.io_proto.transport.write(frame_data)
|
||||
except Exception:
|
||||
|
@ -392,15 +378,11 @@ class WebsocketImplProtocol:
|
|||
if self.data_finished_fut:
|
||||
try:
|
||||
await self.data_finished_fut
|
||||
logger.debug(
|
||||
"Websocket task finished. Closing the connection."
|
||||
)
|
||||
logger.debug("Websocket task finished. Closing the connection.")
|
||||
except asyncio.CancelledError:
|
||||
# Cancelled error is called when data phase is cancelled
|
||||
# if an error occurred or the client closed the connection
|
||||
logger.debug(
|
||||
"Websocket handler cancelled. Closing the connection."
|
||||
)
|
||||
logger.debug("Websocket handler cancelled. Closing the connection.")
|
||||
|
||||
# Cancel the keepalive ping task.
|
||||
if self.keepalive_ping_task:
|
||||
|
@ -426,10 +408,7 @@ class WebsocketImplProtocol:
|
|||
if (not self.io_proto) or (not self.io_proto.transport):
|
||||
# we were never open, or done. Can't do any finalization.
|
||||
return
|
||||
elif (
|
||||
self.connection_lost_waiter
|
||||
and self.connection_lost_waiter.done()
|
||||
):
|
||||
elif self.connection_lost_waiter and self.connection_lost_waiter.done():
|
||||
# connection confirmed closed already, proceed to abort waiter
|
||||
...
|
||||
elif self.io_proto.transport.is_closing():
|
||||
|
@ -447,9 +426,7 @@ class WebsocketImplProtocol:
|
|||
if self.io_proto and self.io_proto.transport:
|
||||
self.io_proto.transport.abort()
|
||||
else:
|
||||
if await self.wait_for_connection_lost(
|
||||
timeout=self.close_timeout
|
||||
):
|
||||
if await self.wait_for_connection_lost(timeout=self.close_timeout):
|
||||
# Connection aborted before the timeout expired.
|
||||
return
|
||||
error_logger.warning(
|
||||
|
@ -731,8 +708,7 @@ class WebsocketImplProtocol:
|
|||
async with self.conn_mutex:
|
||||
if self.ws_proto.state in (CLOSED, CLOSING):
|
||||
raise WebsocketClosed(
|
||||
"Cannot send a ping when the websocket interface "
|
||||
"is closed."
|
||||
"Cannot send a ping when the websocket interface " "is closed."
|
||||
)
|
||||
if (not self.io_proto) or (not self.io_proto.loop):
|
||||
raise ServerError(
|
||||
|
@ -747,9 +723,7 @@ class WebsocketImplProtocol:
|
|||
|
||||
# Protect against duplicates if a payload is explicitly set.
|
||||
if data in self.pings:
|
||||
raise ValueError(
|
||||
"already waiting for a pong with the same data"
|
||||
)
|
||||
raise ValueError("already waiting for a pong with the same data")
|
||||
|
||||
# Generate a unique random payload otherwise.
|
||||
while data is None or data in self.pings:
|
||||
|
@ -842,9 +816,7 @@ class WebsocketImplProtocol:
|
|||
self.ws_proto.receive_eof()
|
||||
data_to_send = self.ws_proto.data_to_send()
|
||||
events_to_process = self.ws_proto.events_received()
|
||||
asyncio.create_task(
|
||||
self.async_eof_received(data_to_send, events_to_process)
|
||||
)
|
||||
asyncio.create_task(self.async_eof_received(data_to_send, events_to_process))
|
||||
return False
|
||||
|
||||
def connection_lost(self, exc):
|
||||
|
|
|
@ -303,11 +303,7 @@ class SignalRouter(BaseRouter):
|
|||
|
||||
def _build_event_parts(self, event: str) -> tuple[str, str, str]:
|
||||
parts = path_to_parts(event, self.delimiter)
|
||||
if (
|
||||
len(parts) != 3
|
||||
or parts[0].startswith("<")
|
||||
or parts[1].startswith("<")
|
||||
):
|
||||
if len(parts) != 3 or parts[0].startswith("<") or parts[1].startswith("<"):
|
||||
raise InvalidSignal("Invalid signal event: %s" % event)
|
||||
|
||||
if (
|
||||
|
@ -315,9 +311,7 @@ class SignalRouter(BaseRouter):
|
|||
and event not in RESERVED_NAMESPACES[parts[0]]
|
||||
and not (parts[2].startswith("<") and parts[2].endswith(">"))
|
||||
):
|
||||
raise InvalidSignal(
|
||||
"Cannot declare reserved signal event: %s" % event
|
||||
)
|
||||
raise InvalidSignal("Cannot declare reserved signal event: %s" % event)
|
||||
return parts
|
||||
|
||||
def _clean_trigger(self, trigger: str) -> str:
|
||||
|
|
|
@ -11,8 +11,6 @@ def create_simple_server(directory: Path):
|
|||
)
|
||||
|
||||
app = Sanic("SimpleServer")
|
||||
app.static(
|
||||
"/", directory, name="main", directory_view=True, index="index.html"
|
||||
)
|
||||
app.static("/", directory, name="main", directory_view=True, index="index.html")
|
||||
|
||||
return app
|
||||
|
|
|
@ -22,9 +22,7 @@ class RemoveAltSvc(NodeTransformer):
|
|||
def __init__(self, app: Sanic, verbosity: int = 0) -> None:
|
||||
self._app = app
|
||||
self._verbosity = verbosity
|
||||
self._versions = {
|
||||
info.settings["version"] for info in app.state.server_info
|
||||
}
|
||||
self._versions = {info.settings["version"] for info in app.state.server_info}
|
||||
|
||||
def visit_Assign(self, node: Assign) -> Any:
|
||||
if any(self._matches(target) for target in node.targets):
|
||||
|
|
|
@ -14,9 +14,7 @@ class OptionalDispatchEvent(BaseScheme):
|
|||
super().__init__(app)
|
||||
|
||||
self._sync_events()
|
||||
self._registered_events = [
|
||||
signal.name for signal in app.signal_router.routes
|
||||
]
|
||||
self._registered_events = [signal.name for signal in app.signal_router.routes]
|
||||
|
||||
def visitors(self) -> List[NodeTransformer]:
|
||||
return [RemoveDispatch(self._registered_events)]
|
||||
|
@ -26,9 +24,7 @@ class OptionalDispatchEvent(BaseScheme):
|
|||
app_events = {}
|
||||
for app in self.app.__class__._app_registry.values():
|
||||
if app.state.server_info:
|
||||
app_events[app] = {
|
||||
signal.name for signal in app.signal_router.routes
|
||||
}
|
||||
app_events[app] = {signal.name for signal in app.signal_router.routes}
|
||||
all_events.update(app_events[app])
|
||||
|
||||
for app, events in app_events.items():
|
||||
|
|
|
@ -14,9 +14,7 @@ class SharedContext(SimpleNamespace):
|
|||
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
if self.is_locked:
|
||||
raise RuntimeError(
|
||||
f"Cannot set {name} on locked SharedContext object"
|
||||
)
|
||||
raise RuntimeError(f"Cannot set {name} on locked SharedContext object")
|
||||
if not os.environ.get("SANIC_WORKER_NAME"):
|
||||
to_check: Iterable[Any]
|
||||
if not isinstance(value, (tuple, frozenset)):
|
||||
|
@ -35,8 +33,7 @@ class SharedContext(SimpleNamespace):
|
|||
except AttributeError:
|
||||
module = ""
|
||||
if not any(
|
||||
module.startswith(prefix)
|
||||
for prefix in ("multiprocessing", "ctypes")
|
||||
module.startswith(prefix) for prefix in ("multiprocessing", "ctypes")
|
||||
):
|
||||
error_logger.warning(
|
||||
f"{Colors.YELLOW}Unsafe object {Colors.PURPLE}{name} "
|
||||
|
|
|
@ -80,9 +80,7 @@ def load_module_from_file_location(
|
|||
env_vars_in_location = set(re_findall(r"\${(.+?)}", location))
|
||||
|
||||
# B) Check these variables exists in environment.
|
||||
not_defined_env_vars = env_vars_in_location.difference(
|
||||
os_environ.keys()
|
||||
)
|
||||
not_defined_env_vars = env_vars_in_location.difference(os_environ.keys())
|
||||
if not_defined_env_vars:
|
||||
raise LoadFileException(
|
||||
"The following environment variables are not set: "
|
||||
|
@ -91,18 +89,14 @@ def load_module_from_file_location(
|
|||
|
||||
# C) Substitute them in location.
|
||||
for env_var in env_vars_in_location:
|
||||
location = location.replace(
|
||||
"${" + env_var + "}", os_environ[env_var]
|
||||
)
|
||||
location = location.replace("${" + env_var + "}", os_environ[env_var])
|
||||
|
||||
location = str(location)
|
||||
if ".py" in location:
|
||||
name = location.split("/")[-1].split(".")[
|
||||
0
|
||||
] # get just the file name without path and .py extension
|
||||
_mod_spec = spec_from_file_location(
|
||||
name, location, *args, **kwargs
|
||||
)
|
||||
_mod_spec = spec_from_file_location(name, location, *args, **kwargs)
|
||||
assert _mod_spec is not None # type assertion for mypy
|
||||
module = module_from_spec(_mod_spec)
|
||||
_mod_spec.loader.exec_module(module) # type: ignore
|
||||
|
|
|
@ -99,9 +99,7 @@ class WorkerManager:
|
|||
Worker: The Worker instance
|
||||
""" # noqa: E501
|
||||
container = self.transient if transient else self.durable
|
||||
worker = Worker(
|
||||
ident, func, kwargs, self.context, self.worker_state, workers
|
||||
)
|
||||
worker = Worker(ident, func, kwargs, self.context, self.worker_state, workers)
|
||||
container[worker.ident] = worker
|
||||
return worker
|
||||
|
||||
|
@ -204,9 +202,7 @@ class WorkerManager:
|
|||
|
||||
change = num_worker - self.num_server
|
||||
if change == 0:
|
||||
logger.info(
|
||||
f"No change needed. There are already {num_worker} workers."
|
||||
)
|
||||
logger.info(f"No change needed. There are already {num_worker} workers.")
|
||||
return
|
||||
|
||||
logger.info(f"Scaling from {self.num_server} to {num_worker} workers")
|
||||
|
@ -237,9 +233,7 @@ class WorkerManager:
|
|||
try:
|
||||
if self.monitor_subscriber.poll(0.1):
|
||||
message = self.monitor_subscriber.recv()
|
||||
logger.debug(
|
||||
f"Monitor message: {message}", extra={"verbosity": 2}
|
||||
)
|
||||
logger.debug(f"Monitor message: {message}", extra={"verbosity": 2})
|
||||
if not message:
|
||||
break
|
||||
elif message == "__TERMINATE__":
|
||||
|
@ -258,9 +252,7 @@ class WorkerManager:
|
|||
reloaded_files = (
|
||||
split_message[1] if len(split_message) > 1 else None
|
||||
)
|
||||
process_names = [
|
||||
name.strip() for name in processes.split(",")
|
||||
]
|
||||
process_names = [name.strip() for name in processes.split(",")]
|
||||
if "__ALL_PROCESSES__" in process_names:
|
||||
process_names = None
|
||||
order = (
|
||||
|
|
|
@ -54,8 +54,7 @@ class WorkerMultiplexer:
|
|||
"""
|
||||
if name and all_workers:
|
||||
raise ValueError(
|
||||
"Ambiguous restart with both a named process and"
|
||||
" all_workers=True"
|
||||
"Ambiguous restart with both a named process and" " all_workers=True"
|
||||
)
|
||||
if not name:
|
||||
name = "__ALL_PROCESSES__:" if all_workers else self.name
|
||||
|
|
|
@ -29,9 +29,7 @@ class WorkerProcess:
|
|||
self.kwargs = kwargs
|
||||
self.worker_state = worker_state
|
||||
if self.name not in self.worker_state:
|
||||
self.worker_state[self.name] = {
|
||||
"server": self.SERVER_LABEL in self.name
|
||||
}
|
||||
self.worker_state[self.name] = {"server": self.SERVER_LABEL in self.name}
|
||||
self.spawn()
|
||||
|
||||
def set_state(self, state: ProcessState, force=False):
|
||||
|
@ -94,9 +92,7 @@ class WorkerProcess:
|
|||
self._terminate_now()
|
||||
else:
|
||||
self._old_process = self._current_process
|
||||
self.kwargs.update(
|
||||
{"config": {k.upper(): v for k, v in kwargs.items()}}
|
||||
)
|
||||
self.kwargs.update({"config": {k.upper(): v for k, v in kwargs.items()}})
|
||||
try:
|
||||
self.spawn()
|
||||
self.start()
|
||||
|
|
|
@ -27,9 +27,7 @@ def worker_serve(
|
|||
app_loader: AppLoader,
|
||||
worker_state: Optional[Dict[str, Any]] = None,
|
||||
server_info: Optional[Dict[str, List[ApplicationServerInfo]]] = None,
|
||||
ssl: Optional[
|
||||
Union[SSLContext, Dict[str, Union[str, os.PathLike]]]
|
||||
] = None,
|
||||
ssl: Optional[Union[SSLContext, Dict[str, Union[str, os.PathLike]]]] = None,
|
||||
sock: Optional[socket.socket] = None,
|
||||
unix: Optional[str] = None,
|
||||
reuse_port: bool = False,
|
||||
|
@ -86,9 +84,7 @@ def worker_serve(
|
|||
# Hydrate apps with any passed server info
|
||||
|
||||
if monitor_publisher is None:
|
||||
raise RuntimeError(
|
||||
"No restart publisher found in worker process"
|
||||
)
|
||||
raise RuntimeError("No restart publisher found in worker process")
|
||||
if worker_state is None:
|
||||
raise RuntimeError("No worker state found in worker process")
|
||||
|
||||
|
@ -96,9 +92,7 @@ def worker_serve(
|
|||
apps = list(Sanic._app_registry.values())
|
||||
app.before_server_start(partial(app._start_servers, apps=apps))
|
||||
for a in apps:
|
||||
a.multiplexer = WorkerMultiplexer(
|
||||
monitor_publisher, worker_state
|
||||
)
|
||||
a.multiplexer = WorkerMultiplexer(monitor_publisher, worker_state)
|
||||
|
||||
if app.debug:
|
||||
loop.set_debug(app.debug)
|
||||
|
|
|
@ -62,9 +62,7 @@ class WorkerState(Mapping):
|
|||
|
||||
def update(self, mapping: MappingType[str, Any]) -> None:
|
||||
if any(k in self.RESTRICTED for k in mapping.keys()):
|
||||
self._write_error(
|
||||
[k for k in mapping.keys() if k in self.RESTRICTED]
|
||||
)
|
||||
self._write_error([k for k in mapping.keys() if k in self.RESTRICTED])
|
||||
self._state[self._name] = {
|
||||
**self._state[self._name],
|
||||
**mapping,
|
||||
|
|
|
@ -22,9 +22,7 @@ if __name__ == "__main__":
|
|||
help="Render the news fragments, don't write to files, "
|
||||
"don't check versions.",
|
||||
)
|
||||
@click.option(
|
||||
"--dir", "directory", default=path.dirname(path.abspath(__file__))
|
||||
)
|
||||
@click.option("--dir", "directory", default=path.dirname(path.abspath(__file__)))
|
||||
@click.option("--name", "project_name", default=None)
|
||||
@click.option(
|
||||
"--version",
|
||||
|
|
|
@ -25,8 +25,7 @@ GIT_COMMANDS = {
|
|||
],
|
||||
"push_tag": ["git push origin {new_version}"],
|
||||
"get_change_log": [
|
||||
'git log --no-merges --pretty=format:"%h::: %cn::: %s" '
|
||||
"{current_version}.."
|
||||
'git log --no-merges --pretty=format:"%h::: %cn::: %s" ' "{current_version}.."
|
||||
],
|
||||
}
|
||||
|
||||
|
@ -72,9 +71,7 @@ class Directory:
|
|||
|
||||
def _run_shell_command(command: list):
|
||||
try:
|
||||
process = Popen(
|
||||
command, stderr=PIPE, stdout=PIPE, stdin=PIPE, shell=True
|
||||
)
|
||||
process = Popen(command, stderr=PIPE, stdout=PIPE, stdin=PIPE, shell=True)
|
||||
output, error = process.communicate()
|
||||
return_code = process.returncode
|
||||
return output.decode("utf-8"), error, return_code
|
||||
|
@ -143,9 +140,9 @@ def _update_release_version_for_sanic(
|
|||
current_version_line = config_parser.get(
|
||||
"version", "current_version_pattern"
|
||||
).format(current_version=current_version)
|
||||
new_version_line = config_parser.get(
|
||||
"version", "new_version_pattern"
|
||||
).format(new_version=new_version)
|
||||
new_version_line = config_parser.get("version", "new_version_pattern").format(
|
||||
new_version=new_version
|
||||
)
|
||||
|
||||
for version_file in version_files.split(","):
|
||||
with open(version_file) as init_file:
|
||||
|
|
8
setup.py
@ -59,9 +59,7 @@ def str_to_bool(val: str) -> bool:
|
|||
|
||||
with open_local(["sanic", "__version__.py"], encoding="latin1") as fp:
|
||||
try:
|
||||
version = re.findall(
|
||||
r"^__version__ = \"([^']+)\"\r?$", fp.read(), re.M
|
||||
)[0]
|
||||
version = re.findall(r"^__version__ = \"([^']+)\"\r?$", fp.read(), re.M)[0]
|
||||
except IndexError:
|
||||
raise RuntimeError("Unable to determine version.")
|
||||
|
||||
|
@ -96,9 +94,7 @@ setup_kwargs = {
|
|||
"entry_points": {"console_scripts": ["sanic = sanic.__main__:main"]},
|
||||
}
|
||||
|
||||
env_dependency = (
|
||||
'; sys_platform != "win32" and implementation_name == "cpython"'
|
||||
)
|
||||
env_dependency = '; sys_platform != "win32" and implementation_name == "cpython"'
|
||||
ujson = "ujson>=1.35" + env_dependency
|
||||
uvloop = "uvloop>=0.15.0" + env_dependency
|
||||
types_ujson = "types-ujson" + env_dependency
|
||||
|
|
|
@ -16,9 +16,7 @@ class TestSanicRouteResolution:
|
|||
async def test_resolve_route_no_arg_string_path(
|
||||
self, sanic_router, route_generator, benchmark
|
||||
):
|
||||
simple_routes = route_generator.generate_random_direct_route(
|
||||
max_route_depth=4
|
||||
)
|
||||
simple_routes = route_generator.generate_random_direct_route(max_route_depth=4)
|
||||
router, simple_routes = sanic_router(route_details=simple_routes)
|
||||
route_to_call = choice(simple_routes)
|
||||
request = Request(
|
||||
|
@ -52,9 +50,7 @@ class TestSanicRouteResolution:
|
|||
)
|
||||
router, typed_routes = sanic_router(route_details=typed_routes)
|
||||
route_to_call = choice(typed_routes)
|
||||
url = route_generator.generate_url_for_template(
|
||||
template=route_to_call[-1]
|
||||
)
|
||||
url = route_generator.generate_url_for_template(template=route_to_call[-1])
|
||||
|
||||
print(f"{route_to_call[-1]} -> {url}")
|
||||
request = Request(
|
||||
|
|
|
@ -51,9 +51,7 @@ def selfsigned(key, common_name, san):
|
|||
.not_valid_before(datetime.utcnow())
|
||||
.not_valid_after(datetime.utcnow() + timedelta(days=365.25 * 8))
|
||||
.add_extension(
|
||||
KeyUsage(
|
||||
True, False, False, False, False, False, False, False, False
|
||||
),
|
||||
KeyUsage(True, False, False, False, False, False, False, False, False),
|
||||
critical=True,
|
||||
)
|
||||
.add_extension(
|
||||
|
|
|
@ -13,9 +13,7 @@ class RawClient:
|
|||
self.port = port
|
||||
|
||||
async def connect(self):
|
||||
self.reader, self.writer = await asyncio.open_connection(
|
||||
self.host, self.port
|
||||
)
|
||||
self.reader, self.writer = await asyncio.open_connection(self.host, self.port)
|
||||
|
||||
async def close(self):
|
||||
self.writer.close()
|
||||
|
@ -39,8 +37,4 @@ class RawClient:
|
|||
return await self.reader.read(nbytes)
|
||||
|
||||
def _clean(self, message: str) -> str:
|
||||
return (
|
||||
dedent(message)
|
||||
.lstrip("\n")
|
||||
.replace("\n", self.CRLF.decode("utf-8"))
|
||||
)
|
||||
return dedent(message).lstrip("\n").replace("\n", self.CRLF.decode("utf-8"))
|
||||
|
|
|
@ -42,14 +42,10 @@ async def _handler(request):
|
|||
|
||||
|
||||
TYPE_TO_GENERATOR_MAP = {
|
||||
"str": lambda: "".join(
|
||||
[random.choice(string.ascii_lowercase) for _ in range(4)]
|
||||
),
|
||||
"str": lambda: "".join([random.choice(string.ascii_lowercase) for _ in range(4)]),
|
||||
"int": lambda: random.choice(range(1000000)),
|
||||
"float": lambda: random.random(),
|
||||
"alpha": lambda: "".join(
|
||||
[random.choice(string.ascii_lowercase) for _ in range(4)]
|
||||
),
|
||||
"alpha": lambda: "".join([random.choice(string.ascii_lowercase) for _ in range(4)]),
|
||||
"uuid": lambda: str(uuid.uuid1()),
|
||||
}
|
||||
|
||||
|
|
|
@ -84,9 +84,7 @@ async def test_http_receiver_run_request(app: Sanic, http_request: Request):
|
|||
|
||||
app.__class__ = mock_handle
|
||||
receiver = generate_http_receiver(app, http_request)
|
||||
receiver.protocol.quic_event_received(
|
||||
ProtocolNegotiated(alpn_protocol="h3")
|
||||
)
|
||||
receiver.protocol.quic_event_received(ProtocolNegotiated(alpn_protocol="h3"))
|
||||
await receiver.run()
|
||||
handler.assert_awaited_once_with(receiver.request)
|
||||
|
||||
|
@ -99,9 +97,7 @@ async def test_http_receiver_run_exception(app: Sanic, http_request: Request):
|
|||
|
||||
app.__class__ = mock_handle
|
||||
receiver = generate_http_receiver(app, http_request)
|
||||
receiver.protocol.quic_event_received(
|
||||
ProtocolNegotiated(alpn_protocol="h3")
|
||||
)
|
||||
receiver.protocol.quic_event_received(ProtocolNegotiated(alpn_protocol="h3"))
|
||||
exception = Exception("Oof")
|
||||
await receiver.run(exception)
|
||||
handler.assert_awaited_once_with(receiver.request, exception)
|
||||
|
@ -139,9 +135,7 @@ def test_http_receiver_receive_body(app: Sanic, http_request: Request):
|
|||
receiver.receive_body(b"..")
|
||||
assert receiver.request.body == b"...."
|
||||
|
||||
with pytest.raises(
|
||||
PayloadTooLarge, match="Request body exceeds the size limit"
|
||||
):
|
||||
with pytest.raises(PayloadTooLarge, match="Request body exceeds the size limit"):
|
||||
receiver.receive_body(b"..")
|
||||
|
||||
|
||||
|
@@ -176,17 +170,13 @@ async def test_send_headers(app: Sanic, http_request: Request):
     send_headers_mock = Mock()
     existing_send_headers = H3Connection.send_headers
     receiver = generate_http_receiver(app, http_request)
-    receiver.protocol.quic_event_received(
-        ProtocolNegotiated(alpn_protocol="h3")
-    )
+    receiver.protocol.quic_event_received(ProtocolNegotiated(alpn_protocol="h3"))
 
     http_request._protocol = receiver.protocol
 
     def send_headers(*args, **kwargs):
         send_headers_mock(*args, **kwargs)
-        return existing_send_headers(
-            receiver.protocol.connection, *args, **kwargs
-        )
+        return existing_send_headers(receiver.protocol.connection, *args, **kwargs)
 
     receiver.protocol.connection.send_headers = send_headers
     receiver.head_only = False
@@ -310,10 +300,7 @@ def test_request_header_encoding(app):
         )
     )
     assert exc_info.value.status_code == 400
-    assert (
-        str(exc_info.value)
-        == "Header names may only contain US-ASCII characters."
-    )
+    assert str(exc_info.value) == "Header names may only contain US-ASCII characters."
 
 
 def test_request_url_encoding(app):
@@ -5,9 +5,7 @@ import timeit
 
 from sanic.response import json
 
-currentdir = os.path.dirname(
-    os.path.abspath(inspect.getfile(inspect.currentframe()))
-)
+currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 sys.path.insert(0, currentdir + "/../../../")
 
 
@@ -5,9 +5,7 @@ import sys
 from sanic import Sanic
 from sanic.response import json
 
-currentdir = os.path.dirname(
-    os.path.abspath(inspect.getfile(inspect.currentframe()))
-)
+currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 sys.path.insert(0, currentdir + "/../../../")
 
 
@@ -6,9 +6,7 @@ from sanic import Sanic
 from sanic.exceptions import ServerError
 from sanic.response import json, text
 
-currentdir = os.path.dirname(
-    os.path.abspath(inspect.getfile(inspect.currentframe()))
-)
+currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
 sys.path.insert(0, currentdir + "/../../../")
 
 
@@ -97,9 +97,7 @@ def test_create_server_no_startup(app: Sanic):
         asyncio_server_kwargs={"start_serving": False},
     )
     srv = loop.run_until_complete(asyncio_srv_coro)
-    message = (
-        "Cannot run Sanic server without first running await server.startup()"
-    )
+    message = "Cannot run Sanic server without first running await server.startup()"
     with pytest.raises(SanicException, match=message):
         loop.run_until_complete(srv.start_serving())
 
@@ -212,14 +210,8 @@ def test_app_websocket_parameters(websocket_protocol_mock, app: Sanic):
     websocket_protocol_call_args = websocket_protocol_mock.call_args
     ws_kwargs = websocket_protocol_call_args[1]
     assert ws_kwargs["websocket_max_size"] == app.config.WEBSOCKET_MAX_SIZE
-    assert (
-        ws_kwargs["websocket_ping_timeout"]
-        == app.config.WEBSOCKET_PING_TIMEOUT
-    )
-    assert (
-        ws_kwargs["websocket_ping_interval"]
-        == app.config.WEBSOCKET_PING_INTERVAL
-    )
+    assert ws_kwargs["websocket_ping_timeout"] == app.config.WEBSOCKET_PING_TIMEOUT
+    assert ws_kwargs["websocket_ping_interval"] == app.config.WEBSOCKET_PING_INTERVAL
 
 
 def test_handle_request_with_nested_exception(app: Sanic, monkeypatch):
@@ -228,9 +220,7 @@ def test_handle_request_with_nested_exception(app: Sanic, monkeypatch):
     def mock_error_handler_response(*args, **kwargs):
         raise Exception(err_msg)
 
-    monkeypatch.setattr(
-        app.error_handler, "response", mock_error_handler_response
-    )
+    monkeypatch.setattr(app.error_handler, "response", mock_error_handler_response)
 
     @app.get("/")
     def handler(request):
@@ -247,9 +237,7 @@ def test_handle_request_with_nested_exception_debug(app: Sanic, monkeypatch):
     def mock_error_handler_response(*args, **kwargs):
         raise Exception(err_msg)
 
-    monkeypatch.setattr(
-        app.error_handler, "response", mock_error_handler_response
-    )
+    monkeypatch.setattr(app.error_handler, "response", mock_error_handler_response)
 
     @app.get("/")
     def handler(request):
@@ -263,15 +251,11 @@ def test_handle_request_with_nested_exception_debug(app: Sanic, monkeypatch):
     )
 
 
-def test_handle_request_with_nested_sanic_exception(
-    app: Sanic, monkeypatch, caplog
-):
+def test_handle_request_with_nested_sanic_exception(app: Sanic, monkeypatch, caplog):
     def mock_error_handler_response(*args, **kwargs):
         raise SanicException("Mock SanicException")
 
-    monkeypatch.setattr(
-        app.error_handler, "response", mock_error_handler_response
-    )
+    monkeypatch.setattr(app.error_handler, "response", mock_error_handler_response)
 
     @app.get("/")
     def handler(request):
@@ -326,9 +310,7 @@ def test_app_registry_wrong_type():
 def test_app_registry_name_reuse():
     Sanic("test")
     Sanic.test_mode = False
-    with pytest.raises(
-        SanicException, match='Sanic app name "test" already in use.'
-    ):
+    with pytest.raises(SanicException, match='Sanic app name "test" already in use.'):
         Sanic("test")
     Sanic.test_mode = True
     Sanic("test")
@@ -361,9 +343,7 @@ def test_get_app_does_not_exist():
 
 
 def test_get_app_does_not_exist_force_create():
-    assert isinstance(
-        Sanic.get_app("does-not-exist", force_create=True), Sanic
-    )
+    assert isinstance(Sanic.get_app("does-not-exist", force_create=True), Sanic)
 
 
 def test_get_app_default():
@@ -372,9 +352,7 @@ def test_get_app_default():
 
 
 def test_get_app_no_default():
-    with pytest.raises(
-        SanicException, match="No Sanic apps have been registered."
-    ):
+    with pytest.raises(SanicException, match="No Sanic apps have been registered."):
         Sanic.get_app()
 
 
@@ -383,9 +361,7 @@ def test_get_app_default_ambiguous():
     Sanic("test2")
     with pytest.raises(
         SanicException,
-        match=re.escape(
-            'Multiple Sanic apps found, use Sanic.get_app("app_name")'
-        ),
+        match=re.escape('Multiple Sanic apps found, use Sanic.get_app("app_name")'),
     ):
         Sanic.get_app()
 
@@ -417,8 +393,7 @@ def test_bad_custom_config():
     with pytest.raises(
         SanicException,
         match=(
-            "When instantiating Sanic with config, you cannot also pass "
-            "env_prefix"
+            "When instantiating Sanic with config, you cannot also pass " "env_prefix"
         ),
     ):
         Sanic("test", config=1, env_prefix=1)
@@ -500,9 +475,7 @@ def test_uvloop_cannot_never_called_with_create_server(caplog, monkeypatch):
     )
 
     counter = Counter([(r[1], r[2]) for r in caplog.record_tuples])
-    modified = sum(
-        1 for app in apps if not isinstance(app.config.USE_UVLOOP, Default)
-    )
+    modified = sum(1 for app in apps if not isinstance(app.config.USE_UVLOOP, Default))
 
     assert counter[(logging.WARNING, message)] == modified
 
@@ -559,12 +532,8 @@ def test_no_workers(app: Sanic):
         {"auto_reload": True},
     ),
 )
-def test_cannot_run_single_process_and_workers_or_auto_reload(
-    app: Sanic, extra
-):
-    message = (
-        "Single process cannot be run with multiple workers or auto-reload"
-    )
+def test_cannot_run_single_process_and_workers_or_auto_reload(app: Sanic, extra):
+    message = "Single process cannot be run with multiple workers or auto-reload"
     with pytest.raises(RuntimeError, match=message):
         app.run(single_process=True, **extra)
 
@@ -352,9 +352,7 @@ async def test_websocket_bytes_receive(send, receive, message_stack):
 
 
 @pytest.mark.asyncio
-async def test_websocket_accept_with_no_subprotocols(
-    send, receive, message_stack
-):
+async def test_websocket_accept_with_no_subprotocols(send, receive, message_stack):
     ws = WebSocketConnection(send, receive)
     await ws.accept()
 
@@ -28,9 +28,7 @@ def test_bp_group_with_additional_route_params(app: Sanic):
     blueprint_1 = Blueprint("blueprint_1", url_prefix="/bp1")
     blueprint_2 = Blueprint("blueprint_2", url_prefix="/bp2")
 
-    @blueprint_1.route(
-        "/request_path", methods=frozenset({"PUT", "POST"}), version=2
-    )
+    @blueprint_1.route("/request_path", methods=frozenset({"PUT", "POST"}), version=2)
     def blueprint_1_v2_method_with_put_and_post(request: Request):
         if request.method == "PUT":
             return text("PUT_OK")
@@ -46,9 +44,7 @@ def test_bp_group_with_additional_route_params(app: Sanic):
         elif request.method == "PATCH":
             return text(f"PATCH_{param}")
 
-    blueprint_group = Blueprint.group(
-        blueprint_1, blueprint_2, url_prefix="/api"
-    )
+    blueprint_group = Blueprint.group(blueprint_1, blueprint_2, url_prefix="/api")
 
     @blueprint_group.middleware("request")
     def authenticate_request(request: Request):
@@ -68,15 +64,11 @@ def test_bp_group_with_additional_route_params(app: Sanic):
     app.blueprint(blueprint_group)
 
     header = {"authorization": " ".join(["Basic", AUTH])}
-    _, response = app.test_client.put(
-        "/v2/api/bp1/request_path", headers=header
-    )
+    _, response = app.test_client.put("/v2/api/bp1/request_path", headers=header)
     assert response.text == "PUT_OK"
     assert response.headers.get("x-test-middleware") == "value"
 
-    _, response = app.test_client.post(
-        "/v2/api/bp1/request_path", headers=header
-    )
+    _, response = app.test_client.post("/v2/api/bp1/request_path", headers=header)
     assert response.text == "POST_OK"
 
     _, response = app.test_client.delete("/api/bp2/route/bp2", headers=header)
@@ -109,9 +101,7 @@ def test_bp_group(app: Sanic):
     def blueprint_2_error(request: Request):
         raise ServerError("Error")
 
-    blueprint_group_1 = Blueprint.group(
-        blueprint_1, blueprint_2, url_prefix="/bp"
-    )
+    blueprint_group_1 = Blueprint.group(blueprint_1, blueprint_2, url_prefix="/bp")
 
     blueprint_3 = Blueprint("blueprint_3", url_prefix="/bp3")
 
@@ -213,9 +203,7 @@ def test_bp_group_list_operations(app: Sanic):
     def blueprint_2_default_route(request):
         return text("BP2_OK")
 
-    blueprint_group_1 = Blueprint.group(
-        blueprint_1, blueprint_2, url_prefix="/bp"
-    )
+    blueprint_group_1 = Blueprint.group(blueprint_1, blueprint_2, url_prefix="/bp")
 
     blueprint_3 = Blueprint("blueprint_2", url_prefix="/bp3")
 
@@ -247,9 +235,7 @@ def test_bp_group_as_list():
 def test_bp_group_as_nested_group():
     blueprint_1 = Blueprint("blueprint_1", url_prefix="/bp1")
     blueprint_2 = Blueprint("blueprint_2", url_prefix="/bp2")
-    blueprint_group_1 = Blueprint.group(
-        Blueprint.group(blueprint_1, blueprint_2)
-    )
+    blueprint_group_1 = Blueprint.group(Blueprint.group(blueprint_1, blueprint_2))
     assert len(blueprint_group_1) == 1
 
 
@@ -259,9 +245,7 @@ def test_blueprint_group_insert():
     )
     blueprint_2 = Blueprint("blueprint_2", url_prefix="/bp2")
     blueprint_3 = Blueprint("blueprint_3", url_prefix=None)
-    group = BlueprintGroup(
-        url_prefix="/test", version=1.3, strict_slashes=False
-    )
+    group = BlueprintGroup(url_prefix="/test", version=1.3, strict_slashes=False)
     group.insert(0, blueprint_1)
     group.insert(0, blueprint_2)
     group.insert(0, blueprint_3)
@@ -350,12 +334,8 @@ async def test_multiple_nested_bp_group():
     bp1.add_route(lambda _: ..., "/", name="route1")
     bp2.add_route(lambda _: ..., "/", name="route2")
 
-    group_a = Blueprint.group(
-        bp1, bp2, url_prefix="/group-a", name_prefix="group-a"
-    )
-    group_b = Blueprint.group(
-        bp1, bp2, url_prefix="/group-b", name_prefix="group-b"
-    )
+    group_a = Blueprint.group(bp1, bp2, url_prefix="/group-a", name_prefix="group-a")
+    group_b = Blueprint.group(bp1, bp2, url_prefix="/group-b", name_prefix="group-b")
 
     app = Sanic("PropTest")
     app.blueprint(group_a)
@@ -751,9 +751,7 @@ def test_bp_group_with_default_url_prefix(app: Sanic):
     def api_v1_info(request):
         return text("api_version: v1")
 
-    bp_api_v1_group = Blueprint.group(
-        bp_api_v1, bp_resources_group, url_prefix="/v1"
-    )
+    bp_api_v1_group = Blueprint.group(bp_api_v1, bp_resources_group, url_prefix="/v1")
     bp_api_group = Blueprint.group(bp_api_v1_group, url_prefix="/api")
     app.blueprint(bp_api_group)
 
@@ -794,9 +792,7 @@ def test_blueprint_middleware_with_args(app: Sanic):
     )
     assert response.text == "value"
 
-    _, response = app.test_client.get(
-        "/wa", headers={"content-type": "plain/text"}
-    )
+    _, response = app.test_client.get("/wa", headers={"content-type": "plain/text"})
     assert response.json.get("test") == "value"
 
 
@@ -985,13 +981,9 @@ def test_blueprint_group_strict_slashes():
     app = Sanic(name="blueprint-group-test")
     bp1 = Blueprint(name="bp1", url_prefix=None, strict_slashes=False)
 
-    bp2 = Blueprint(
-        name="bp2", version=3, url_prefix="/bp2", strict_slashes=None
-    )
+    bp2 = Blueprint(name="bp2", version=3, url_prefix="/bp2", strict_slashes=None)
 
-    bp3 = Blueprint(
-        name="bp3", version=None, url_prefix="/bp3/", strict_slashes=None
-    )
+    bp3 = Blueprint(name="bp3", version=None, url_prefix="/bp3/", strict_slashes=None)
 
     @bp1.get("/r1")
     async def bp1_r1(request):
@@ -133,9 +133,7 @@ def test_add_converter_multiple_times(caplog):
     def converter():
         ...
 
-    message = (
-        "Configuration value converter 'converter' has already been registered"
-    )
+    message = "Configuration value converter 'converter' has already been registered"
     config = Config()
     config.register_type(converter)
     with caplog.at_level(logging.WARNING):
@@ -306,14 +304,10 @@ async def test_config_access_log_passing_in_create_server(app: Sanic):
     async def _request(sanic, loop):
         app.stop()
 
-    await app.create_server(
-        port=1341, access_log=False, return_asyncio_server=True
-    )
+    await app.create_server(port=1341, access_log=False, return_asyncio_server=True)
     assert app.config.ACCESS_LOG is False
 
-    await app.create_server(
-        port=1342, access_log=True, return_asyncio_server=True
-    )
+    await app.create_server(port=1342, access_log=True, return_asyncio_server=True)
     assert app.config.ACCESS_LOG is True
 
 
@@ -334,9 +328,7 @@ def test_config_rewrite_keep_alive():
 
 _test_setting_as_dict = {"TEST_SETTING_VALUE": 1}
 _test_setting_as_class = type("C", (), {"TEST_SETTING_VALUE": 1})
-_test_setting_as_module = str(
-    Path(__file__).parent / "static/app_test_config.py"
-)
+_test_setting_as_module = str(Path(__file__).parent / "static/app_test_config.py")
 
 
 @pytest.mark.parametrize(
@@ -42,9 +42,7 @@ async def test_cookies_asgi(app):
         response.cookies["right_back"] = "at you"
         return response
 
-    request, response = await app.asgi_client.get(
-        "/", cookies={"test": "working!"}
-    )
+    request, response = await app.asgi_client.get("/", cookies={"test": "working!"})
     response_cookies = SimpleCookie()
     response_cookies.load(response.headers.get("set-cookie", {}))
 
@@ -101,9 +99,7 @@ def test_cookie_options(app):
         response = text("OK")
         response.cookies["test"] = "at you"
         response.cookies["test"]["httponly"] = True
-        response.cookies["test"]["expires"] = datetime.now() + timedelta(
-            seconds=10
-        )
+        response.cookies["test"]["expires"] = datetime.now() + timedelta(seconds=10)
         return response
 
     request, response = app.test_client.get("/")
@@ -179,17 +175,11 @@ def test_cookie_max_age(app, max_age):
         response.cookies["test"]["max-age"] = max_age
         return response
 
-    request, response = app.test_client.get(
-        "/", cookies=cookies, raw_cookies=True
-    )
+    request, response = app.test_client.get("/", cookies=cookies, raw_cookies=True)
     assert response.status == 200
 
     cookie = response.cookies.get("test")
-    if (
-        str(max_age).isdigit()
-        and int(max_age) == float(max_age)
-        and int(max_age) != 0
-    ):
+    if str(max_age).isdigit() and int(max_age) == float(max_age) and int(max_age) != 0:
         cookie_expires = datetime.utcfromtimestamp(
             response.raw_cookies["test"].expires
         ).replace(microsecond=0)
@@ -202,9 +192,8 @@ def test_cookie_max_age(app, max_age):
         )
 
         assert cookie == "pass"
-        assert (
-            cookie_expires == expires
-            or cookie_expires == expires + timedelta(seconds=-1)
+        assert cookie_expires == expires or cookie_expires == expires + timedelta(
+            seconds=-1
         )
     else:
         assert cookie is None
@@ -221,9 +210,7 @@ def test_cookie_bad_max_age(app, max_age):
         response.cookies["test"]["max-age"] = max_age
         return response
 
-    request, response = app.test_client.get(
-        "/", cookies=cookies, raw_cookies=True
-    )
+    request, response = app.test_client.get("/", cookies=cookies, raw_cookies=True)
     assert response.status == 500
 
 
@@ -239,9 +226,7 @@ def test_cookie_expires(app: Sanic, expires: timedelta):
         response.cookies["test"]["expires"] = expires_time
         return response
 
-    request, response = app.test_client.get(
-        "/", cookies=cookies, raw_cookies=True
-    )
+    request, response = app.test_client.get("/", cookies=cookies, raw_cookies=True)
 
     cookie_expires = datetime.utcfromtimestamp(
         response.raw_cookies["test"].expires
|
@ -399,17 +384,13 @@ def test_bad_cookie_prarms():
|
|||
ServerError,
|
||||
match="Cannot set host_prefix on a cookie unless path='/'",
|
||||
):
|
||||
jar.add_cookie(
|
||||
"foo", "bar", host_prefix=True, secure=True, path="/foo"
|
||||
)
|
||||
jar.add_cookie("foo", "bar", host_prefix=True, secure=True, path="/foo")
|
||||
|
||||
with pytest.raises(
|
||||
ServerError,
|
||||
match="Cannot set host_prefix on a cookie with a defined domain",
|
||||
):
|
||||
jar.add_cookie(
|
||||
"foo", "bar", host_prefix=True, secure=True, domain="foo.bar"
|
||||
)
|
||||
jar.add_cookie("foo", "bar", host_prefix=True, secure=True, domain="foo.bar")
|
||||
|
||||
with pytest.raises(
|
||||
ServerError,
|
||||
|
|
Some files were not shown because too many files have changed in this diff.