Implemented HTTP caching and updates on wwwroot; page loads are much faster.
This commit is contained in:
parent
ba36eaec1b
commit
bdc0bbd44f
71
cista/app.py
71
cista/app.py
|
@@ -2,15 +2,21 @@ import mimetypes
|
||||||
from importlib.resources import files
|
from importlib.resources import files
|
||||||
from urllib.parse import unquote
|
from urllib.parse import unquote
|
||||||
|
|
||||||
|
import sanic.helpers
|
||||||
import asyncio
|
import asyncio
|
||||||
import brotli
|
import brotli
|
||||||
from sanic import Blueprint, Sanic, raw
|
from blake3 import blake3
|
||||||
|
from sanic import Blueprint, Sanic, raw, empty
|
||||||
from sanic.exceptions import Forbidden, NotFound
|
from sanic.exceptions import Forbidden, NotFound
|
||||||
|
from wsgiref.handlers import format_date_time
|
||||||
|
|
||||||
from cista import auth, config, session, watching
|
from cista import auth, config, session, watching
|
||||||
from cista.api import bp
|
from cista.api import bp
|
||||||
from cista.util.apphelpers import handle_sanic_exception
|
from cista.util.apphelpers import handle_sanic_exception
|
||||||
|
|
||||||
|
# Workaround until Sanic PR #2824 is merged
|
||||||
|
sanic.helpers._ENTITY_HEADERS = frozenset()
|
||||||
|
|
||||||
app = Sanic("cista", strict_slashes=True)
|
app = Sanic("cista", strict_slashes=True)
|
||||||
app.blueprint(auth.bp)
|
app.blueprint(auth.bp)
|
||||||
app.blueprint(bp)
|
app.blueprint(bp)
|
||||||
|
@@ -63,8 +69,12 @@ www = {}
|
||||||
|
|
||||||
|
|
||||||
@app.before_server_start
|
@app.before_server_start
|
||||||
def load_wwwroot(app):
|
async def load_wwwroot(*_ignored):
|
||||||
global www
|
global www
|
||||||
|
www = await asyncio.get_event_loop().run_in_executor(None, _load_wwwroot, www)
|
||||||
|
|
||||||
|
|
||||||
|
def _load_wwwroot(www):
|
||||||
wwwnew = {}
|
wwwnew = {}
|
||||||
base = files("cista") / "wwwroot"
|
base = files("cista") / "wwwroot"
|
||||||
paths = ["."]
|
paths = ["."]
|
||||||
|
@@ -77,36 +87,71 @@ def load_wwwroot(app):
|
||||||
continue
|
continue
|
||||||
name = p.relative_to(base).as_posix()
|
name = p.relative_to(base).as_posix()
|
||||||
mime = mimetypes.guess_type(name)[0] or "application/octet-stream"
|
mime = mimetypes.guess_type(name)[0] or "application/octet-stream"
|
||||||
|
mtime = p.stat().st_mtime
|
||||||
data = p.read_bytes()
|
data = p.read_bytes()
|
||||||
|
etag = blake3(data).hexdigest(length=8)
|
||||||
|
if name == "index.html":
|
||||||
|
name = ""
|
||||||
# Use old data if not changed
|
# Use old data if not changed
|
||||||
if name in www and www[name][0] == data:
|
if name in www and www[name][2]["etag"] == etag:
|
||||||
wwwnew[name] = www[name]
|
wwwnew[name] = www[name]
|
||||||
continue
|
continue
|
||||||
|
cached = name.startswith("assets/")
|
||||||
|
headers = {
|
||||||
|
"etag": etag,
|
||||||
|
"last-modified": format_date_time(mtime),
|
||||||
|
"cache-control": "max-age=31536000, immutable"
|
||||||
|
if cached
|
||||||
|
else "no-cache",
|
||||||
|
"content-type": mime,
|
||||||
|
}
|
||||||
# Precompress with Brotli
|
# Precompress with Brotli
|
||||||
br = brotli.compress(data)
|
br = brotli.compress(data)
|
||||||
if len(br) >= len(data):
|
if len(br) >= len(data):
|
||||||
br = False
|
br = False
|
||||||
wwwnew[name] = data, br, mime
|
wwwnew[name] = data, br, headers
|
||||||
www = wwwnew
|
return wwwnew
|
||||||
|
|
||||||
|
|
||||||
@app.add_task
|
@app.add_task
|
||||||
async def refresh_wwwroot():
|
async def refresh_wwwroot():
|
||||||
while app.debug:
|
while True:
|
||||||
|
try:
|
||||||
|
wwwold = www
|
||||||
|
await load_wwwroot()
|
||||||
|
changes = ""
|
||||||
|
for name in sorted(www):
|
||||||
|
attr = www[name]
|
||||||
|
if wwwold.get(name) == attr:
|
||||||
|
continue
|
||||||
|
headers = attr[2]
|
||||||
|
changes += f"{headers['last-modified']} {headers['etag']} /{name}\n"
|
||||||
|
for name in sorted(set(wwwold) - set(www)):
|
||||||
|
changes += f"Deleted /{name}\n"
|
||||||
|
if changes:
|
||||||
|
print(f"Updated wwwroot:\n{changes}", end="", flush=True)
|
||||||
|
except Exception as e:
|
||||||
|
print("Error loading wwwroot", e)
|
||||||
|
if not app.debug:
|
||||||
|
return
|
||||||
await asyncio.sleep(0.5)
|
await asyncio.sleep(0.5)
|
||||||
load_wwwroot(app)
|
|
||||||
|
|
||||||
|
|
||||||
@app.get("/<path:path>", static=True)
|
@app.route("/<path:path>", methods=["GET", "HEAD"])
|
||||||
async def wwwroot(req, path=""):
|
async def wwwroot(req, path=""):
|
||||||
"""Frontend files only"""
|
"""Frontend files only"""
|
||||||
name = unquote(path) or "index.html"
|
name = unquote(path)
|
||||||
if name not in www:
|
if name not in www:
|
||||||
raise NotFound(f"File not found: /{path}", extra={"name": name})
|
raise NotFound(f"File not found: /{path}", extra={"name": name})
|
||||||
data, br, mime = www[name]
|
data, br, headers = www[name]
|
||||||
headers = {}
|
if req.headers.if_none_match == headers["etag"]:
|
||||||
|
# The client has it cached, respond 304 Not Modified
|
||||||
|
return empty(304, headers=headers)
|
||||||
# Brotli compressed?
|
# Brotli compressed?
|
||||||
if br and "br" in req.headers.accept_encoding.split(", "):
|
if br and "br" in req.headers.accept_encoding.split(", "):
|
||||||
headers["content-encoding"] = "br"
|
headers = {
|
||||||
|
**headers,
|
||||||
|
"content-encoding": "br",
|
||||||
|
}
|
||||||
data = br
|
data = br
|
||||||
return raw(data, content_type=mime, headers=headers)
|
return raw(data, headers=headers)
|
||||||
|
|
|
@@ -15,6 +15,7 @@ classifiers = [
|
||||||
]
|
]
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"argon2-cffi",
|
"argon2-cffi",
|
||||||
|
"blake3",
|
||||||
"brotli",
|
"brotli",
|
||||||
"docopt",
|
"docopt",
|
||||||
"inotify",
|
"inotify",
|
||||||
|
|
Loading…
Reference in New Issue
Block a user