Fix server exit hang on macOS, cleanup, include directories in zips, natural sort.

Leo Vasanko 2023-11-07 16:54:46 -08:00
parent 63f6008a0a
commit 37167a41a6
6 changed files with 37 additions and 33 deletions

View File

@@ -55,11 +55,16 @@ async function sendChunk(file: File, start: number, end: number) {
   }
 }
 
-async function uploadFileChangeHandler(event: Event) {
+async function uploadHandler(event: Event) {
   const target = event.target as HTMLInputElement
   const chunkSize = 1 << 20
-  if (target && target.files && target.files.length > 0) {
-    const file = target.files[0]
+  if (!target?.files?.length) {
+    documentStore.error = 'No files selected'
+    return
+  }
+  for (const idx in target.files) {
+    const file = target.files[idx]
+    console.log('Uploading', file)
     const numChunks = Math.ceil(file.size / chunkSize)
     const document = documentStore.pushUploadingDocuments(file.name)
     open('bottomRight')
@@ -78,14 +83,14 @@ async function uploadFileChangeHandler(event: Event) {
 <template>
   <input
     ref="fileUploadButton"
-    @change="uploadFileChangeHandler"
+    @change="uploadHandler"
     class="upload-input"
     type="file"
     multiple
   />
   <input
     ref="folderUploadButton"
-    @change="uploadFileChangeHandler"
+    @change="uploadHandler"
     class="upload-input"
     type="file"
     webkitdirectory

View File

@@ -1,16 +1,22 @@
 import asyncio
+import datetime
 import mimetypes
+from collections import deque
 from concurrent.futures import ThreadPoolExecutor
 from importlib.resources import files
+from pathlib import Path
+from stat import S_IFDIR, S_IFREG
 from urllib.parse import unquote
 from wsgiref.handlers import format_date_time
 
 import brotli
 import sanic.helpers
 from blake3 import blake3
+from natsort import natsorted, ns
 from sanic import Blueprint, Sanic, empty, raw
 from sanic.exceptions import Forbidden, NotFound
 from sanic.log import logging
+from stream_zip import ZIP_AUTO, stream_zip
 
 from cista import auth, config, session, watching
 from cista.api import bp
@@ -168,14 +174,6 @@ async def wwwroot(req, path=""):
     return raw(data, headers=headers)
 
 
-import datetime
-from collections import deque
-from pathlib import Path
-from stat import S_IFREG
-
-from stream_zip import ZIP_AUTO, stream_zip
-
-
 @app.get("/zip/<keys>/<zipfile:ext=zip>")
 async def zip_download(req, keys, zipfile, ext):
     """Download a zip archive of the given keys"""
@@ -191,16 +189,12 @@ async def zip_download(req, keys, zipfile, ext):
             if relpar or attr.key in wanted:
                 rel = [*relpar, name] if relpar else [name]
                 wanted.discard(attr.key)
-                if isinstance(attr, DirEntry):
+                isdir = isinstance(attr, DirEntry)
+                if isdir:
                     q.append((loc, rel, attr.dir))
-                elif rel:
+                if rel:
                     files.append(
-                        (
-                            "/".join(rel),
-                            Path(watching.rootpath.joinpath(*loc)),
-                            attr.mtime,
-                            attr.size,
-                        )
+                        ("/".join(rel), Path(watching.rootpath.joinpath(*loc)))
                     )
 
     if not files:
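Note on this hunk (not part of the diff): the loop walks cista's in-memory tree with an explicit queue and now records directories as well as files, which is what lets empty folders appear in the archive. A minimal sketch of the same pattern over a plain nested dict; the names and the dict-based tree are illustrative, not cista's actual DirEntry/FileEntry types:

from collections import deque
from pathlib import Path


def collect_entries(root: Path, tree: dict) -> list[tuple[str, Path]]:
    """Iterative walk of a nested dict {name: subdict or None},
    returning (archive_name, local_path) for directories and files alike."""
    entries = []
    q = deque([([], tree)])  # (path parts so far, children mapping)
    while q:
        parts, children = q.pop()
        for name, sub in children.items():
            nparts = [*parts, name]
            if isinstance(sub, dict):  # a directory: record it and descend
                q.append((nparts, sub))
            entries.append(("/".join(nparts), root.joinpath(*nparts)))
    return entries

Every directory produces its own entry here, so empty folders survive the trip into the zip.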
@@ -211,14 +205,17 @@ async def zip_download(req, keys, zipfile, ext):
     if wanted:
         raise NotFound("Files not found", context={"missing": wanted})
 
-    for rel, p, mtime, size in files:
-        if not p.is_file():
-            raise NotFound(f"File not found {rel}")
+    files = natsorted(files, key=lambda f: f[0], alg=ns.IGNORECASE)
 
     def local_files(files):
-        for rel, p, mtime, size in files:
-            modified = datetime.datetime.fromtimestamp(mtime, datetime.UTC)
-            yield rel, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p)
+        for rel, p in files:
+            s = p.stat()
+            size = s.st_size
+            modified = datetime.datetime.fromtimestamp(s.st_mtime, datetime.UTC)
+            if p.is_dir():
+                yield rel, modified, S_IFDIR | 0o755, ZIP_AUTO(size), b""
+            else:
+                yield rel, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p)
 
     def contents(name):
         with name.open("rb") as f:
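For reference, stream_zip consumes an iterable of (name, modified, mode, method, chunks) tuples and yields the archive as a stream of bytes; directory members get S_IFDIR and no contents, regular files get S_IFREG and a chunked reader. A self-contained sketch of the generator above with natural sorting; the chunk size and helper names are illustrative:

import datetime
from pathlib import Path
from stat import S_IFDIR, S_IFREG

from natsort import natsorted, ns
from stream_zip import ZIP_AUTO, stream_zip


def read_chunks(path: Path, size: int = 65536):
    with path.open("rb") as f:
        while chunk := f.read(size):
            yield chunk


def zip_members(pairs: list[tuple[str, Path]]):
    # Sort case-insensitively and numerically ("file2" before "file10").
    for rel, p in natsorted(pairs, key=lambda f: f[0], alg=ns.IGNORECASE):
        s = p.stat()
        modified = datetime.datetime.fromtimestamp(s.st_mtime, datetime.UTC)
        if p.is_dir():
            yield rel, modified, S_IFDIR | 0o755, ZIP_AUTO(0), b""
        else:
            yield rel, modified, S_IFREG | 0o644, ZIP_AUTO(s.st_size), read_chunks(p)


# stream_zip(zip_members(pairs)) then yields bytes chunks suitable for a
# streaming HTTP response.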

View File

@@ -71,7 +71,7 @@ def verify(request, *, privileged=False):
             raise Forbidden("Access Forbidden: Only for privileged users")
     elif config.config.public or request.ctx.user:
         return
-    raise Unauthorized("Login required", "cookie", context={"redirect": "/login"})
+    raise Unauthorized("Login required", "cookie")
 
 
 bp = Blueprint("auth")

View File

@@ -30,7 +30,10 @@ def run(*, dev=False):
         reload_dir={confdir, wwwroot},
         access_log=True,
     )  # type: ignore
-    Sanic.serve()
+    if dev:
+        Sanic.serve()
+    else:
+        Sanic.serve_single()
 
 
 def check_cert(certdir, domain):
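In dev mode Sanic's multi-process server is kept for auto-reload; otherwise the app is served in a single process, which is what avoids the hang on exit seen on macOS. A rough sketch of the pattern, assuming Sanic's app.prepare() / Sanic.serve() / Sanic.serve_single() entry points; the host, port and app name are illustrative:

from sanic import Sanic


def run(*, dev: bool = False) -> None:
    app = Sanic.get_app("cista")  # illustrative app name
    app.prepare(host="localhost", port=8000, dev=dev)
    if dev:
        Sanic.serve()         # worker manager: auto-reload, multiple processes
    else:
        Sanic.serve_single()  # single process, shuts down cleanly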

View File

@@ -40,7 +40,6 @@ def watcher_thread(loop):
         with tree_lock:
             # Initialize the tree from filesystem
             tree[""] = walk(rootpath)
-            print(" ".join(tree[""].dir.keys()))
             msg = format_tree()
             if msg != old:
                 asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
@@ -78,13 +77,12 @@ def watcher_thread(loop):
 
 def watcher_thread_poll(loop):
     global disk_usage, rootpath
-    while True:
+    while not quit:
         rootpath = config.config.path
         old = format_tree() if tree[""] else None
         with tree_lock:
             # Initialize the tree from filesystem
             tree[""] = walk(rootpath)
-            print(" ".join(tree[""].dir.keys()))
             msg = format_tree()
             if msg != old:
                 asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
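The polling watcher now re-checks a quit flag on each round instead of looping forever, so the thread can stop when the server shuts down; together with single-process serving this lets the process exit instead of hanging. A generic sketch of the pattern with threading.Event; the flag and interval names are illustrative, not cista's actual globals:

import threading

quit_event = threading.Event()


def watcher_thread_poll(loop, interval: float = 2.0) -> None:
    while not quit_event.is_set():
        # ... rescan the filesystem and push changes into the asyncio loop
        # with asyncio.run_coroutine_threadsafe(broadcast(msg), loop) ...
        quit_event.wait(interval)  # sleep, but wake immediately on shutdown


def stop_watcher() -> None:
    quit_event.set()  # call from the server's shutdown handler, then join the thread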

View File

@@ -20,11 +20,12 @@ dependencies = [
     "docopt",
     "inotify",
     "msgspec",
+    "natsort",
     "pathvalidate",
     "pyjwt",
     "sanic",
-    "tomli_w",
     "stream-zip",
+    "tomli_w",
 ]
 
 [project.urls]