More efficient flat file list format and various UX improvements (#3)

This is a major upgrade bundling a number of related improvements.

- Improved navigation flows: the search query now appears in URL history and is cleared when navigating to another folder
- More efficient flat file list format for faster loads
- Efficient updates: the full root listing is sent only at connection; subsequent changes arrive as small diffs
- Numerous watching and file list update fixes (inotify issues remain)
- File size coloring
- Fixed ZIP generation random glitches (thread race condition)
- Code refactoring, cleanup, typing fixes
- More tests

Reviewed-on: #3
Author: Leo Vasanko, 2023-11-12 23:20:40 +00:00
Parent: fb03fa5430
Commit: 9854dd01cc
101 changed files with 728 additions and 527 deletions

View File

@ -1,6 +1,7 @@
# Web File Storage # Web File Storage
Run directly from repository with Hatch (or use pip install as usual): Run directly from repository with Hatch (or use pip install as usual):
```sh ```sh
hatch run cista -l :3000 /path/to/files hatch run cista -l :3000 /path/to/files
``` ```
@ -8,16 +9,17 @@ hatch run cista -l :3000 /path/to/files
Settings incl. these arguments are stored to config file on the first startup and later `hatch run cista` is sufficient. If the `cista` script is missing, consider `pip install -e .` (within `hatch shell`) or some other trickery (known issue with installs made prior to adding the startup script). Settings incl. these arguments are stored to config file on the first startup and later `hatch run cista` is sufficient. If the `cista` script is missing, consider `pip install -e .` (within `hatch shell`) or some other trickery (known issue with installs made prior to adding the startup script).
Create your user account: Create your user account:
```sh ```sh
hatch run cista --user admin --privileged hatch run cista --user admin --privileged
``` ```
## Build frontend ## Build frontend
Prebuilt frontend is provided in repository but for any changes it will need to be manually rebuilt: Frontend needs to be built before using and after any frontend changes:
```sh ```sh
cd cista-front cd frontend
npm install npm install
npm run build npm run build
``` ```

View File

@ -1,52 +0,0 @@
<template>
<object
v-if="props.type === 'pdf'"
:data="dataURL"
type="application/pdf"
width="100%"
height="100%"
></object>
<a-image
v-else-if="props.type === 'image'"
width="50%"
:src="dataURL"
@click="() => setVisible(true)"
:previewMask="false"
:preview="{
visibleImg,
onVisibleChange: setVisible
}"
/>
<!-- Unknown case -->
<h1 v-else>Unsupported file type</h1>
</template>
<script setup lang="ts">
import { watchEffect, ref } from 'vue'
import Router from '@/router/index'
import { url_document_get } from '@/repositories/Document'
const dataURL = ref('')
watchEffect(() => {
dataURL.value = new URL(
url_document_get + Router.currentRoute.value.path,
location.origin
).toString()
})
const emit = defineEmits({
visibleImg(value: boolean) {
return value
}
})
function setVisible(value: boolean) {
emit('visibleImg', value)
}
const props = defineProps<{
type?: string
visibleImg: boolean
}>()
</script>
<style></style>

View File

@ -1,27 +0,0 @@
<template>
<template v-for="upload in documentStore.uploadingDocuments" :key="upload.key">
<span>{{ upload.name }}</span>
<div class="progress-container">
<a-progress :percent="upload.progress" />
<CloseCircleOutlined class="close-button" @click="dismissUpload(upload.key)" />
</div>
</template>
</template>
<script setup lang="ts">
import { useDocumentStore } from '@/stores/documents'
const documentStore = useDocumentStore()
function dismissUpload(key: number) {
documentStore.deleteUploadingDocument(key)
}
</script>
<style scoped>
.progress-container {
display: flex;
align-items: center;
}
.close-button:hover {
color: #b81414;
}
</style>

View File

@ -104,11 +104,11 @@ async def watch(req, ws):
) )
uuid = token_bytes(16) uuid = token_bytes(16)
try: try:
with watching.tree_lock: with watching.state.lock:
q = watching.pubsub[uuid] = asyncio.Queue() q = watching.pubsub[uuid] = asyncio.Queue()
# Init with disk usage and full tree # Init with disk usage and full tree
await ws.send(watching.format_du()) await ws.send(watching.format_space(watching.state.space))
await ws.send(watching.format_tree()) await ws.send(watching.format_root(watching.state.root))
# Send updates # Send updates
while True: while True:
await ws.send(await q.get()) await ws.send(await q.get())
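
For reference, a client of the /api/watch endpoint above first receives the disk space message and the full root listing, then queued update messages. A minimal sketch of such a client, assuming the third-party `websockets` package and a server at localhost:3000 (both assumptions, not part of this commit):

```python
import asyncio
import json

import websockets  # assumed third-party dependency, not part of this commit


async def watch(url: str = "ws://localhost:3000/api/watch"):
    # The handler above sends {"space": ...} first, then {"root": [...]},
    # and after that one {"update": [...]} message per change batch.
    async with websockets.connect(url) as ws:
        space = json.loads(await ws.recv())["space"]
        root = json.loads(await ws.recv())["root"]
        print("storage bytes:", space["storage"], "entries:", len(root))
        async for msg in ws:
            ops = json.loads(msg).get("update", [])
            print("update with", len(ops), "operations")


if __name__ == "__main__":
    asyncio.run(watch())
```
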

View File

@ -1,10 +1,8 @@
import asyncio import asyncio
import datetime import datetime
import mimetypes import mimetypes
from collections import deque
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
from importlib.resources import files from pathlib import Path, PurePath, PurePosixPath
from pathlib import Path
from stat import S_IFDIR, S_IFREG from stat import S_IFDIR, S_IFREG
from urllib.parse import unquote from urllib.parse import unquote
from wsgiref.handlers import format_date_time from wsgiref.handlers import format_date_time
@ -12,15 +10,13 @@ from wsgiref.handlers import format_date_time
import brotli import brotli
import sanic.helpers import sanic.helpers
from blake3 import blake3 from blake3 import blake3
from natsort import natsorted, ns
from sanic import Blueprint, Sanic, empty, raw from sanic import Blueprint, Sanic, empty, raw
from sanic.exceptions import Forbidden, NotFound from sanic.exceptions import Forbidden, NotFound, ServerError
from sanic.log import logging from sanic.log import logging
from stream_zip import ZIP_AUTO, stream_zip from stream_zip import ZIP_AUTO, stream_zip
from cista import auth, config, session, watching from cista import auth, config, session, watching
from cista.api import bp from cista.api import bp
from cista.protocol import DirEntry
from cista.util.apphelpers import handle_sanic_exception from cista.util.apphelpers import handle_sanic_exception
# Workaround until Sanic PR #2824 is merged # Workaround until Sanic PR #2824 is merged
@ -36,7 +32,9 @@ app.exception(Exception)(handle_sanic_exception)
async def main_start(app, loop): async def main_start(app, loop):
config.load_config() config.load_config()
await watching.start(app, loop) await watching.start(app, loop)
app.ctx.threadexec = ThreadPoolExecutor(max_workers=8) app.ctx.threadexec = ThreadPoolExecutor(
max_workers=8, thread_name_prefix="cista-ioworker"
)
@app.after_server_stop @app.after_server_stop
@ -49,8 +47,8 @@ async def main_stop(app, loop):
async def use_session(req): async def use_session(req):
req.ctx.session = session.get(req) req.ctx.session = session.get(req)
try: try:
req.ctx.username = req.ctx.session["username"] req.ctx.username = req.ctx.session["username"] # type: ignore
req.ctx.user = config.config.users[req.ctx.session["username"]] # type: ignore req.ctx.user = config.config.users[req.ctx.username]
except (AttributeError, KeyError, TypeError): except (AttributeError, KeyError, TypeError):
req.ctx.username = None req.ctx.username = None
req.ctx.user = None req.ctx.user = None
@ -81,22 +79,16 @@ def http_fileserver(app, _):
www = {} www = {}
@app.before_server_start
async def load_wwwroot(*_ignored):
global www
www = await asyncio.get_event_loop().run_in_executor(None, _load_wwwroot, www)
def _load_wwwroot(www): def _load_wwwroot(www):
wwwnew = {} wwwnew = {}
base = files("cista") / "wwwroot" base = Path(__file__).with_name("wwwroot")
paths = ["."] paths = [PurePath()]
while paths: while paths:
path = paths.pop(0) path = paths.pop(0)
current = base / path current = base / path
for p in current.iterdir(): for p in current.iterdir():
if p.is_dir(): if p.is_dir():
paths.append(current / p.parts[-1]) paths.append(p.relative_to(base))
continue continue
name = p.relative_to(base).as_posix() name = p.relative_to(base).as_posix()
mime = mimetypes.guess_type(name)[0] or "application/octet-stream" mime = mimetypes.guess_type(name)[0] or "application/octet-stream"
@ -127,15 +119,35 @@ def _load_wwwroot(www):
if len(br) >= len(data): if len(br) >= len(data):
br = False br = False
wwwnew[name] = data, br, headers wwwnew[name] = data, br, headers
if not wwwnew:
raise ServerError(
"Web frontend missing. Did you forget npm run build?",
extra={"wwwroot": str(base)},
quiet=True,
)
return wwwnew return wwwnew
@app.add_task @app.before_server_start
async def start(app):
await load_wwwroot(app)
if app.debug:
app.add_task(refresh_wwwroot())
async def load_wwwroot(app):
global www
www = await asyncio.get_event_loop().run_in_executor(
app.ctx.threadexec, _load_wwwroot, www
)
async def refresh_wwwroot(): async def refresh_wwwroot():
while True: while True:
await asyncio.sleep(0.5)
try: try:
wwwold = www wwwold = www
await load_wwwroot() await load_wwwroot(app)
changes = "" changes = ""
for name in sorted(www): for name in sorted(www):
attr = www[name] attr = www[name]
@ -151,7 +163,6 @@ async def refresh_wwwroot():
print("Error loading wwwroot", e) print("Error loading wwwroot", e)
if not app.debug: if not app.debug:
return return
await asyncio.sleep(0.5)
@app.route("/<path:path>", methods=["GET", "HEAD"]) @app.route("/<path:path>", methods=["GET", "HEAD"])
@ -166,66 +177,70 @@ async def wwwroot(req, path=""):
return empty(304, headers=headers) return empty(304, headers=headers)
# Brotli compressed? # Brotli compressed?
if br and "br" in req.headers.accept_encoding.split(", "): if br and "br" in req.headers.accept_encoding.split(", "):
headers = { headers = {**headers, "content-encoding": "br"}
**headers,
"content-encoding": "br",
}
data = br data = br
return raw(data, headers=headers) return raw(data, headers=headers)
def get_files(wanted: set) -> list[tuple[PurePosixPath, Path]]:
loc = PurePosixPath()
idx = 0
ret = []
level: int | None = None
parent: PurePosixPath | None = None
with watching.state.lock:
root = watching.state.root
while idx < len(root):
f = root[idx]
loc = PurePosixPath(*loc.parts[: f.level - 1]) / f.name
if parent is not None and f.level <= level:
level = parent = None
if f.key in wanted:
level, parent = f.level, loc.parent
if parent is not None:
wanted.discard(f.key)
ret.append((loc.relative_to(parent), watching.rootpath / loc))
idx += 1
return ret
@app.get("/zip/<keys>/<zipfile:ext=zip>") @app.get("/zip/<keys>/<zipfile:ext=zip>")
async def zip_download(req, keys, zipfile, ext): async def zip_download(req, keys, zipfile, ext):
"""Download a zip archive of the given keys""" """Download a zip archive of the given keys"""
wanted = set(keys.split("+")) wanted = set(keys.split("+"))
with watching.tree_lock: files = get_files(wanted)
q = deque([([], None, watching.tree[""].dir)])
files = []
while q:
locpar, relpar, d = q.pop()
for name, attr in d.items():
loc = [*locpar, name]
rel = None
if relpar or attr.key in wanted:
rel = [*relpar, name] if relpar else [name]
wanted.discard(attr.key)
isdir = isinstance(attr, DirEntry)
if isdir:
q.append((loc, rel, attr.dir))
if rel:
files.append(
("/".join(rel), Path(watching.rootpath.joinpath(*loc)))
)
if not files: if not files:
raise NotFound( raise NotFound(
"No files found", "No files found",
context={"keys": keys, "zipfile": zipfile, "wanted": wanted}, context={"keys": keys, "zipfile": f"{zipfile}.{ext}", "wanted": wanted},
) )
if wanted: if wanted:
raise NotFound("Files not found", context={"missing": wanted}) raise NotFound("Files not found", context={"missing": wanted})
files = natsorted(files, key=lambda f: f[0], alg=ns.IGNORECASE)
def local_files(files): def local_files(files):
for rel, p in files: for rel, p in files:
s = p.stat() s = p.stat()
size = s.st_size size = s.st_size
modified = datetime.datetime.fromtimestamp(s.st_mtime, datetime.UTC) modified = datetime.datetime.fromtimestamp(s.st_mtime, datetime.UTC)
name = rel.as_posix()
if p.is_dir(): if p.is_dir():
yield rel, modified, S_IFDIR | 0o755, ZIP_AUTO(size), b"" yield f"{name}/", modified, S_IFDIR | 0o755, ZIP_AUTO(size), iter(b"")
else: else:
yield rel, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p) yield name, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p, size)
def contents(name): def contents(name, size):
with name.open("rb") as f: with name.open("rb") as f:
while chunk := f.read(65536): while size > 0 and (chunk := f.read(min(size, 1 << 20))):
size -= len(chunk)
yield chunk yield chunk
assert size == 0
def worker(): def worker():
try: try:
for chunk in stream_zip(local_files(files)): for chunk in stream_zip(local_files(files)):
asyncio.run_coroutine_threadsafe(queue.put(chunk), loop) asyncio.run_coroutine_threadsafe(queue.put(chunk), loop).result()
except Exception: except Exception:
logging.exception("Error streaming ZIP") logging.exception("Error streaming ZIP")
raise raise
@ -238,7 +253,10 @@ async def zip_download(req, keys, zipfile, ext):
thread = loop.run_in_executor(app.ctx.threadexec, worker) thread = loop.run_in_executor(app.ctx.threadexec, worker)
# Stream the response # Stream the response
res = await req.respond(content_type="application/zip") res = await req.respond(
content_type="application/zip",
headers={"cache-control": "no-store"},
)
while chunk := await queue.get(): while chunk := await queue.get():
await res.send(chunk) await res.send(chunk)

View File

@ -68,10 +68,10 @@ def verify(request, *, privileged=False):
if request.ctx.user: if request.ctx.user:
if request.ctx.user.privileged: if request.ctx.user.privileged:
return return
raise Forbidden("Access Forbidden: Only for privileged users") raise Forbidden("Access Forbidden: Only for privileged users", quiet=True)
elif config.config.public or request.ctx.user: elif config.config.public or request.ctx.user:
return return
raise Unauthorized("Login required", "cookie") raise Unauthorized("Login required", "cookie", quiet=True)
bp = Blueprint("auth") bp = Blueprint("auth")

View File

@ -112,47 +112,43 @@ class ErrorMsg(msgspec.Struct):
## Directory listings ## Directory listings
class FileEntry(msgspec.Struct): class FileEntry(msgspec.Struct, array_like=True):
key: str level: int
size: int
mtime: int
class DirEntry(msgspec.Struct):
key: str
size: int
mtime: int
dir: DirList
def __getitem__(self, name):
return self.dir[name]
def __setitem__(self, name, value):
self.dir[name] = value
def __contains__(self, name):
return name in self.dir
def __delitem__(self, name):
del self.dir[name]
@property
def props(self):
return {k: v for k, v in self.__struct_fields__ if k != "dir"}
DirList = dict[str, FileEntry | DirEntry]
class UpdateEntry(msgspec.Struct, omit_defaults=True):
"""Updates the named entry in the tree. Fields that are set replace old values. A list of entries recurses directories."""
name: str name: str
key: str key: str
deleted: bool = False mtime: int
size: int | None = None size: int
mtime: int | None = None isfile: int
dir: DirList | None = None
def __repr__(self):
return self.key or "FileEntry()"
class Update(msgspec.Struct, array_like=True):
...
class UpdKeep(Update, tag="k"):
count: int
class UpdDel(Update, tag="d"):
count: int
class UpdIns(Update, tag="i"):
items: list[FileEntry]
class UpdateMessage(msgspec.Struct):
update: list[UpdKeep | UpdDel | UpdIns]
class Space(msgspec.Struct):
disk: int
free: int
usage: int
storage: int
def make_dir_data(root): def make_dir_data(root):
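
The new protocol above serializes each FileEntry as a plain JSON array (`array_like=True`) and encodes update operations as short tagged arrays, which is what makes the flat file list cheap to send. A small sketch of the resulting wire format using msgspec as in the diff; the key values are made up for illustration:

```python
import msgspec


class FileEntry(msgspec.Struct, array_like=True):
    level: int
    name: str
    key: str
    mtime: int
    size: int
    isfile: int


class Update(msgspec.Struct, array_like=True):
    ...


class UpdKeep(Update, tag="k"):
    count: int


class UpdIns(Update, tag="i"):
    items: list[FileEntry]


root = [
    FileEntry(0, "", "r00t", 1699000000, 123, 0),           # hypothetical keys
    FileEntry(1, "readme.txt", "abcd1234", 1699000000, 123, 1),
]
print(msgspec.json.encode({"root": root}).decode())
# {"root":[[0,"","r00t",1699000000,123,0],[1,"readme.txt","abcd1234",1699000000,123,1]]}
print(msgspec.json.encode({"update": [UpdKeep(1), UpdIns([root[1]])]}).decode())
# {"update":[["k",1],["i",[[1,"readme.txt","abcd1234",1699000000,123,1]]]]}
```
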

View File

@ -1,20 +1,137 @@
import asyncio import asyncio
import shutil import shutil
import stat
import sys import sys
import threading import threading
import time import time
from os import stat_result
from pathlib import Path, PurePosixPath from pathlib import Path, PurePosixPath
import msgspec import msgspec
from natsort import humansorted, natsort_keygen, ns
from sanic.log import logging from sanic.log import logging
from cista import config from cista import config
from cista.fileio import fuid from cista.fileio import fuid
from cista.protocol import DirEntry, FileEntry, UpdateEntry from cista.protocol import FileEntry, Space, UpdDel, UpdIns, UpdKeep
pubsub = {} pubsub = {}
tree = {"": None} sortkey = natsort_keygen(alg=ns.LOCALE)
tree_lock = threading.Lock()
class State:
def __init__(self):
self.lock = threading.RLock()
self._space = Space(0, 0, 0, 0)
self._listing: list[FileEntry] = []
@property
def space(self):
with self.lock:
return self._space
@space.setter
def space(self, space):
with self.lock:
self._space = space
@property
def root(self) -> list[FileEntry]:
with self.lock:
return self._listing[:]
@root.setter
def root(self, listing: list[FileEntry]):
with self.lock:
self._listing = listing
def _slice(self, idx: PurePosixPath | tuple[PurePosixPath, int]):
relpath, relfile = idx if isinstance(idx, tuple) else (idx, 0)
begin, end = 0, len(self._listing)
level = 0
isfile = 0
# Special case for root
if not relpath.parts:
return slice(begin, end)
begin += 1
for part in relpath.parts:
level += 1
found = False
while begin < end:
entry = self._listing[begin]
if entry.level < level:
break
if entry.level == level:
if entry.name == part:
found = True
if level == len(relpath.parts):
isfile = relfile
else:
begin += 1
break
cmp = entry.isfile - isfile or sortkey(entry.name) > sortkey(part)
if cmp > 0:
break
begin += 1
if not found:
return slice(begin, begin)
# Found the starting point, now find the end of the slice
for end in range(begin + 1, len(self._listing) + 1):
if end == len(self._listing) or self._listing[end].level <= level:
break
return slice(begin, end)
def __getitem__(self, index: PurePosixPath | tuple[PurePosixPath, int]):
with self.lock:
return self._listing[self._slice(index)]
def __setitem__(
self, index: tuple[PurePosixPath, int], value: list[FileEntry]
) -> None:
rel, isfile = index
with self.lock:
if rel.parts:
parent = self._slice(rel.parent)
if parent.start == parent.stop:
raise ValueError(
f"Parent folder {rel.as_posix()} is missing for {rel.name}"
)
self._listing[self._slice(index)] = value
def __delitem__(self, relpath: PurePosixPath):
with self.lock:
del self._listing[self._slice(relpath)]
def _index(self, rel: PurePosixPath):
idx = 0
ret = []
def _dir(self, idx: int):
level = self._listing[idx].level + 1
end = len(self._listing)
idx += 1
ret = []
while idx < end and (r := self._listing[idx]).level >= level:
if r.level == level:
ret.append(idx)
return ret, idx
def update(self, rel: PurePosixPath, value: FileEntry):
begin = 0
parents = []
while self._listing[begin].level < len(rel.parts):
parents.append(begin)
state = State()
rootpath: Path = None # type: ignore rootpath: Path = None # type: ignore
quit = False quit = False
modified_flags = ( modified_flags = (
@ -26,23 +143,22 @@ modified_flags = (
"IN_MOVED_FROM", "IN_MOVED_FROM",
"IN_MOVED_TO", "IN_MOVED_TO",
) )
disk_usage = None
def watcher_thread(loop): def watcher_thread(loop):
global disk_usage, rootpath global rootpath
import inotify.adapters import inotify.adapters
while True: while True:
rootpath = config.config.path rootpath = config.config.path
i = inotify.adapters.InotifyTree(rootpath.as_posix()) i = inotify.adapters.InotifyTree(rootpath.as_posix())
old = format_tree() if tree[""] else None
with tree_lock:
# Initialize the tree from filesystem # Initialize the tree from filesystem
tree[""] = walk(rootpath) new = walk()
msg = format_tree() with state.lock:
if msg != old: old = state.root
asyncio.run_coroutine_threadsafe(broadcast(msg), loop) if old != new:
state.root = new
broadcast(format_update(old, new), loop)
# The watching is not entirely reliable, so do a full refresh every minute # The watching is not entirely reliable, so do a full refresh every minute
refreshdl = time.monotonic() + 60.0 refreshdl = time.monotonic() + 60.0
@ -52,9 +168,10 @@ def watcher_thread(loop):
return return
# Disk usage update # Disk usage update
du = shutil.disk_usage(rootpath) du = shutil.disk_usage(rootpath)
if du != disk_usage: space = Space(*du, storage=state.root[0].size)
disk_usage = du if space != state.space:
asyncio.run_coroutine_threadsafe(broadcast(format_du()), loop) state.space = space
broadcast(format_space(space), loop)
break break
# Do a full refresh? # Do a full refresh?
if time.monotonic() > refreshdl: if time.monotonic() > refreshdl:
@ -75,144 +192,141 @@ def watcher_thread(loop):
def watcher_thread_poll(loop): def watcher_thread_poll(loop):
global disk_usage, rootpath global rootpath
while not quit: while not quit:
rootpath = config.config.path rootpath = config.config.path
old = format_tree() if tree[""] else None new = walk()
with tree_lock: with state.lock:
# Initialize the tree from filesystem old = state.root
tree[""] = walk(rootpath) if old != new:
msg = format_tree() state.root = new
if msg != old: broadcast(format_update(old, new), loop)
asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
# Disk usage update # Disk usage update
du = shutil.disk_usage(rootpath) du = shutil.disk_usage(rootpath)
if du != disk_usage: space = Space(*du, storage=state.root[0].size)
disk_usage = du if space != state.space:
asyncio.run_coroutine_threadsafe(broadcast(format_du()), loop) state.space = space
broadcast(format_space(space), loop)
time.sleep(1.0) time.sleep(2.0)
def format_du(): def walk(rel=PurePosixPath()) -> list[FileEntry]: # noqa: B008
return msgspec.json.encode( path = rootpath / rel
{
"space": {
"disk": disk_usage.total,
"used": disk_usage.used,
"free": disk_usage.free,
"storage": tree[""].size,
},
},
).decode()
def format_tree():
root = tree[""]
return msgspec.json.encode({"root": root}).decode()
def walk(path: Path) -> DirEntry | FileEntry | None:
try: try:
s = path.stat() st = path.stat()
key = fuid(s) except OSError:
assert key, repr(key) return []
mtime = int(s.st_mtime) return _walk(rel, int(not stat.S_ISDIR(st.st_mode)), st)
if path.is_file():
return FileEntry(key, s.st_size, mtime)
tree = {
p.name: v def _walk(rel: PurePosixPath, isfile: int, st: stat_result) -> list[FileEntry]:
for p in path.iterdir() entry = FileEntry(
if not p.name.startswith(".") level=len(rel.parts),
if (v := walk(p)) is not None name=rel.name,
} key=fuid(st),
if tree: mtime=int(st.st_mtime),
size = sum(v.size for v in tree.values()) size=st.st_size if isfile else 0,
mtime = max(mtime, *(v.mtime for v in tree.values())) isfile=isfile,
else: )
size = 0 if isfile:
return DirEntry(key, size, mtime, tree) return [entry]
ret = [entry]
path = rootpath / rel
try:
li = []
for f in path.iterdir():
if f.name.startswith("."):
continue # No dotfiles
s = f.stat()
li.append((int(not stat.S_ISDIR(s.st_mode)), f.name, s))
for [isfile, name, s] in humansorted(li):
subtree = _walk(rel / name, isfile, s)
child = subtree[0]
entry.mtime = max(entry.mtime, child.mtime)
entry.size += child.size
ret.extend(subtree)
except FileNotFoundError: except FileNotFoundError:
return None pass # Things may be rapidly in motion
except OSError as e: except OSError as e:
print("OS error walking path", path, e) print("OS error walking path", path, e)
return None return ret
def update(relpath: PurePosixPath, loop): def update(relpath: PurePosixPath, loop):
"""Called by inotify updates, check the filesystem and broadcast any changes.""" """Called by inotify updates, check the filesystem and broadcast any changes."""
if rootpath is None or relpath is None: if rootpath is None or relpath is None:
print("ERROR", rootpath, relpath) print("ERROR", rootpath, relpath)
new = walk(rootpath / relpath) new = walk(relpath)
with tree_lock: with state.lock:
update = update_internal(relpath, new) old = state[relpath]
if not update:
return # No changes
msg = msgspec.json.encode({"update": update}).decode()
asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
def update_internal(
relpath: PurePosixPath,
new: DirEntry | FileEntry | None,
) -> list[UpdateEntry]:
path = "", *relpath.parts
old = tree
elems = []
for name in path:
if name not in old:
# File or folder created
old = None
elems.append((name, None))
if len(elems) < len(path):
# We got a notify for an item whose parent is not in tree
print("Tree out of sync DEBUG", relpath)
print(elems)
print("Current tree:")
print(tree[""])
print("Walking all:")
print(walk(rootpath))
raise ValueError("Tree out of sync")
break
old = old[name]
elems.append((name, old))
if old == new: if old == new:
return [] return
mt = new.mtime if new else 0 old = state.root
szdiff = (new.size if new else 0) - (old.size if old else 0)
# Update parents
update = []
for name, entry in elems[:-1]:
u = UpdateEntry(name, entry.key)
if szdiff:
entry.size += szdiff
u.size = entry.size
if mt > entry.mtime:
u.mtime = entry.mtime = mt
update.append(u)
# The last element is the one that changed
name, entry = elems[-1]
parent = elems[-2][1] if len(elems) > 1 else tree
u = UpdateEntry(name, new.key if new else entry.key)
if new: if new:
parent[name] = new state[relpath, new[0].isfile] = new
if u.size != new.size:
u.size = new.size
if u.mtime != new.mtime:
u.mtime = new.mtime
if isinstance(new, DirEntry) and u.dir != new.dir:
u.dir = new.dir
else: else:
del parent[name] del state[relpath]
u.deleted = True broadcast(format_update(old, state.root), loop)
update.append(u)
return update
async def broadcast(msg): def format_update(old, new):
# Make keep/del/insert diff until one of the lists ends
oidx, nidx = 0, 0
update = []
keep_count = 0
while oidx < len(old) and nidx < len(new):
if old[oidx] == new[nidx]:
keep_count += 1
oidx += 1
nidx += 1
continue
if keep_count > 0:
update.append(UpdKeep(keep_count))
keep_count = 0
del_count = 0
rest = new[nidx:]
while oidx < len(old) and old[oidx] not in rest:
del_count += 1
oidx += 1
if del_count:
update.append(UpdDel(del_count))
continue
insert_items = []
rest = old[oidx:]
while nidx < len(new) and new[nidx] not in rest:
insert_items.append(new[nidx])
nidx += 1
update.append(UpdIns(insert_items))
# Diff any remaining
if keep_count > 0:
update.append(UpdKeep(keep_count))
if oidx < len(old):
update.append(UpdDel(len(old) - oidx))
elif nidx < len(new):
update.append(UpdIns(new[nidx:]))
return msgspec.json.encode({"update": update}).decode()
def format_space(usage):
return msgspec.json.encode({"space": usage}).decode()
def format_root(root):
return msgspec.json.encode({"root": root}).decode()
def broadcast(msg, loop):
return asyncio.run_coroutine_threadsafe(abroadcast(msg), loop).result()
async def abroadcast(msg):
try: try:
for queue in pubsub.values(): for queue in pubsub.values():
queue.put_nowait(msg) queue.put_nowait(msg)
@ -223,8 +337,9 @@ async def broadcast(msg):
async def start(app, loop): async def start(app, loop):
config.load_config() config.load_config()
use_inotify = False and sys.platform == "linux"
app.ctx.watcher = threading.Thread( app.ctx.watcher = threading.Thread(
target=watcher_thread if sys.platform == "linux" else watcher_thread_poll, target=watcher_thread if use_inotify else watcher_thread_poll,
args=[loop], args=[loop],
) )
app.ctx.watcher.start() app.ctx.watcher.start()
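
format_update above encodes the difference between the old and new flat listings as keep/delete/insert runs, and the frontend replays those runs over its cached copy. A simplified, self-contained sketch of the same scheme over plain lists (illustrative only; the real code operates on FileEntry structs):

```python
def diff(old, new):
    """Encode new as [("k", n), ("d", n), ("i", items)] relative to old."""
    ops, oidx, nidx, keep = [], 0, 0, 0
    while oidx < len(old) and nidx < len(new):
        if old[oidx] == new[nidx]:
            keep += 1
            oidx += 1
            nidx += 1
            continue
        if keep:
            ops.append(("k", keep))
            keep = 0
        # Delete old entries that no longer appear in the remaining new list
        rest = new[nidx:]
        dels = 0
        while oidx < len(old) and old[oidx] not in rest:
            dels += 1
            oidx += 1
        if dels:
            ops.append(("d", dels))
            continue
        # Otherwise insert new entries until we re-sync with the old list
        rest = old[oidx:]
        ins = []
        while nidx < len(new) and new[nidx] not in rest:
            ins.append(new[nidx])
            nidx += 1
        ops.append(("i", ins))
    if keep:
        ops.append(("k", keep))
    if oidx < len(old):
        ops.append(("d", len(old) - oidx))
    elif nidx < len(new):
        ops.append(("i", new[nidx:]))
    return ops


def apply(old, ops):
    """Replay the ops, as the frontend's handleUpdateMessage does."""
    out, oidx = [], 0
    for op, arg in ops:
        if op == "k":
            out.extend(old[oidx:oidx + arg])
            oidx += arg
        elif op == "d":
            oidx += arg
        else:  # "i"
            out.extend(arg)
    assert oidx == len(old), "update out of sync"
    return out


old = ["a", "b", "c", "d"]
new = ["a", "x", "c", "d", "e"]
assert apply(old, diff(old, new)) == new
```
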

View File

@ -1,5 +1,5 @@
{ {
"name": "front", "name": "cista-frontend",
"version": "0.0.0", "version": "0.0.0",
"private": true, "private": true,
"scripts": { "scripts": {

View File

@ -1,13 +1,13 @@
<template> <template>
<LoginModal /> <LoginModal />
<header> <header>
<HeaderMain ref="headerMain" :path="path.pathList"> <HeaderMain ref="headerMain" :path="path.pathList" :query="path.query">
<HeaderSelected :path="path.pathList" /> <HeaderSelected :path="path.pathList" />
</HeaderMain> </HeaderMain>
<BreadCrumb :path="path.pathList" tabindex="-1"/> <BreadCrumb :path="path.pathList" tabindex="-1"/>
</header> </header>
<main> <main>
<RouterView :path="path.pathList" /> <RouterView :path="path.pathList" :query="path.query" />
</main> </main>
</template> </template>
@ -16,7 +16,7 @@ import { RouterView } from 'vue-router'
import type { ComputedRef } from 'vue' import type { ComputedRef } from 'vue'
import type HeaderMain from '@/components/HeaderMain.vue' import type HeaderMain from '@/components/HeaderMain.vue'
import { onMounted, onUnmounted, ref, watchEffect } from 'vue' import { onMounted, onUnmounted, ref, watchEffect } from 'vue'
import { watchConnect, watchDisconnect } from '@/repositories/WS' import { loadSession, watchConnect, watchDisconnect } from '@/repositories/WS'
import { useDocumentStore } from '@/stores/documents' import { useDocumentStore } from '@/stores/documents'
import { computed } from 'vue' import { computed } from 'vue'
@ -25,19 +25,23 @@ import Router from '@/router/index'
interface Path { interface Path {
path: string path: string
pathList: string[] pathList: string[]
query: string
} }
const documentStore = useDocumentStore() const documentStore = useDocumentStore()
const path: ComputedRef<Path> = computed(() => { const path: ComputedRef<Path> = computed(() => {
const p = decodeURIComponent(Router.currentRoute.value.path) const p = decodeURIComponent(Router.currentRoute.value.path).split('//')
const pathList = p.split('/').filter(value => value !== '') const pathList = p[0].split('/').filter(value => value !== '')
const query = p.slice(1).join('//')
return { return {
path: p, path: p[0],
pathList pathList,
query
} }
}) })
watchEffect(() => { watchEffect(() => {
document.title = path.value.path.replace(/\/$/, '').split('/').pop() || documentStore.server.name || 'Cista Storage' document.title = path.value.path.replace(/\/$/, '').split('/').pop() || documentStore.server.name || 'Cista Storage'
}) })
onMounted(loadSession)
onMounted(watchConnect) onMounted(watchConnect)
onUnmounted(watchDisconnect) onUnmounted(watchDisconnect)
// Update human-readable x seconds ago messages from mtimes // Update human-readable x seconds ago messages from mtimes
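
App.vue above splits the current route on a double slash: everything before `//` is the folder path and everything after is the search query, which is how searches end up in URL history. A small Python sketch of that convention, for illustration only (the app does this in the Vue component):

```python
from urllib.parse import unquote


def split_route(route: str) -> tuple[list[str], str]:
    """'/docs/reports//budget 2023' -> (['docs', 'reports'], 'budget 2023')"""
    parts = unquote(route).split("//")
    path_list = [p for p in parts[0].split("/") if p]
    query = "//".join(parts[1:])
    return path_list, query


assert split_route("/docs/reports//budget 2023") == (["docs", "reports"], "budget 2023")
assert split_route("/docs/reports/") == (["docs", "reports"], "")
```
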

(Dozens of image asset diffs omitted: icon files shown with identical Before/After preview sizes, no content changes.)
View File

@ -46,8 +46,11 @@ const isCurrent = (index: number) => index == props.path.length ? 'location' : u
const navigate = (index: number) => { const navigate = (index: number) => {
const link = links[index] const link = links[index]
if (!link) throw Error(`No link at index ${index} (path: ${props.path})`) if (!link) throw Error(`No link at index ${index} (path: ${props.path})`)
const url = `/${longest.value.slice(0, index).join('/')}/`
const here = `/${longest.value.join('/')}/`
link.focus() link.focus()
router.replace(`/${longest.value.slice(0, index).join('/')}`) if (here.startsWith(location.hash.slice(1))) router.replace(url)
else router.push(url)
} }
const move = (dir: number) => { const move = (dir: number) => {

View File

@ -3,34 +3,11 @@
<thead> <thead>
<tr> <tr>
<th class="selection"> <th class="selection">
<input <input type="checkbox" tabindex="-1" v-model="allSelected" :indeterminate="selectionIndeterminate">
type="checkbox"
tabindex="-1"
v-model="allSelected"
:indeterminate="selectionIndeterminate"
/>
</th>
<th
class="sortcolumn"
:class="{ sortactive: sort === 'name' }"
@click="toggleSort('name')"
>
Name
</th>
<th
class="sortcolumn modified right"
:class="{ sortactive: sort === 'modified' }"
@click="toggleSort('modified')"
>
Modified
</th>
<th
class="sortcolumn size right"
:class="{ sortactive: sort === 'size' }"
@click="toggleSort('size')"
>
Size
</th> </th>
<th class="sortcolumn" :class="{ sortactive: sort === 'name' }" @click="toggleSort('name')">Name</th>
<th class="sortcolumn modified right" :class="{ sortactive: sort === 'modified' }" @click="toggleSort('modified')">Modified</th>
<th class="sortcolumn size right" :class="{ sortactive: sort === 'size' }" @click="toggleSort('size')">Size</th>
<th class="menu"></th> <th class="menu"></th>
</tr> </tr>
</thead> </thead>
@ -38,27 +15,13 @@
<tr v-if="editing?.key === 'new'" class="folder"> <tr v-if="editing?.key === 'new'" class="folder">
<td class="selection"></td> <td class="selection"></td>
<td class="name"> <td class="name">
<FileRenameInput <FileRenameInput :doc="editing" :rename="mkdir" :exit="() => {editing = null}" />
:doc="editing"
:rename="mkdir"
:exit="
() => {
editing = null
}
"
/>
</td> </td>
<td class="modified right"> <FileModified :doc=editing />
<time :datetime="new Date(editing.mtime).toISOString().replace('.000', '')">{{ <FileSize :doc=editing />
editing.modified
}}</time>
</td>
<td class="size right">{{ editing.sizedisp }}</td>
<td class="menu"></td> <td class="menu"></td>
</tr> </tr>
<template <template v-for="(doc, index) in sortedDocuments" :key="doc.key">
v-for="(doc, index) in sortedDocuments"
:key="doc.key">
<tr class="folder-change" v-if="showFolderBreadcrumb(index)"> <tr class="folder-change" v-if="showFolderBreadcrumb(index)">
<th colspan="5"><BreadCrumb :path="doc.loc ? doc.loc.split('/') : []" /></th> <th colspan="5"><BreadCrumb :path="doc.loc ? doc.loc.split('/') : []" /></th>
</tr> </tr>
@ -82,16 +45,9 @@
/> />
</td> </td>
<td class="name"> <td class="name">
<template v-if="editing === doc" <template v-if="editing === doc">
><FileRenameInput <FileRenameInput :doc="doc" :rename="rename" :exit="() => {editing = null}" />
:doc="doc" </template>
:rename="rename"
:exit="
() => {
editing = null
}
"
/></template>
<template v-else> <template v-else>
<a <a
:href="url_for(doc)" :href="url_for(doc)"
@ -102,29 +58,13 @@
@keyup.right.stop="ev => { if (doc.dir) (ev.target as HTMLElement).click() }" @keyup.right.stop="ev => { if (doc.dir) (ev.target as HTMLElement).click() }"
>{{ doc.name }}</a >{{ doc.name }}</a
> >
<button <button v-if="cursor == doc" class="rename-button" @click="() => (editing = doc)">🖊</button>
v-if="cursor == doc"
class="rename-button"
@click="() => (editing = doc)"
>
🖊
</button>
</template> </template>
</td> </td>
<td class="modified right"> <FileModified :doc=doc />
<time <FileSize :doc=doc />
:data-tooltip="new Date(1000 * doc.mtime).toISOString().replace('T', '\n').replace('.000Z', ' UTC')"
>{{ doc.modified }}</time
>
</td>
<td class="size right">{{ doc.sizedisp }}</td>
<td class="menu"> <td class="menu">
<button <button tabindex="-1" @click.stop="contextMenu($event, doc)"></button>
tabindex="-1"
@click.stop="contextMenu($event, doc)"
>
</button>
</td> </td>
</tr> </tr>
</template> </template>
@ -147,13 +87,10 @@ import { connect, controlUrl } from '@/repositories/WS'
import { collator, formatSize, formatUnixDate } from '@/utils' import { collator, formatSize, formatUnixDate } from '@/utils'
import { useRouter } from 'vue-router' import { useRouter } from 'vue-router'
const props = withDefaults( const props = defineProps<{
defineProps<{
path: Array<string> path: Array<string>
documents: Document[] documents: Document[]
}>(), }>()
{}
)
const documentStore = useDocumentStore() const documentStore = useDocumentStore()
const router = useRouter() const router = useRouter()
const url_for = (doc: Document) => { const url_for = (doc: Document) => {
@ -309,7 +246,7 @@ const mkdir = (doc: Document, name: string) => {
editing.value = null editing.value = null
} else { } else {
console.log('mkdir', msg) console.log('mkdir', msg)
router.push(`/${doc.loc}/${name}/`) router.push(doc.loc ? `/${doc.loc}/${name}/` : `/${name}/`)
} }
} }
}) })
@ -400,7 +337,7 @@ table .selection {
text-overflow: clip; text-overflow: clip;
} }
table .modified { table .modified {
width: 8em; width: 9em;
} }
table .size { table .size {
width: 5em; width: 5em;

View File

@ -0,0 +1,22 @@
<template>
<td class="modified right">
<time :data-tooltip=tooltip :datetime=datetime>{{ doc.modified }}</time>
</td>
</template>
<script setup lang="ts">
import type { Document } from '@/repositories/Document'
import { computed } from 'vue'
const datetime = computed(() =>
new Date(1000 * props.doc.mtime).toISOString().replace('.000Z', 'Z')
)
const tooltip = computed(() =>
datetime.value.replace('T', '\n').replace('Z', ' UTC')
)
const props = defineProps<{
doc: Document
}>()
</script>

View File

@ -0,0 +1,43 @@
<template>
<td class="size right" :class=sizeClass>{{ doc.sizedisp }}</td>
</template>
<script setup lang="ts">
import type { Document } from '@/repositories/Document'
import { computed } from 'vue'
const sizeClass = computed(() => {
const unit = props.doc.sizedisp.split('\u202F').slice(-1)[0]
return +unit ? "bytes" : unit
})
const props = defineProps<{
doc: Document
}>()
</script>
<style scoped>
.size.empty { color: #555 }
.size.bytes { color: #77a }
.size.kB { color: #474 }
.size.MB { color: #a80 }
.size.GB { color: #f83 }
.size.TB, .size.PB, .size.EB, .size.huge {
color: #f44;
text-shadow: 0 0 .2em;
}
@media (prefers-color-scheme: dark) {
.size.empty { color: #bbb }
.size.bytes { color: #99d }
.size.kB { color: #aea }
.size.MB { color: #ff4 }
.size.GB { color: #f86 }
.size.TB, .size.PB, .size.EB, .size.huge { color: #f55 }
}
.cursor .size {
color: inherit;
text-shadow: none;
}
</style>
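
FileSize.vue above colors sizes by magnitude: the human-readable size string is split on a narrow no-break space (U+202F) and the last token is either a bare number (plain bytes) or a unit such as kB, MB or GB. A hedged Python sketch of the same classification; that formatSize separates number and unit with U+202F is inferred from this component, not shown elsewhere in the diff:

```python
def size_class(sizedisp: str) -> str:
    """Map '12.3\u202fkB' -> 'kB' and '123' -> 'bytes' (mirrors FileSize.vue)."""
    unit = sizedisp.split("\u202f")[-1]
    try:
        float(unit)
        return "bytes"  # bare number, no unit suffix
    except ValueError:
        return unit


assert size_class("123") == "bytes"
assert size_class("12.3\u202fkB") == "kB"
assert size_class("4.0\u202fGB") == "GB"
```
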

View File

@ -9,7 +9,7 @@
<SvgButton <SvgButton
name="create-folder" name="create-folder"
data-tooltip="New folder" data-tooltip="New folder"
@click="() => documentStore.fileExplorer.newFolder()" @click="() => documentStore.fileExplorer!.newFolder()"
/> />
<slot></slot> <slot></slot>
<div class="spacer smallgap"></div> <div class="spacer smallgap"></div>
@ -17,7 +17,9 @@
<input <input
ref="search" ref="search"
type="search" type="search"
v-model="documentStore.search" :value="query"
@blur="ev => { if (!query) closeSearch(ev) }"
@input="updateSearch"
placeholder="Search words" placeholder="Search words"
class="margin-input" class="margin-input"
@keyup.escape="closeSearch" @keyup.escape="closeSearch"
@ -31,30 +33,42 @@
<script setup lang="ts"> <script setup lang="ts">
import { useDocumentStore } from '@/stores/documents' import { useDocumentStore } from '@/stores/documents'
import { ref, nextTick } from 'vue' import { ref, nextTick, watchEffect } from 'vue'
import ContextMenu from '@imengyu/vue3-context-menu' import ContextMenu from '@imengyu/vue3-context-menu'
import router from '@/router';
const documentStore = useDocumentStore() const documentStore = useDocumentStore()
const showSearchInput = ref<boolean>(false) const showSearchInput = ref<boolean>(false)
const search = ref<HTMLInputElement | null>() const search = ref<HTMLInputElement | null>()
const searchButton = ref<HTMLButtonElement | null>() const searchButton = ref<HTMLButtonElement | null>()
const closeSearch = () => { const closeSearch = (ev: Event) => {
if (!showSearchInput.value) return // Already closing if (!showSearchInput.value) return // Already closing
showSearchInput.value = false showSearchInput.value = false
documentStore.search = ''
const breadcrumb = document.querySelector('.breadcrumb') as HTMLElement const breadcrumb = document.querySelector('.breadcrumb') as HTMLElement
breadcrumb.focus() breadcrumb.focus()
updateSearch(ev)
} }
const toggleSearchInput = () => { const updateSearch = (ev: Event) => {
const q = (ev.target as HTMLInputElement).value
let p = props.path.join('/')
p = p ? `/${p}` : ''
const url = q ? `${p}//${q}` : (p || '/')
console.log("Update search", url)
if (!props.query && q) router.push(url)
else router.replace(url)
}
const toggleSearchInput = (ev: Event) => {
showSearchInput.value = !showSearchInput.value showSearchInput.value = !showSearchInput.value
if (!showSearchInput.value) return closeSearch() if (!showSearchInput.value) return closeSearch(ev)
nextTick(() => { nextTick(() => {
const input = search.value const input = search.value
if (input) input.focus() if (input) input.focus()
}) })
} }
watchEffect(() => {
if (props.query) showSearchInput.value = true
})
const settingsMenu = (e: Event) => { const settingsMenu = (e: Event) => {
// show the context menu // show the context menu
const items = [] const items = []
@ -69,9 +83,10 @@ const settingsMenu = (e: Event) => {
items, items,
}) })
} }
const props = defineProps({ const props = defineProps<{
path: Array<string> path: Array<string>
}) query: string
}>()
defineExpose({ defineExpose({
toggleSearchInput, toggleSearchInput,

View File

@ -34,7 +34,7 @@ const op = (op: string, dst?: string) => {
// @ts-ignore // @ts-ignore
if (dst !== undefined) msg.dst = dst if (dst !== undefined) msg.dst = dst
const control = connect(controlUrl, { const control = connect(controlUrl, {
message(ev: WebSocmetMessageEvent) { message(ev: MessageEvent) {
const res = JSON.parse(ev.data) const res = JSON.parse(ev.data)
if ('error' in res) { if ('error' in res) {
console.error('Control socket error', msg, res.error) console.error('Control socket error', msg, res.error)

View File

@ -11,29 +11,38 @@ const props = defineProps({
path: Array<string> path: Array<string>
}) })
type CloudFile = {
file: File
cloudName: string
cloudPos: number
}
function uploadHandler(event: Event) { function uploadHandler(event: Event) {
event.preventDefault() event.preventDefault()
event.stopPropagation() event.stopPropagation()
// @ts-ignore // @ts-ignore
let infiles = Array.from(event.dataTransfer?.files || event.target.files) as File[] const infiles = Array.from(event.dataTransfer?.files || event.target.files) as File[]
if (!infiles.length) return if (!infiles.length) return
const loc = props.path!.join('/') const loc = props.path!.join('/')
for (const f of infiles) { let files = []
f.cloudName = loc + '/' + (f.webkitRelativePath || f.name) for (const file of infiles) {
f.cloudPos = 0 files.push({
file,
cloudName: loc + '/' + (file.webkitRelativePath || file.name),
cloudPos: 0,
})
} }
const dotfiles = infiles.filter(f => f.cloudName.includes('/.')) const dotfiles = files.filter(f => f.cloudName.includes('/.'))
if (dotfiles.length) { if (dotfiles.length) {
documentStore.error = "Won't upload dotfiles" documentStore.error = "Won't upload dotfiles"
console.log("Dotfiles omitted", dotfiles) console.log("Dotfiles omitted", dotfiles)
infiles = infiles.filter(f => !f.cloudName.includes('/.')) files = files.filter(f => !f.cloudName.includes('/.'))
} }
if (!infiles.length) return if (!files.length) return
infiles.sort((a, b) => collator.compare(a.cloudName, b.cloudName)) files.sort((a, b) => collator.compare(a.cloudName, b.cloudName))
// @ts-ignore // @ts-ignore
upqueue = upqueue.concat(infiles) upqueue = [...upqueue, ...files]
statsAdd(infiles) statsAdd(files)
startWorker() startWorker()
} }
@ -49,13 +58,14 @@ const uprogress_init = {
tlast: 0, tlast: 0,
statbytes: 0, statbytes: 0,
statdur: 0, statdur: 0,
files: [], files: [] as CloudFile[],
filestart: 0, filestart: 0,
fileidx: 0, fileidx: 0,
filecount: 0, filecount: 0,
filename: '', filename: '',
filesize: 0, filesize: 0,
filepos: 0, filepos: 0,
status: 'idle',
} }
const uprogress = reactive({...uprogress_init}) const uprogress = reactive({...uprogress_init})
const percent = computed(() => uprogress.uploaded / uprogress.total * 100) const percent = computed(() => uprogress.uploaded / uprogress.total * 100)
@ -78,7 +88,7 @@ setInterval(() => {
uprogress.statdur *= .9 uprogress.statdur *= .9
} }
}, 100) }, 100)
const statUpdate = ({name, size, start, end}) => { const statUpdate = ({name, size, start, end}: {name: string, size: number, start: number, end: number}) => {
if (name !== uprogress.filename) return // If stats have been reset if (name !== uprogress.filename) return // If stats have been reset
const now = Date.now() const now = Date.now()
uprogress.uploaded = uprogress.filestart + end uprogress.uploaded = uprogress.filestart + end
@ -97,7 +107,7 @@ const statNextFile = () => {
const f = uprogress.files.shift() const f = uprogress.files.shift()
if (!f) return statReset() if (!f) return statReset()
uprogress.filepos = 0 uprogress.filepos = 0
uprogress.filesize = f.size uprogress.filesize = f.file.size
uprogress.filename = f.cloudName uprogress.filename = f.cloudName
} }
const statReset = () => { const statReset = () => {
@ -105,14 +115,14 @@ const statReset = () => {
uprogress.t0 = Date.now() uprogress.t0 = Date.now()
uprogress.tlast = uprogress.t0 + 1 uprogress.tlast = uprogress.t0 + 1
} }
const statsAdd = (f: Array<File>) => { const statsAdd = (f: CloudFile[]) => {
if (uprogress.files.length === 0) statReset() if (uprogress.files.length === 0) statReset()
uprogress.total += f.reduce((a, b) => a + b.size, 0) uprogress.total += f.reduce((a, b) => a + b.file.size, 0)
uprogress.filecount += f.length uprogress.filecount += f.length
uprogress.files = uprogress.files.concat(f) uprogress.files = [...uprogress.files, ...f]
statNextFile() statNextFile()
} }
let upqueue = [] as File[] let upqueue = [] as CloudFile[]
// TODO: Rewrite as WebSocket class // TODO: Rewrite as WebSocket class
const WSCreate = async () => await new Promise<WebSocket>(resolve => { const WSCreate = async () => await new Promise<WebSocket>(resolve => {
@ -155,17 +165,19 @@ const worker = async () => {
const ws = await WSCreate() const ws = await WSCreate()
while (upqueue.length) { while (upqueue.length) {
const f = upqueue[0] const f = upqueue[0]
if (f.cloudPos === f.size) { if (f.cloudPos === f.file.size) {
upqueue.shift() upqueue.shift()
continue continue
} }
const start = f.cloudPos const start = f.cloudPos
const end = Math.min(f.size, start + (1<<20)) const end = Math.min(f.file.size, start + (1<<20))
const control = { name: f.cloudName, size: f.size, start, end } const control = { name: f.cloudName, size: f.file.size, start, end }
const data = f.slice(start, end) const data = f.file.slice(start, end)
f.cloudPos = end f.cloudPos = end
// Note: files may get modified during I/O // Note: files may get modified during I/O
// @ts-ignore FIXME proper WebSocket class, avoid attaching functions to WebSocket object
ws.sendMsg(control) ws.sendMsg(control)
// @ts-ignore
await ws.sendData(data) await ws.sendData(data)
} }
if (upqueue.length) startWorker() if (upqueue.length) startWorker()
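
UploadButton.vue above uploads each file in roughly 1 MiB slices, sending a JSON control frame {name, size, start, end} followed by the binary payload over the /api/upload websocket. A hedged Python sketch of a client speaking the same shape of protocol, assuming the `websockets` package; any acknowledgement handling the real client performs between chunks is omitted here:

```python
import asyncio
import json
from pathlib import Path

import websockets  # assumed dependency, as in the earlier sketch

CHUNK = 1 << 20  # 1 MiB, matching the frontend's slice size


async def upload(path: Path, cloud_name: str,
                 url: str = "ws://localhost:3000/api/upload"):
    size = path.stat().st_size
    async with websockets.connect(url) as ws:
        with path.open("rb") as f:
            start = 0
            while start < size:
                end = min(size, start + CHUNK)
                # Control frame first (JSON), then the binary payload,
                # mirroring ws.sendMsg(control); ws.sendData(data) in the component.
                await ws.send(json.dumps({"name": cloud_name, "size": size,
                                          "start": start, "end": end}))
                await ws.send(f.read(end - start))
                start = end


# asyncio.run(upload(Path("video.mp4"), "movies/video.mp4"))
```
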

View File

@ -22,29 +22,16 @@ export type errorEvent = {
// Raw types the backend /api/watch sends us // Raw types the backend /api/watch sends us
export type FileEntry = { export type FileEntry = [
key: FUID number, // level
size: number string, // name
mtime: number FUID,
} number, //mtime
number, // size
number, // isfile
]
export type DirEntry = { export type UpdateEntry = ['k', number] | ['d', number] | ['i', Array<FileEntry>]
key: FUID
size: number
mtime: number
dir: DirList
}
export type DirList = Record<string, FileEntry | DirEntry>
export type UpdateEntry = {
name: string
deleted?: boolean
key?: FUID
size?: number
mtime?: number
dir?: DirList
}
// Helper structure for selections // Helper structure for selections
export interface SelectedItems { export interface SelectedItems {

View File

@ -1,14 +1,29 @@
import { useDocumentStore } from "@/stores/documents" import { useDocumentStore } from "@/stores/documents"
import type { DirEntry, UpdateEntry, errorEvent } from "./Document" import type { FileEntry, UpdateEntry, errorEvent } from "./Document"
export const controlUrl = '/api/control' export const controlUrl = '/api/control'
export const uploadUrl = '/api/upload' export const uploadUrl = '/api/upload'
export const watchUrl = '/api/watch' export const watchUrl = '/api/watch'
let tree = null as DirEntry | null let tree = [] as FileEntry[]
let reconnectDuration = 500 let reconnectDuration = 500
let wsWatch = null as WebSocket | null let wsWatch = null as WebSocket | null
export const loadSession = () => {
const store = useDocumentStore()
try {
tree = JSON.parse(sessionStorage["cista-files"])
store.updateRoot(tree)
return true
} catch (error) {
return false
}
}
const saveSession = () => {
sessionStorage["cista-files"] = JSON.stringify(tree)
}
export const connect = (path: string, handlers: Partial<Record<keyof WebSocketEventMap, any>>) => { export const connect = (path: string, handlers: Partial<Record<keyof WebSocketEventMap, any>>) => {
const webSocket = new WebSocket(new URL(path, location.origin.replace(/^http/, 'ws'))) const webSocket = new WebSocket(new URL(path, location.origin.replace(/^http/, 'ws')))
for (const [event, handler] of Object.entries(handlers)) webSocket.addEventListener(event, handler) for (const [event, handler] of Object.entries(handlers)) webSocket.addEventListener(event, handler)
@ -94,34 +109,36 @@ const handleWatchMessage = (event: MessageEvent) => {
} }
} }
function handleRootMessage({ root }: { root: DirEntry }) { function handleRootMessage({ root }: { root: FileEntry[] }) {
const store = useDocumentStore() const store = useDocumentStore()
console.log('Watch root', root) console.log('Watch root', root)
store.updateRoot(root) store.updateRoot(root)
tree = root tree = root
saveSession()
} }
function handleUpdateMessage(updateData: { update: UpdateEntry[] }) { function handleUpdateMessage(updateData: { update: UpdateEntry[] }) {
const store = useDocumentStore() const store = useDocumentStore()
console.log('Watch update', updateData.update) const update = updateData.update
console.log('Watch update', update)
if (!tree) return console.error('Watch update before root') if (!tree) return console.error('Watch update before root')
let node: DirEntry = tree let newtree = []
for (const elem of updateData.update) { let oidx = 0
if (elem.deleted) {
delete node.dir[elem.name] for (const [action, arg] of update) {
break // Deleted elements can't have further children if (action === 'k') {
newtree.push(...tree.slice(oidx, oidx + arg))
oidx += arg
} }
if (elem.name) { else if (action === 'd') oidx += arg
// @ts-ignore else if (action === 'i') newtree.push(...arg)
console.log(node, elem.name) else console.log("Unknown update action", action, arg)
node = node.dir[elem.name] ||= {}
} }
if (elem.key !== undefined) node.key = elem.key if (oidx != tree.length)
if (elem.size !== undefined) node.size = elem.size throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}, new tree ${newtree.length}`)
if (elem.mtime !== undefined) node.mtime = elem.mtime store.updateRoot(newtree)
if (elem.dir !== undefined) node.dir = elem.dir tree = newtree
} saveSession()
store.updateRoot(tree)
} }
function handleError(msg: errorEvent) { function handleError(msg: errorEvent) {

View File

@ -1,10 +1,4 @@
import type { import type { Document, FileEntry, FUID, SelectedItems } from '@/repositories/Document'
Document,
DirEntry,
FileEntry,
FUID,
SelectedItems
} from '@/repositories/Document'
import { formatSize, formatUnixDate, haystackFormat } from '@/utils' import { formatSize, formatUnixDate, haystackFormat } from '@/utils'
import { defineStore } from 'pinia' import { defineStore } from 'pinia'
import { collator } from '@/utils' import { collator } from '@/utils'
@ -26,11 +20,8 @@ export const useDocumentStore = defineStore({
id: 'documents', id: 'documents',
state: () => ({ state: () => ({
document: [] as Document[], document: [] as Document[],
search: "" as string,
selected: new Set<FUID>(), selected: new Set<FUID>(),
uploadingDocuments: [], fileExplorer: null as any,
uploadCount: 0 as number,
fileExplorer: null,
error: '' as string, error: '' as string,
connected: false, connected: false,
server: {} as Record<string, any>, server: {} as Record<string, any>,
@ -41,48 +32,30 @@ export const useDocumentStore = defineStore({
isOpenLoginModal: false isOpenLoginModal: false
} as User } as User
}), }),
persist: {
storage: sessionStorage,
paths: ['document'],
},
actions: { actions: {
updateRoot(root: DirEntry | null = null) { updateRoot(root: FileEntry[]) {
if (!root) {
this.document = []
return
}
// Transform tree data to flat documents array
let loc = ""
const mapper = ([name, attr]: [string, FileEntry | DirEntry]) => ({
...attr,
loc,
name,
sizedisp: formatSize(attr.size),
modified: formatUnixDate(attr.mtime),
haystack: haystackFormat(name),
})
const queue = [...Object.entries(root.dir ?? {}).map(mapper)]
const docs = [] const docs = []
for (let doc; (doc = queue.shift()) !== undefined;) { let loc = [] as string[]
docs.push(doc) for (const [level, name, key, mtime, size, isfile] of root) {
if ("dir" in doc) { loc = loc.slice(0, level - 1)
// Recurse but replace recursive structure with boolean docs.push({
loc = doc.loc ? `${doc.loc}/${doc.name}` : doc.name name,
queue.push(...Object.entries(doc.dir).map(mapper)) loc: level ? loc.join('/') : '/',
// @ts-ignore key,
doc.dir = true size,
sizedisp: formatSize(size),
mtime,
modified: formatUnixDate(mtime),
haystack: haystackFormat(name),
dir: !isfile,
})
loc.push(name)
} }
// @ts-ignore
else doc.dir = false
}
// Pre sort directory entries folders first then files, names in natural ordering
docs.sort((a, b) =>
// @ts-ignore
b.dir - a.dir ||
collator.compare(a.name, b.name)
)
this.document = docs as Document[] this.document = docs as Document[]
}, },
updateModified() {
for (const doc of this.document) doc.modified = formatUnixDate(doc.mtime)
},
login(username: string, privileged: boolean) { login(username: string, privileged: boolean) {
this.user.username = username this.user.username = username
this.user.privileged = privileged this.user.privileged = privileged
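
updateRoot above rebuilds each document's location from the flat listing by trimming a running path to the entry's level before appending its name. A Python sketch of that reconstruction over simplified (level, name, isfile) tuples (illustrative; the store also attaches keys, sizes, mtimes and search haystacks):

```python
def flat_to_paths(entries):
    """entries: (level, name, isfile) in listing order -> (parent, name, is_dir)."""
    loc: list[str] = []
    out = []
    for level, name, isfile in entries:
        loc = loc[: max(level - 1, 0)]            # climb back up to this entry's parent
        parent = "/".join(loc) if level else "/"  # level 0 is the root itself
        out.append((parent, name, not isfile))
        loc.append(name)
    return out


listing = [
    (0, "", 0),            # root
    (1, "docs", 0),        # folder under root
    (2, "readme.txt", 1),  # file inside docs
    (1, "img.png", 1),     # file under root
]
for parent, name, is_dir in flat_to_paths(listing):
    print(f"parent={parent!r:10} name={name!r:14} dir={is_dir}")
```
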

View File

@ -16,17 +16,17 @@ import { needleFormat, localeIncludes, collator } from '@/utils';
const documentStore = useDocumentStore() const documentStore = useDocumentStore()
const fileExplorer = ref() const fileExplorer = ref()
const props = defineProps({ const props = defineProps<{
path: Array<string> path: Array<string>
}) query: string
}>()
const documents = computed(() => { const documents = computed(() => {
if (!props.path) return []
const loc = props.path.join('/') const loc = props.path.join('/')
const query = props.query
// List the current location // List the current location
if (!documentStore.search) return documentStore.document.filter(doc => doc.loc === loc) if (!query) return documentStore.document.filter(doc => doc.loc === loc)
// Find up to 100 newest documents that match the search // Find up to 100 newest documents that match the search
const search = documentStore.search const needle = needleFormat(query)
const needle = needleFormat(search)
let limit = 100 let limit = 100
let docs = [] let docs = []
for (const doc of documentStore.recentDocuments) { for (const doc of documentStore.recentDocuments) {
@ -46,7 +46,7 @@ const documents = computed(() => {
// @ts-ignore // @ts-ignore
(a.type === 'file') - (b.type === 'file') || (a.type === 'file') - (b.type === 'file') ||
// @ts-ignore // @ts-ignore
b.name.includes(search) - a.name.includes(search) || b.name.includes(query) - a.name.includes(query) ||
collator.compare(a.name, b.name) collator.compare(a.name, b.name)
)) ))
return docs return docs

Some files were not shown because too many files have changed in this diff.