Compare commits: 2978e0c968...v0.3.0

8 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 7a08f7cbe2 | |
| | dd37238510 | |
| | c8d5f335b1 | |
| | bb80b3ee54 | |
| | 06d860c601 | |
| | c321de13fd | |
| | 278e8303c4 | |
| | 9854dd01cc | |

cista/api.py (13 lines changed)
@@ -37,16 +37,23 @@ async def upload(req, ws):
         )
         req = msgspec.json.decode(text, type=FileRange)
         pos = req.start
-        data = None
-        while pos < req.end and (data := await ws.recv()) and isinstance(data, bytes):
+        while True:
+            data = await ws.recv()
+            if not isinstance(data, bytes):
+                break
+            if len(data) > req.end - pos:
+                raise ValueError(
+                    f"Expected up to {req.end - pos} bytes, got {len(data)} bytes"
+                )
             sentsize = await alink(("upload", req.name, pos, data, req.size))
             pos += typing.cast(int, sentsize)
+            if pos >= req.end:
+                break
         if pos != req.end:
             d = f"{len(data)} bytes" if isinstance(data, bytes) else data
             raise ValueError(f"Expected {req.end - pos} more bytes, got {d}")
         # Report success
         res = StatusMsg(status="ack", req=req)
-        print("ack", res)
         await asend(ws, res)
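The reworked loop above expects one JSON FileRange control frame (name, size, start, end) followed by binary frames that together fill exactly that byte range, and it answers each completed range with an ack StatusMsg. A rough client-side sketch of that exchange in Python, assuming the third-party websockets package and a placeholder endpoint URL (neither is given in this diff):

    # Rough sketch only: drives the upload socket the way the server loop above
    # expects. The endpoint URL and the websockets dependency are illustrative
    # assumptions, not part of this changeset.
    import asyncio
    import json

    import websockets

    async def upload_file(path: str, cloud_name: str,
                          url: str = "ws://localhost:8000/api/upload"):  # placeholder
        data = open(path, "rb").read()
        size = len(data)
        chunk = 1 << 20  # 1 MiB ranges, matching the frontend worker later in this diff
        async with websockets.connect(url) as ws:
            pos = 0
            while pos < size:
                end = min(size, pos + chunk)
                # Text frame: which byte range of which file follows
                await ws.send(json.dumps(
                    {"name": cloud_name, "size": size, "start": pos, "end": end}))
                # Binary frame carrying exactly that range
                await ws.send(data[pos:end])
                print(await ws.recv())  # expect an "ack" StatusMsg per completed range
                pos = end

    # asyncio.run(upload_file("example.bin", "docs/example.bin"))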
@@ -34,7 +34,9 @@ class File:
             self.open_rw()
         assert self.fd is not None
         if file_size is not None:
+            assert pos + len(buffer) <= file_size
             os.ftruncate(self.fd, file_size)
+        if buffer:
             os.lseek(self.fd, pos, os.SEEK_SET)
             os.write(self.fd, buffer)
@@ -120,6 +120,9 @@ class FileEntry(msgspec.Struct, array_like=True):
     size: int
     isfile: int
 
+    def __repr__(self):
+        return self.key or "FileEntry()"
+
 
 class Update(msgspec.Struct, array_like=True):
     ...

@@ -137,6 +140,10 @@ class UpdIns(Update, tag="i"):
     items: list[FileEntry]
 
 
+class UpdateMessage(msgspec.Struct):
+    update: list[UpdKeep | UpdDel | UpdIns]
+
+
 class Space(msgspec.Struct):
     disk: int
     free: int
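UpdateMessage is the envelope for the keep/delete/insert batches that the watcher broadcasts and that the new tests decode. A small round-trip sketch; UpdKeep and UpdDel are constructed positionally as in the tests, and the JSON tags shown in the comment are assumptions (only UpdIns's tag "i" is visible in this hunk):

    # Sketch: round-trip an update batch through msgspec tagged structs.
    import msgspec

    from cista.protocol import UpdateMessage, UpdKeep, UpdDel, UpdIns

    msg = UpdateMessage(update=[UpdKeep(2), UpdDel(1), UpdIns([])])
    wire = msgspec.json.encode(msg)  # roughly {"update": [["k", 2], ["d", 1], ["i", []]]} (tags assumed)
    assert msgspec.json.decode(wire, type=UpdateMessage).update == msg.update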
@@ -1,6 +1,6 @@
 import os
 import re
-from pathlib import Path, PurePath
+from pathlib import Path
 
 from sanic import Sanic
 

@@ -15,7 +15,6 @@ def run(*, dev=False):
     # Silence Sanic's warning about running in production rather than debug
     os.environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "1"
     confdir = config.conffile.parent
-    wwwroot = PurePath(__file__).parent / "wwwroot"
     if opts.get("ssl"):
         # Run plain HTTP redirect/acme server on port 80
         server80.app.prepare(port=80, motd=False)

@@ -27,7 +26,7 @@ def run(*, dev=False):
         motd=False,
         dev=dev,
         auto_reload=dev,
-        reload_dir={confdir, wwwroot},
+        reload_dir={confdir},
         access_log=True,
     )  # type: ignore
     if dev:
@@ -50,37 +50,42 @@ class State:
         begin, end = 0, len(self._listing)
         level = 0
         isfile = 0
-        while level < len(relpath.parts):
-            # Enter a subdirectory
-            level += 1
-            begin += 1
-            if level == len(relpath.parts):
-                isfile = relfile
-            name = relpath.parts[level - 1]
-            namesort = sortkey(name)
-            r = self._listing[begin]
-            assert r.level == level
-            # Iterate over items at this level
-            while (
-                begin < end
-                and r.name != name
-                and r.isfile <= isfile
-                and sortkey(r.name) < namesort
-            ):
-                # Skip contents
-                begin += 1
-                while begin < end and self._listing[begin].level > level:
-                    begin += 1
-            # Not found?
-            if begin == end or self._listing[begin].level < level:
-                return slice(begin, begin)
-            r = self._listing[begin]
-            # Not found?
-            if begin == end or r.name != name:
-                return slice(begin, begin)
-        # Found an item, now find its end
-        for end in range(begin + 1, len(self._listing)):
-            if self._listing[end].level <= level:
-                break
+        # Special case for root
+        if not relpath.parts:
+            return slice(begin, end)
+
+        begin += 1
+        for part in relpath.parts:
+            level += 1
+            found = False
+
+            while begin < end:
+                entry = self._listing[begin]
+
+                if entry.level < level:
+                    break
+
+                if entry.level == level:
+                    if entry.name == part:
+                        found = True
+                        if level == len(relpath.parts):
+                            isfile = relfile
+                        else:
+                            begin += 1
+                        break
+                    cmp = entry.isfile - isfile or sortkey(entry.name) > sortkey(part)
+                    if cmp > 0:
+                        break
+
+                begin += 1
+
+            if not found:
+                return slice(begin, begin)
+
+        # Found the starting point, now find the end of the slice
+        for end in range(begin + 1, len(self._listing) + 1):
+            if end == len(self._listing) or self._listing[end].level <= level:
+                break
         return slice(begin, end)
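State keeps the whole tree as one flat, depth-first listing of FileEntry rows, and the rewritten _slice() resolves a path (optionally paired with an isfile flag) to the contiguous slice covering that entry and its subtree, or to an empty slice at the insertion point when it is missing. A sketch mirroring the fixture in the new tests at the bottom of this changeset (the leading FileEntry fields appear to be level, name and key; the trailing one is isfile):

    # Sketch: the flattened listing that State._slice() searches, and the
    # slices it returns for present and missing paths (values from the tests).
    from pathlib import PurePosixPath

    from cista.protocol import FileEntry
    from cista.watching import State

    state = State()
    state._listing = [
        FileEntry(0, "", "root", 0, 0, 0),        # root
        FileEntry(1, "bar", "bar", 0, 0, 0),      #   bar/
        FileEntry(2, "baz", "bar/baz", 0, 0, 0),  #     bar/baz
        FileEntry(1, "foo", "foo", 0, 0, 0),      #   foo
        FileEntry(1, "xxx", "xxx", 0, 0, 0),      #   xxx/
        FileEntry(2, "yyy", "xxx/yyy", 0, 0, 1),  #     xxx/yyy (a file)
    ]

    assert state._slice(PurePosixPath("bar")) == slice(1, 3)           # 'bar' plus its subtree
    assert state._slice(PurePosixPath("zzz")) == slice(6, 6)           # missing: insertion point at the end
    assert state._slice((PurePosixPath("bar/mmm"), 1)) == slice(3, 3)  # missing file under 'bar'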
@@ -148,11 +153,12 @@ def watcher_thread(loop):
         rootpath = config.config.path
         i = inotify.adapters.InotifyTree(rootpath.as_posix())
         # Initialize the tree from filesystem
-        old, new = state.root, walk()
-        if old != new:
+        new = walk()
         with state.lock:
+            old = state.root
+            if old != new:
                 state.root = new
-                broadcast(format_root(new), loop)
+                broadcast(format_update(old, new), loop)
 
         # The watching is not entirely reliable, so do a full refresh every minute
         refreshdl = time.monotonic() + 60.0

@@ -190,10 +196,10 @@ def watcher_thread_poll(loop):
 
     while not quit:
         rootpath = config.config.path
-        old = state.root
         new = walk()
-        if old != new:
         with state.lock:
+            old = state.root
+            if old != new:
                 state.root = new
                 broadcast(format_update(old, new), loop)
 
@@ -283,13 +289,11 @@ def format_update(old, new):
 
             del_count = 0
             rest = new[nidx:]
-            while old[oidx] not in rest:
+            while oidx < len(old) and old[oidx] not in rest:
                 del_count += 1
                 oidx += 1
 
             if del_count:
                 update.append(UpdDel(del_count))
-                oidx += 1
                 continue
 
             insert_items = []
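format_update() turns an old and a new flat listing into a stream of UpdKeep/UpdDel/UpdIns operations; the bounds check added above stops the deletion scan from running past the end of the old list. Its expected output for a single insertion, reproduced from test_insertions in the new test module:

    # Sketch: expected format_update() output, taken from the new tests.
    import msgspec

    from cista.protocol import FileEntry, UpdateMessage, UpdKeep, UpdIns
    from cista.watching import format_update

    def f(count, start=0):
        return [FileEntry(i, str(i), str(i), 0, 0, 0) for i in range(start, start + count)]

    old = f(3)                          # entries 0, 1, 2
    new = old[:2] + f(1, 10) + old[2:]  # entry 10 inserted before the last one
    update = msgspec.json.decode(format_update(old, new), type=UpdateMessage).update
    assert update == [UpdKeep(2), UpdIns(f(1, 10)), UpdKeep(1)]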
@@ -333,8 +337,9 @@ async def abroadcast(msg):
 
 async def start(app, loop):
     config.load_config()
+    use_inotify = False and sys.platform == "linux"
     app.ctx.watcher = threading.Thread(
-        target=watcher_thread if sys.platform == "linux" else watcher_thread_poll,
+        target=watcher_thread if use_inotify else watcher_thread_poll,
         args=[loop],
     )
     app.ctx.watcher.start()
@@ -44,8 +44,6 @@ watchEffect(() => {
 onMounted(loadSession)
 onMounted(watchConnect)
 onUnmounted(watchDisconnect)
-// Update human-readable x seconds ago messages from mtimes
-setInterval(documentStore.updateModified, 1000)
 const headerMain = ref<typeof HeaderMain | null>(null)
 let vert = 0
 let timer: any = null

@@ -79,7 +79,7 @@
 </template>
 
 <script setup lang="ts">
-import { ref, computed, watchEffect } from 'vue'
+import { ref, computed, watchEffect, onMounted, onUnmounted } from 'vue'
 import { useDocumentStore } from '@/stores/documents'
 import type { Document } from '@/repositories/Document'
 import FileRenameInput from './FileRenameInput.vue'

@@ -229,6 +229,13 @@ watchEffect(() => {
     focusBreadcrumb()
   }
 })
+// Update human-readable x seconds ago messages from mtimes
+let modifiedTimer: any = null
+const updateModified = () => {
+  for (const doc of props.documents) doc.modified = formatUnixDate(doc.mtime)
+}
+onMounted(() => { updateModified(); modifiedTimer = setInterval(updateModified, 1000) })
+onUnmounted(() => { clearInterval(modifiedTimer) })
 const mkdir = (doc: Document, name: string) => {
   const control = connect(controlUrl, {
     open() {
@@ -1,52 +0,0 @@
-<template>
-  <object
-    v-if="props.type === 'pdf'"
-    :data="dataURL"
-    type="application/pdf"
-    width="100%"
-    height="100%"
-  ></object>
-  <a-image
-    v-else-if="props.type === 'image'"
-    width="50%"
-    :src="dataURL"
-    @click="() => setVisible(true)"
-    :previewMask="false"
-    :preview="{
-      visibleImg,
-      onVisibleChange: setVisible
-    }"
-  />
-  <!-- Unknown case -->
-  <h1 v-else>Unsupported file type</h1>
-</template>
-
-<script setup lang="ts">
-import { watchEffect, ref } from 'vue'
-import Router from '@/router/index'
-import { url_document_get } from '@/repositories/Document'
-
-const dataURL = ref('')
-watchEffect(() => {
-  dataURL.value = new URL(
-    url_document_get + Router.currentRoute.value.path,
-    location.origin
-  ).toString()
-})
-const emit = defineEmits({
-  visibleImg(value: boolean) {
-    return value
-  }
-})
-
-function setVisible(value: boolean) {
-  emit('visibleImg', value)
-}
-
-const props = defineProps<{
-  type?: string
-  visibleImg: boolean
-}>()
-</script>
-
-<style></style>
@@ -9,7 +9,7 @@
     <SvgButton
       name="create-folder"
       data-tooltip="New folder"
-      @click="() => documentStore.fileExplorer.newFolder()"
+      @click="() => documentStore.fileExplorer!.newFolder()"
     />
     <slot></slot>
     <div class="spacer smallgap"></div>

@@ -42,15 +42,15 @@ const showSearchInput = ref<boolean>(false)
 const search = ref<HTMLInputElement | null>()
 const searchButton = ref<HTMLButtonElement | null>()
 
-const closeSearch = ev => {
+const closeSearch = (ev: Event) => {
   if (!showSearchInput.value) return // Already closing
   showSearchInput.value = false
   const breadcrumb = document.querySelector('.breadcrumb') as HTMLElement
   breadcrumb.focus()
   updateSearch(ev)
 }
-const updateSearch = ev => {
-  const q = ev.target.value
+const updateSearch = (ev: Event) => {
+  const q = (ev.target as HTMLInputElement).value
   let p = props.path.join('/')
   p = p ? `/${p}` : ''
   const url = q ? `${p}//${q}` : (p || '/')

@@ -58,9 +58,9 @@ const updateSearch = ev => {
   if (!props.query && q) router.push(url)
   else router.replace(url)
 }
-const toggleSearchInput = () => {
+const toggleSearchInput = (ev: Event) => {
   showSearchInput.value = !showSearchInput.value
-  if (!showSearchInput.value) return closeSearch()
+  if (!showSearchInput.value) return closeSearch(ev)
   nextTick(() => {
     const input = search.value
     if (input) input.focus()
@@ -34,7 +34,7 @@ const op = (op: string, dst?: string) => {
   // @ts-ignore
   if (dst !== undefined) msg.dst = dst
   const control = connect(controlUrl, {
-    message(ev: WebSocmetMessageEvent) {
+    message(ev: MessageEvent) {
       const res = JSON.parse(ev.data)
       if ('error' in res) {
         console.error('Control socket error', msg, res.error)
@@ -1,27 +0,0 @@
-<template>
-  <template v-for="upload in documentStore.uploadingDocuments" :key="upload.key">
-    <span>{{ upload.name }}</span>
-    <div class="progress-container">
-      <a-progress :percent="upload.progress" />
-      <CloseCircleOutlined class="close-button" @click="dismissUpload(upload.key)" />
-    </div>
-  </template>
-</template>
-<script setup lang="ts">
-import { useDocumentStore } from '@/stores/documents'
-const documentStore = useDocumentStore()
-
-function dismissUpload(key: number) {
-  documentStore.deleteUploadingDocument(key)
-}
-</script>
-
-<style scoped>
-.progress-container {
-  display: flex;
-  align-items: center;
-}
-.close-button:hover {
-  color: #b81414;
-}
-</style>
@@ -11,29 +11,79 @@ const props = defineProps({
   path: Array<string>
 })
 
+type CloudFile = {
+  file: File
+  cloudName: string
+  cloudPos: number
+}
+function pasteHandler(event: ClipboardEvent) {
+  const items = Array.from(event.clipboardData?.items ?? [])
+  const infiles = [] as File[]
+  const dirs = [] as FileSystemDirectoryEntry[]
+  for (const item of items) {
+    if (item.kind !== 'file') continue
+    const entry = item.webkitGetAsEntry()
+    if (entry?.isFile) {
+      const file = item.getAsFile()
+      infiles.push(file)
+    } else if (entry?.isDirectory) {
+      dirs.push(entry as FileSystemDirectoryEntry)
+    }
+  }
+  if (infiles.length || dirs.length) {
+    event.preventDefault()
+    uploadFiles(infiles)
+    for (const entry of dirs) pasteDirectory(entry, `${props.path!.join('/')}/${entry.name}`)
+  }
+}
+const pasteDirectory = async (entry: FileSystemDirectoryEntry, loc: string) => {
+  const reader = entry.createReader()
+  const entries = await new Promise<any[]>(resolve => reader.readEntries(resolve))
+  const cloudfiles = [] as CloudFile[]
+  for (const entry of entries) {
+    const cloudName = `${loc}/${entry.name}`
+    if (entry.isFile) {
+      const file = await new Promise(resolve => entry.file(resolve)) as File
+      cloudfiles.push({file, cloudName, cloudPos: 0})
+    } else if (entry.isDirectory) {
+      await pasteDirectory(entry, cloudName)
+    }
+  }
+  if (cloudfiles.length) uploadCloudFiles(cloudfiles)
+}
 function uploadHandler(event: Event) {
   event.preventDefault()
-  event.stopPropagation()
   // @ts-ignore
-  let infiles = Array.from(event.dataTransfer?.files || event.target.files) as File[]
-  if (!infiles.length) return
+  const input = event.target as HTMLInputElement | null
+  const infiles = Array.from((input ?? (event as DragEvent).dataTransfer)?.files ?? []) as File[]
+  if (input) input.value = ''
+  if (infiles.length) uploadFiles(infiles)
+}
+
+const uploadFiles = (infiles: File[]) => {
   const loc = props.path!.join('/')
-  for (const f of infiles) {
-    f.cloudName = loc + '/' + (f.webkitRelativePath || f.name)
-    f.cloudPos = 0
+  let files = []
+  for (const file of infiles) {
+    files.push({
+      file,
+      cloudName: loc + '/' + (file.webkitRelativePath || file.name),
+      cloudPos: 0,
+    })
   }
-  const dotfiles = infiles.filter(f => f.cloudName.includes('/.'))
+  uploadCloudFiles(files)
+}
+const uploadCloudFiles = (files: CloudFile[]) => {
+  const dotfiles = files.filter(f => f.cloudName.includes('/.'))
   if (dotfiles.length) {
     documentStore.error = "Won't upload dotfiles"
     console.log("Dotfiles omitted", dotfiles)
-    infiles = infiles.filter(f => !f.cloudName.includes('/.'))
+    files = files.filter(f => !f.cloudName.includes('/.'))
   }
-  if (!infiles.length) return
-  infiles.sort((a, b) => collator.compare(a.cloudName, b.cloudName))
+  if (!files.length) return
+  files.sort((a, b) => collator.compare(a.cloudName, b.cloudName))
   // @ts-ignore
-  upqueue = upqueue.concat(infiles)
-  statsAdd(infiles)
+  upqueue = [...upqueue, ...files]
+  statsAdd(files)
   startWorker()
 }
 
@@ -49,13 +99,14 @@ const uprogress_init = {
   tlast: 0,
   statbytes: 0,
   statdur: 0,
-  files: [],
+  files: [] as CloudFile[],
   filestart: 0,
   fileidx: 0,
   filecount: 0,
   filename: '',
   filesize: 0,
   filepos: 0,
+  status: 'idle',
 }
 const uprogress = reactive({...uprogress_init})
 const percent = computed(() => uprogress.uploaded / uprogress.total * 100)

@@ -66,7 +117,7 @@ const speed = computed(() => {
   if (tsince > 1 / s) return 1 / tsince // Next block is late or not coming, decay
   return s // "Current speed"
 })
-const speeddisp = computed(() => speed.value ? speed.value.toFixed(speed.value < 100 ? 1 : 0) + '\u202FMB/s': 'stalled')
+const speeddisp = computed(() => speed.value ? speed.value.toFixed(speed.value < 10 ? 1 : 0) + '\u202FMB/s': 'stalled')
 setInterval(() => {
   if (Date.now() - uprogress.tlast > 3000) {
     // Reset

@@ -78,7 +129,7 @@ setInterval(() => {
     uprogress.statdur *= .9
   }
 }, 100)
-const statUpdate = ({name, size, start, end}) => {
+const statUpdate = ({name, size, start, end}: {name: string, size: number, start: number, end: number}) => {
   if (name !== uprogress.filename) return // If stats have been reset
   const now = Date.now()
   uprogress.uploaded = uprogress.filestart + end

@@ -97,7 +148,7 @@ const statNextFile = () => {
   const f = uprogress.files.shift()
   if (!f) return statReset()
   uprogress.filepos = 0
-  uprogress.filesize = f.size
+  uprogress.filesize = f.file.size
   uprogress.filename = f.cloudName
 }
 const statReset = () => {

@@ -105,14 +156,14 @@ const statReset = () => {
   uprogress.t0 = Date.now()
   uprogress.tlast = uprogress.t0 + 1
 }
-const statsAdd = (f: Array<File>) => {
+const statsAdd = (f: CloudFile[]) => {
   if (uprogress.files.length === 0) statReset()
-  uprogress.total += f.reduce((a, b) => a + b.size, 0)
+  uprogress.total += f.reduce((a, b) => a + b.file.size, 0)
   uprogress.filecount += f.length
-  uprogress.files = uprogress.files.concat(f)
+  uprogress.files = [...uprogress.files, ...f]
   statNextFile()
 }
-let upqueue = [] as File[]
+let upqueue = [] as CloudFile[]
 
 // TODO: Rewrite as WebSocket class
 const WSCreate = async () => await new Promise<WebSocket>(resolve => {

@@ -155,18 +206,17 @@ const worker = async () => {
   const ws = await WSCreate()
   while (upqueue.length) {
     const f = upqueue[0]
-    if (f.cloudPos === f.size) {
-      upqueue.shift()
-      continue
-    }
     const start = f.cloudPos
-    const end = Math.min(f.size, start + (1<<20))
-    const control = { name: f.cloudName, size: f.size, start, end }
-    const data = f.slice(start, end)
+    const end = Math.min(f.file.size, start + (1<<20))
+    const control = { name: f.cloudName, size: f.file.size, start, end }
+    const data = f.file.slice(start, end)
     f.cloudPos = end
     // Note: files may get modified during I/O
+    // @ts-ignore FIXME proper WebSocket class, avoid attaching functions to WebSocket object
     ws.sendMsg(control)
+    // @ts-ignore
     await ws.sendData(data)
+    if (f.cloudPos === f.file.size) upqueue.shift()
   }
   if (upqueue.length) startWorker()
   uprogress.status = "idle"

@@ -184,8 +234,10 @@ onMounted(() => {
   // Need to prevent both to prevent browser from opening the file
   addEventListener('dragover', uploadHandler)
   addEventListener('drop', uploadHandler)
+  addEventListener('paste', pasteHandler)
 })
 onUnmounted(() => {
+  removeEventListener('paste', pasteHandler)
   removeEventListener('dragover', uploadHandler)
   removeEventListener('drop', uploadHandler)
 })

@@ -207,7 +259,7 @@ onUnmounted(() => {
       {{ (uprogress.filepos / uprogress.filesize * 100).toFixed(0) + '\u202F%' }}
     </span>
   </span>
-  <span class="position" v-if="uprogress.filesize > 1e7">
+  <span class="position" v-if="uprogress.total > 1e7">
     {{ (uprogress.uploaded / 1e6).toFixed(0) + '\u202F/\u202F' + (uprogress.total / 1e6).toFixed(0) + '\u202FMB' }}
   </span>
   <span class="speed">{{ speeddisp }}</span>
@@ -109,7 +109,7 @@ const handleWatchMessage = (event: MessageEvent) => {
   }
 }
 
-function handleRootMessage({ root }: { root: DirEntry }) {
+function handleRootMessage({ root }: { root: FileEntry[] }) {
   const store = useDocumentStore()
   console.log('Watch root', root)
   store.updateRoot(root)

@@ -135,7 +135,7 @@ function handleUpdateMessage(updateData: { update: UpdateEntry[] }) {
     else console.log("Unknown update action", action, arg)
   }
   if (oidx != tree.length)
-    throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}`)
+    throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}, new tree ${newtree.length}`)
   store.updateRoot(newtree)
   tree = newtree
   saveSession()
@@ -4,7 +4,6 @@ import { defineStore } from 'pinia'
 import { collator } from '@/utils'
 import { logoutUser } from '@/repositories/User'
 import { watchConnect } from '@/repositories/WS'
-import { format } from 'path'
 
 type FileData = { id: string; mtime: number; size: number; dir: DirectoryData }
 type DirectoryData = {

@@ -22,9 +21,7 @@ export const useDocumentStore = defineStore({
   state: () => ({
     document: [] as Document[],
     selected: new Set<FUID>(),
-    uploadingDocuments: [],
-    uploadCount: 0 as number,
-    fileExplorer: null,
+    fileExplorer: null as any,
     error: '' as string,
     connected: false,
     server: {} as Record<string, any>,

@@ -54,12 +51,8 @@ export const useDocumentStore = defineStore({
       })
       loc.push(name)
     }
-    console.log("Documents", docs)
     this.document = docs as Document[]
   },
-  updateModified() {
-    for (const doc of this.document) doc.modified = formatUnixDate(doc.mtime)
-  },
   login(username: string, privileged: boolean) {
     this.user.username = username
     this.user.privileged = privileged
tests/test_watching.py (new file, 136 lines)

@@ -0,0 +1,136 @@
+from pathlib import PurePosixPath
+
+import msgspec
+import pytest
+
+from cista.protocol import FileEntry, UpdateMessage, UpdDel, UpdIns, UpdKeep
+from cista.watching import State, format_update
+
+
+def decode(data: str):
+    return msgspec.json.decode(data, type=UpdateMessage).update
+
+
+# Helper function to create a list of FileEntry objects
+def f(count, start=0):
+    return [FileEntry(i, str(i), str(i), 0, 0, 0) for i in range(start, start + count)]
+
+
+def test_identical_lists():
+    old_list = f(3)
+    new_list = old_list.copy()
+    expected = [UpdKeep(3)]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_completely_different_lists():
+    old_list = f(3)
+    new_list = f(3, 3)  # Different entries
+    expected = [UpdDel(3), UpdIns(new_list)]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_insertions():
+    old_list = f(3)
+    new_list = old_list[:2] + f(1, 10) + old_list[2:]
+    expected = [UpdKeep(2), UpdIns(f(1, 10)), UpdKeep(1)]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_deletions():
+    old_list = f(3)
+    new_list = [old_list[0], old_list[2]]
+    expected = [UpdKeep(1), UpdDel(1), UpdKeep(1)]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_mixed_operations():
+    old_list = f(4)
+    new_list = [old_list[0], old_list[2], *f(1, 10)]
+    expected = [UpdKeep(1), UpdDel(1), UpdKeep(1), UpdDel(1), UpdIns(f(1, 10))]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_empty_old_list():
+    old_list = []
+    new_list = f(3)
+    expected = [UpdIns(new_list)]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_empty_new_list():
+    old_list = f(3)
+    new_list = []
+    expected = [UpdDel(3)]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def test_longer_lists():
+    old_list = f(6)
+    new_list = f(1, 6) + old_list[1:3] + old_list[4:5] + f(2, 7)
+    expected = [
+        UpdDel(1),
+        UpdIns(f(1, 6)),
+        UpdKeep(2),
+        UpdDel(1),
+        UpdKeep(1),
+        UpdDel(1),
+        UpdIns(f(2, 7)),
+    ]
+    assert decode(format_update(old_list, new_list)) == expected
+
+
+def sortkey(name):
+    # Define the sorting key for names here
+    return name.lower()
+
+
+@pytest.fixture()
+def state():
+    entries = [
+        FileEntry(0, "", "root", 0, 0, 0),
+        FileEntry(1, "bar", "bar", 0, 0, 0),
+        FileEntry(2, "baz", "bar/baz", 0, 0, 0),
+        FileEntry(1, "foo", "foo", 0, 0, 0),
+        FileEntry(1, "xxx", "xxx", 0, 0, 0),
+        FileEntry(2, "yyy", "xxx/yyy", 0, 0, 1),
+    ]
+    s = State()
+    s._listing = entries
+    return s
+
+
+def test_existing_directory(state):
+    path = PurePosixPath("bar")
+    expected_slice = slice(1, 3)  # Includes 'bar' and 'baz'
+    assert state._slice(path) == expected_slice
+
+
+def test_existing_file(state):
+    path = PurePosixPath("xxx/yyy")
+    expected_slice = slice(5, 6)  # Only includes 'yyy'
+    assert state._slice(path) == expected_slice
+
+
+def test_nonexistent_directory(state):
+    path = PurePosixPath("zzz")
+    expected_slice = slice(6, 6)  # 'zzz' would be inserted at end
+    assert state._slice(path) == expected_slice
+
+
+def test_nonexistent_file(state):
+    path = (PurePosixPath("bar/mmm"), 1)
+    expected_slice = slice(3, 3)  # A file would be inserted after 'baz' under 'bar'
+    assert state._slice(path) == expected_slice
+
+
+def test_root_directory(state):
+    path = PurePosixPath()
+    expected_slice = slice(0, 6)  # Entire tree
+    assert state._slice(path) == expected_slice
+
+
+def test_directory_with_subdirs_and_files(state):
+    path = PurePosixPath("xxx")
+    expected_slice = slice(4, 6)  # Includes 'xxx' and 'yyy'
+    assert state._slice(path) == expected_slice