Compare commits

13 commits: 37167a41a6...v0.3.0

Commits: 7a08f7cbe2, dd37238510, c8d5f335b1, bb80b3ee54, 06d860c601, c321de13fd, 278e8303c4, 9854dd01cc, fb03fa5430, e26cb8f70a, 9bbbc829a1, 876d76bc1f, 4a53d0b8e2
@@ -1,6 +1,7 @@
 # Web File Storage

 Run directly from repository with Hatch (or use pip install as usual):

 ```sh
 hatch run cista -l :3000 /path/to/files
 ```
@@ -8,16 +9,17 @@ hatch run cista -l :3000 /path/to/files
 Settings incl. these arguments are stored to config file on the first startup and later `hatch run cista` is sufficient. If the `cista` script is missing, consider `pip install -e .` (within `hatch shell`) or some other trickery (known issue with installs made prior to adding the startup script).

 Create your user account:

 ```sh
 hatch run cista --user admin --privileged
 ```

 ## Build frontend

-Prebuilt frontend is provided in repository but for any changes it will need to be manually rebuilt:
+Frontend needs to be built before using and after any frontend changes:

 ```sh
-cd cista-front
+cd frontend
 npm install
 npm run build
 ```
Deleted binary image (4.2 KiB).
Deleted file (241 lines):

@@ -1,241 +0,0 @@
-<!DOCTYPE html>
-<title>Storage</title>
-<style>
-  body {
-    font-family: sans-serif;
-    max-width: 100ch;
-    margin: 0 auto;
-    padding: 1em;
-    background-color: #333;
-    color: #eee;
-  }
-  td {
-    text-align: right;
-    padding: .5em;
-  }
-  td:first-child {
-    text-align: left;
-  }
-  a {
-    color: inherit;
-    text-decoration: none;
-  }
-</style>
-<div>
-  <h2>Quick file upload</h2>
-  <p>Uses parallel WebSocket connections for increased bandwidth /api/upload</p>
-  <input type=file id=fileInput>
-  <progress id=progressBar value=0 max=1></progress>
-</div>
-
-<div>
-  <h2>Files</h2>
-  <ul id=file_list></ul>
-</div>
-
-<script>
-let files = {}
-let flatfiles = {}
-
-function createWatchSocket() {
-  const wsurl = new URL("/api/watch", location.href.replace(/^http/, 'ws'))
-  const ws = new WebSocket(wsurl)
-  ws.onmessage = event => {
-    msg = JSON.parse(event.data)
-    if (msg.update) {
-      tree_update(msg.update)
-      file_list(files)
-    } else {
-      console.log("Unkonwn message from watch socket", msg)
-    }
-  }
-}
-
-createWatchSocket()
-
-function tree_update(msg) {
-  console.log("Tree update", msg)
-  let node = files
-  for (const elem of msg) {
-    if (elem.deleted) {
-      const p = node.dir[elem.name].path
-      delete node.dir[elem.name]
-      delete flatfiles[p]
-      break
-    }
-    if (elem.name !== undefined) node = node.dir[elem.name] ||= {}
-    if (elem.size !== undefined) node.size = elem.size
-    if (elem.mtime !== undefined) node.mtime = elem.mtime
-    if (elem.dir !== undefined) node.dir = elem.dir
-  }
-  // Update paths and flatfiles
-  files.path = "/"
-  const nodes = [files]
-  flatfiles = {}
-  while (node = nodes.pop()) {
-    flatfiles[node.path] = node
-    if (node.dir === undefined) continue
-    for (const name of Object.keys(node.dir)) {
-      const child = node.dir[name]
-      child.path = node.path + name + (child.dir === undefined ? "" : "/")
-      nodes.push(child)
-    }
-  }
-}
-
-var collator = new Intl.Collator(undefined, {numeric: true, sensitivity: 'base'});
-
-const compare_path = (a, b) => collator.compare(a.path, b.path)
-const compare_time = (a, b) => a.mtime > b.mtime
-
-function file_list(files) {
-  const table = document.getElementById("file_list")
-  const sorted = Object.values(flatfiles).sort(compare_time)
-  table.innerHTML = ""
-  for (const f of sorted) {
-    const {path, size, mtime} = f
-    const tr = document.createElement("tr")
-    const name_td = document.createElement("td")
-    const size_td = document.createElement("td")
-    const mtime_td = document.createElement("td")
-    const a = document.createElement("a")
-    table.appendChild(tr)
-    tr.appendChild(name_td)
-    tr.appendChild(size_td)
-    tr.appendChild(mtime_td)
-    name_td.appendChild(a)
-    size_td.textContent = size
-    mtime_td.textContent = formatUnixDate(mtime)
-    a.textContent = path
-    a.href = `/files${path}`
-    /*a.onclick = event => {
-      if (window.showSaveFilePicker) {
-        event.preventDefault()
-        download_ws(name, size)
-      }
-    }
-    a.download = ""*/
-  }
-}
-
-function formatUnixDate(t) {
-  const date = new Date(t * 1000)
-  const now = new Date()
-  const diff = date - now
-  const formatter = new Intl.RelativeTimeFormat('en', { numeric: 'auto' })
-
-  if (Math.abs(diff) <= 60000) {
-    return formatter.format(Math.round(diff / 1000), 'second')
-  }
-
-  if (Math.abs(diff) <= 3600000) {
-    return formatter.format(Math.round(diff / 60000), 'minute')
-  }
-
-  if (Math.abs(diff) <= 86400000) {
-    return formatter.format(Math.round(diff / 3600000), 'hour')
-  }
-
-  if (Math.abs(diff) <= 604800000) {
-    return formatter.format(Math.round(diff / 86400000), 'day')
-  }
-
-  return date.toLocaleDateString()
-}
-
-async function download_ws(name, size) {
-  const fh = await window.showSaveFilePicker({
-    suggestedName: name,
-  })
-  const writer = await fh.createWritable()
-  writer.truncate(size)
-  const wsurl = new URL("/api/download", location.href.replace(/^http/, 'ws'))
-  const ws = new WebSocket(wsurl)
-  let pos = 0
-  ws.onopen = () => {
-    console.log("Downloading over WebSocket", name, size)
-    ws.send(JSON.stringify({name, start: 0, end: size, size}))
-  }
-  ws.onmessage = event => {
-    if (typeof event.data === 'string') {
-      const msg = JSON.parse(event.data)
-      console.log("Download finished", msg)
-      ws.close()
-      return
-    }
-    console.log("Received chunk", name, pos, pos + event.data.size)
-    pos += event.data.size
-    writer.write(event.data)
-  }
-  ws.onclose = () => {
-    if (pos < size) {
-      console.log("Download aborted", name, pos)
-      writer.truncate(pos)
-    }
-    writer.close()
-  }
-}
-
-const fileInput = document.getElementById("fileInput")
-const progress = document.getElementById("progressBar")
-const numConnections = 2
-const chunkSize = 1<<20
-const wsConnections = new Set()
-
-//for (let i = 0; i < numConnections; i++) createUploadWS()
-
-function createUploadWS() {
-  const wsurl = new URL("/api/upload", location.href.replace(/^http/, 'ws'))
-  const ws = new WebSocket(wsurl)
-  ws.binaryType = 'arraybuffer'
-  ws.onopen = () => {
-    wsConnections.add(ws)
-    console.log("Upload socket connected")
-  }
-  ws.onmessage = event => {
-    msg = JSON.parse(event.data)
-    if (msg.written) progress.value += +msg.written
-    else console.log(`Error: ${msg.error}`)
-  }
-  ws.onclose = () => {
-    wsConnections.delete(ws)
-    console.log("Upload socket disconnected, reconnecting...")
-    setTimeout(createUploadWS, 1000)
-  }
-}
-
-async function load(file, start, end) {
-  const reader = new FileReader()
-  const load = new Promise(resolve => reader.onload = resolve)
-  reader.readAsArrayBuffer(file.slice(start, end))
-  const event = await load
-  return event.target.result
-}
-
-async function sendChunk(file, start, end, ws) {
-  const chunk = await load(file, start, end)
-  ws.send(JSON.stringify({
-    name: file.name,
-    size: file.size,
-    start: start,
-    end: end
-  }))
-  ws.send(chunk)
-}
-
-fileInput.addEventListener("change", async function() {
-  const file = this.files[0]
-  const numChunks = Math.ceil(file.size / chunkSize)
-  progress.value = 0
-  progress.max = file.size
-
-  console.log(wsConnections)
-  for (let i = 0; i < numChunks; i++) {
-    const ws = Array.from(wsConnections)[i % wsConnections.size]
-    const start = i * chunkSize
-    const end = Math.min(file.size, start + chunkSize)
-    const res = await sendChunk(file, start, end, ws)
-  }
-})
-
-</script>
Deleted file (52 lines):

@@ -1,52 +0,0 @@
-<template>
-  <object
-    v-if="props.type === 'pdf'"
-    :data="dataURL"
-    type="application/pdf"
-    width="100%"
-    height="100%"
-  ></object>
-  <a-image
-    v-else-if="props.type === 'image'"
-    width="50%"
-    :src="dataURL"
-    @click="() => setVisible(true)"
-    :previewMask="false"
-    :preview="{
-      visibleImg,
-      onVisibleChange: setVisible
-    }"
-  />
-  <!-- Unknown case -->
-  <h1 v-else>Unsupported file type</h1>
-</template>
-
-<script setup lang="ts">
-import { watchEffect, ref } from 'vue'
-import Router from '@/router/index'
-import { url_document_get } from '@/repositories/Document'
-
-const dataURL = ref('')
-watchEffect(() => {
-  dataURL.value = new URL(
-    url_document_get + Router.currentRoute.value.path,
-    location.origin
-  ).toString()
-})
-const emit = defineEmits({
-  visibleImg(value: boolean) {
-    return value
-  }
-})
-
-function setVisible(value: boolean) {
-  emit('visibleImg', value)
-}
-
-const props = defineProps<{
-  type?: string
-  visibleImg: boolean
-}>()
-</script>
-
-<style></style>
Deleted file (27 lines):

@@ -1,27 +0,0 @@
-<template>
-  <template v-for="upload in documentStore.uploadingDocuments" :key="upload.key">
-    <span>{{ upload.name }}</span>
-    <div class="progress-container">
-      <a-progress :percent="upload.progress" />
-      <CloseCircleOutlined class="close-button" @click="dismissUpload(upload.key)" />
-    </div>
-  </template>
-</template>
-<script setup lang="ts">
-import { useDocumentStore } from '@/stores/documents'
-const documentStore = useDocumentStore()
-
-function dismissUpload(key: number) {
-  documentStore.deleteUploadingDocument(key)
-}
-</script>
-
-<style scoped>
-.progress-container {
-  display: flex;
-  align-items: center;
-}
-.close-button:hover {
-  color: #b81414;
-}
-</style>
Deleted file (101 lines):

@@ -1,101 +0,0 @@
-<script setup lang="ts">
-import { useDocumentStore } from '@/stores/documents'
-import { h, ref } from 'vue'
-
-const fileUploadButton = ref()
-const folderUploadButton = ref()
-const documentStore = useDocumentStore()
-const open = (placement: any) => openNotification(placement)
-
-const isNotificationOpen = ref(false)
-const openNotification = (placement: any) => {
-  if (!isNotificationOpen.value) {
-    /*
-    api.open({
-      message: `Uploading documents`,
-      description: h(NotificationLoading),
-      placement,
-      duration: 0,
-      onClose: () => { isNotificationOpen.value = false }
-    });*/
-    isNotificationOpen.value = true
-  }
-}
-
-function uploadFileHandler() {
-  fileUploadButton.value.click()
-}
-
-async function load(file: File, start: number, end: number): Promise<ArrayBuffer> {
-  const reader = new FileReader()
-  const load = new Promise<Event>(resolve => (reader.onload = resolve))
-  reader.readAsArrayBuffer(file.slice(start, end))
-  const event = await load
-  if (event.target && event.target instanceof FileReader) {
-    return event.target.result as ArrayBuffer
-  } else {
-    throw new Error('Error loading file')
-  }
-}
-
-async function sendChunk(file: File, start: number, end: number) {
-  const ws = documentStore.wsUpload
-  if (ws) {
-    const chunk = await load(file, start, end)
-
-    ws.send(
-      JSON.stringify({
-        name: file.name,
-        size: file.size,
-        start: start,
-        end: end
-      })
-    )
-    ws.send(chunk)
-  }
-}
-
-async function uploadHandler(event: Event) {
-  const target = event.target as HTMLInputElement
-  const chunkSize = 1 << 20
-  if (!target?.files?.length) {
-    documentStore.error = 'No files selected'
-    return
-  }
-  for (const idx in target.files) {
-    const file = target.files[idx]
-    console.log('Uploading', file)
-    const numChunks = Math.ceil(file.size / chunkSize)
-    const document = documentStore.pushUploadingDocuments(file.name)
-    open('bottomRight')
-    for (let i = 0; i < numChunks; i++) {
-      const start = i * chunkSize
-      const end = Math.min(file.size, start + chunkSize)
-      const res = await sendChunk(file, start, end)
-      console.log('progress: ' + (100 * (i + 1)) / numChunks)
-      console.log('Num Chunks: ' + numChunks)
-      documentStore.updateUploadingDocuments(document.key, (100 * (i + 1)) / numChunks)
-    }
-  }
-}
-</script>
-<template>
-  <template>
-    <input
-      ref="fileUploadButton"
-      @change="uploadHandler"
-      class="upload-input"
-      type="file"
-      multiple
-    />
-    <input
-      ref="folderUploadButton"
-      @change="uploadHandler"
-      class="upload-input"
-      type="file"
-      webkitdirectory
-    />
-  </template>
-  <SvgButton name="add-file" data-tooltip="Upload files" @click="fileUploadButton.click()" />
-  <SvgButton name="add-folder" data-tooltip="Upload folder" @click="folderUploadButton.click()" />
-</template>
@@ -105,9 +105,9 @@ def _confdir(args):
     if confdir.exists() and not confdir.is_dir():
         if confdir.name != config.conffile.name:
             raise ValueError("Config path is not a directory")
-        # Accidentally pointed to the cista.toml, use parent
+        # Accidentally pointed to the db.toml, use parent
        confdir = confdir.parent
-    config.conffile = config.conffile.with_parent(confdir)
+    config.conffile = confdir / config.conffile.name


 def _user(args):
cista/api.py

@@ -37,10 +37,18 @@ async def upload(req, ws):
         )
         req = msgspec.json.decode(text, type=FileRange)
         pos = req.start
-        data = None
-        while pos < req.end and (data := await ws.recv()) and isinstance(data, bytes):
+        while True:
+            data = await ws.recv()
+            if not isinstance(data, bytes):
+                break
+            if len(data) > req.end - pos:
+                raise ValueError(
+                    f"Expected up to {req.end - pos} bytes, got {len(data)} bytes"
+                )
             sentsize = await alink(("upload", req.name, pos, data, req.size))
             pos += typing.cast(int, sentsize)
+            if pos >= req.end:
+                break
         if pos != req.end:
             d = f"{len(data)} bytes" if isinstance(data, bytes) else data
             raise ValueError(f"Expected {req.end - pos} more bytes, got {d}")
@@ -88,7 +96,7 @@ async def watch(req, ws):
         msgspec.json.encode(
             {
                 "server": {
-                    "name": "Cista", # Should be configurable
+                    "name": config.config.name or config.config.path.name,
                     "version": __version__,
                     "public": config.config.public,
                 },
@@ -103,11 +111,11 @@ async def watch(req, ws):
     )
     uuid = token_bytes(16)
     try:
-        with watching.tree_lock:
+        with watching.state.lock:
             q = watching.pubsub[uuid] = asyncio.Queue()
             # Init with disk usage and full tree
-            await ws.send(watching.format_du())
-            await ws.send(watching.format_tree())
+            await ws.send(watching.format_space(watching.state.space))
+            await ws.send(watching.format_root(watching.state.root))
         # Send updates
         while True:
             await ws.send(await q.get())
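The upload hunk above tightens the receive loop around a simple wire protocol: the client sends a JSON FileRange header (name, size, start, end) followed by the raw bytes for that range, and the server reports progress with `{"written": n}` messages (as the old frontend code earlier in this diff expects). Below is a minimal illustrative client, not part of this changeset; the endpoint path and message fields are taken from the diff, and the third-party `websockets` package is an assumption.

```python
import asyncio
import json
from pathlib import Path

import websockets  # assumption: any asyncio WebSocket client would do

CHUNK = 1 << 20  # 1 MiB, the chunk size used by the frontend code in this diff


async def upload(url: str, path: Path) -> None:
    """Send one file as a sequence of (JSON header, binary chunk) pairs."""
    size = path.stat().st_size
    async with websockets.connect(url) as ws:
        with path.open("rb") as f:
            pos = 0
            while pos < size:
                chunk = f.read(CHUNK)
                end = pos + len(chunk)
                # Header first, then exactly the bytes it describes.
                await ws.send(json.dumps(
                    {"name": path.name, "size": size, "start": pos, "end": end}
                ))
                await ws.send(chunk)
                pos = end
        # The server acknowledges progress as {"written": n} (or {"error": ...});
        # a real client would read those concurrently to drive a progress bar.


# asyncio.run(upload("ws://localhost:3000/api/upload", Path("example.bin")))
```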
cista/app.py

@@ -1,10 +1,8 @@
 import asyncio
 import datetime
 import mimetypes
-from collections import deque
 from concurrent.futures import ThreadPoolExecutor
-from importlib.resources import files
-from pathlib import Path
+from pathlib import Path, PurePath, PurePosixPath
 from stat import S_IFDIR, S_IFREG
 from urllib.parse import unquote
 from wsgiref.handlers import format_date_time
@@ -12,15 +10,13 @@ from wsgiref.handlers import format_date_time
 import brotli
 import sanic.helpers
 from blake3 import blake3
-from natsort import natsorted, ns
 from sanic import Blueprint, Sanic, empty, raw
-from sanic.exceptions import Forbidden, NotFound
+from sanic.exceptions import Forbidden, NotFound, ServerError
 from sanic.log import logging
 from stream_zip import ZIP_AUTO, stream_zip

 from cista import auth, config, session, watching
 from cista.api import bp
-from cista.protocol import DirEntry
 from cista.util.apphelpers import handle_sanic_exception

 # Workaround until Sanic PR #2824 is merged
@@ -36,7 +32,9 @@ app.exception(Exception)(handle_sanic_exception)
 async def main_start(app, loop):
     config.load_config()
     await watching.start(app, loop)
-    app.ctx.threadexec = ThreadPoolExecutor(max_workers=8)
+    app.ctx.threadexec = ThreadPoolExecutor(
+        max_workers=8, thread_name_prefix="cista-ioworker"
+    )


 @app.after_server_stop
@@ -49,8 +47,8 @@ async def main_stop(app, loop):
 async def use_session(req):
     req.ctx.session = session.get(req)
     try:
-        req.ctx.username = req.ctx.session["username"]
-        req.ctx.user = config.config.users[req.ctx.session["username"]] # type: ignore
+        req.ctx.username = req.ctx.session["username"] # type: ignore
+        req.ctx.user = config.config.users[req.ctx.username]
     except (AttributeError, KeyError, TypeError):
         req.ctx.username = None
         req.ctx.user = None
@@ -81,22 +79,16 @@ def http_fileserver(app, _):
 www = {}


-@app.before_server_start
-async def load_wwwroot(*_ignored):
-    global www
-    www = await asyncio.get_event_loop().run_in_executor(None, _load_wwwroot, www)
-
-
 def _load_wwwroot(www):
     wwwnew = {}
-    base = files("cista") / "wwwroot"
-    paths = ["."]
+    base = Path(__file__).with_name("wwwroot")
+    paths = [PurePath()]
     while paths:
         path = paths.pop(0)
         current = base / path
         for p in current.iterdir():
             if p.is_dir():
-                paths.append(current / p.parts[-1])
+                paths.append(p.relative_to(base))
                 continue
             name = p.relative_to(base).as_posix()
             mime = mimetypes.guess_type(name)[0] or "application/octet-stream"
@@ -127,15 +119,35 @@ def _load_wwwroot(www):
            if len(br) >= len(data):
                br = False
            wwwnew[name] = data, br, headers
+    if not wwwnew:
+        raise ServerError(
+            "Web frontend missing. Did you forget npm run build?",
+            extra={"wwwroot": str(base)},
+            quiet=True,
+        )
     return wwwnew


-@app.add_task
+@app.before_server_start
+async def start(app):
+    await load_wwwroot(app)
+    if app.debug:
+        app.add_task(refresh_wwwroot())
+
+
+async def load_wwwroot(app):
+    global www
+    www = await asyncio.get_event_loop().run_in_executor(
+        app.ctx.threadexec, _load_wwwroot, www
+    )
+
+
 async def refresh_wwwroot():
     while True:
+        await asyncio.sleep(0.5)
         try:
             wwwold = www
-            await load_wwwroot()
+            await load_wwwroot(app)
             changes = ""
             for name in sorted(www):
                 attr = www[name]
@@ -151,7 +163,6 @@ async def refresh_wwwroot():
             print("Error loading wwwroot", e)
             if not app.debug:
                 return
-        await asyncio.sleep(0.5)


 @app.route("/<path:path>", methods=["GET", "HEAD"])
@@ -166,66 +177,70 @@ async def wwwroot(req, path=""):
         return empty(304, headers=headers)
     # Brotli compressed?
     if br and "br" in req.headers.accept_encoding.split(", "):
-        headers = {
-            **headers,
-            "content-encoding": "br",
-        }
+        headers = {**headers, "content-encoding": "br"}
         data = br
     return raw(data, headers=headers)


+def get_files(wanted: set) -> list[tuple[PurePosixPath, Path]]:
+    loc = PurePosixPath()
+    idx = 0
+    ret = []
+    level: int | None = None
+    parent: PurePosixPath | None = None
+    with watching.state.lock:
+        root = watching.state.root
+        while idx < len(root):
+            f = root[idx]
+            loc = PurePosixPath(*loc.parts[: f.level - 1]) / f.name
+            if parent is not None and f.level <= level:
+                level = parent = None
+            if f.key in wanted:
+                level, parent = f.level, loc.parent
+            if parent is not None:
+                wanted.discard(f.key)
+                ret.append((loc.relative_to(parent), watching.rootpath / loc))
+            idx += 1
+    return ret
+
+
 @app.get("/zip/<keys>/<zipfile:ext=zip>")
 async def zip_download(req, keys, zipfile, ext):
     """Download a zip archive of the given keys"""
     wanted = set(keys.split("+"))
-    with watching.tree_lock:
-        q = deque([([], None, watching.tree[""].dir)])
-        files = []
-        while q:
-            locpar, relpar, d = q.pop()
-            for name, attr in d.items():
-                loc = [*locpar, name]
-                rel = None
-                if relpar or attr.key in wanted:
-                    rel = [*relpar, name] if relpar else [name]
-                    wanted.discard(attr.key)
-                isdir = isinstance(attr, DirEntry)
-                if isdir:
-                    q.append((loc, rel, attr.dir))
-                if rel:
-                    files.append(
-                        ("/".join(rel), Path(watching.rootpath.joinpath(*loc)))
-                    )
+    files = get_files(wanted)

     if not files:
         raise NotFound(
             "No files found",
-            context={"keys": keys, "zipfile": zipfile, "wanted": wanted},
+            context={"keys": keys, "zipfile": f"{zipfile}.{ext}", "wanted": wanted},
         )
     if wanted:
         raise NotFound("Files not found", context={"missing": wanted})

-    files = natsorted(files, key=lambda f: f[0], alg=ns.IGNORECASE)
-
     def local_files(files):
         for rel, p in files:
             s = p.stat()
             size = s.st_size
             modified = datetime.datetime.fromtimestamp(s.st_mtime, datetime.UTC)
+            name = rel.as_posix()
             if p.is_dir():
-                yield rel, modified, S_IFDIR | 0o755, ZIP_AUTO(size), b""
+                yield f"{name}/", modified, S_IFDIR | 0o755, ZIP_AUTO(size), iter(b"")
             else:
-                yield rel, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p)
+                yield name, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p, size)

-    def contents(name):
+    def contents(name, size):
         with name.open("rb") as f:
-            while chunk := f.read(65536):
+            while size > 0 and (chunk := f.read(min(size, 1 << 20))):
+                size -= len(chunk)
                 yield chunk
+        assert size == 0

     def worker():
         try:
             for chunk in stream_zip(local_files(files)):
-                asyncio.run_coroutine_threadsafe(queue.put(chunk), loop)
+                asyncio.run_coroutine_threadsafe(queue.put(chunk), loop).result()
         except Exception:
             logging.exception("Error streaming ZIP")
             raise
@@ -238,7 +253,10 @@ async def zip_download(req, keys, zipfile, ext):
     thread = loop.run_in_executor(app.ctx.threadexec, worker)

     # Stream the response
-    res = await req.respond(content_type="application/zip")
+    res = await req.respond(
+        content_type="application/zip",
+        headers={"cache-control": "no-store"},
+    )
     while chunk := await queue.get():
         await res.send(chunk)
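For orientation (not from the changeset): the new `get_files()` walks the flat, depth-first listing kept in `watching.state.root`, where each entry carries a `level` and a `name`, and the full path is recovered by trimming the previous path back to `level - 1` parts, just as the loop above does. A tiny sketch with made-up file names:

```python
from pathlib import PurePosixPath

# (level, name) pairs as they might appear in the flat listing; level 0 is the root.
listing = [(0, ""), (1, "docs"), (2, "notes.txt"), (1, "photo.jpg")]

loc = PurePosixPath()
for level, name in listing:
    # Same reconstruction as get_files(): keep level - 1 parts, then append the name.
    loc = PurePosixPath(*loc.parts[: level - 1]) / name if level else PurePosixPath()
    print(level, loc)
# 0 .
# 1 docs
# 2 docs/notes.txt
# 1 photo.jpg
```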
@@ -68,10 +68,10 @@ def verify(request, *, privileged=False):
     if request.ctx.user:
         if request.ctx.user.privileged:
             return
-        raise Forbidden("Access Forbidden: Only for privileged users")
+        raise Forbidden("Access Forbidden: Only for privileged users", quiet=True)
     elif config.config.public or request.ctx.user:
         return
-    raise Unauthorized("Login required", "cookie")
+    raise Unauthorized("Login required", "cookie", quiet=True)


 bp = Blueprint("auth")
@@ -14,6 +14,7 @@ class Config(msgspec.Struct):
     listen: str
     secret: str = secrets.token_hex(12)
     public: bool = False
+    name: str = ""
     users: dict[str, User] = {}
     links: dict[str, Link] = {}

@@ -34,9 +34,11 @@ class File:
         self.open_rw()
         assert self.fd is not None
         if file_size is not None:
+            assert pos + len(buffer) <= file_size
             os.ftruncate(self.fd, file_size)
-        os.lseek(self.fd, pos, os.SEEK_SET)
-        os.write(self.fd, buffer)
+        if buffer:
+            os.lseek(self.fd, pos, os.SEEK_SET)
+            os.write(self.fd, buffer)

     def __getitem__(self, slice):
         if self.fd is None:
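The write path above pre-sizes the file with `ftruncate` and then writes each received chunk at its own offset, so chunks may be applied out of order. A standalone sketch of the same pattern (illustrative only, not code from the changeset):

```python
import os


def write_at(fd: int, pos: int, buffer: bytes, file_size: int | None = None) -> None:
    if file_size is not None:
        assert pos + len(buffer) <= file_size
        os.ftruncate(fd, file_size)  # reserve the final size up front
    if buffer:                       # skip the syscalls entirely for empty chunks
        os.lseek(fd, pos, os.SEEK_SET)
        os.write(fd, buffer)


fd = os.open("upload.tmp", os.O_RDWR | os.O_CREAT, 0o644)
write_at(fd, 0, b"", file_size=6)  # size the file without writing any data yet
write_at(fd, 3, b"bar")            # a later chunk arrives first
write_at(fd, 0, b"foo")            # the earlier chunk arrives afterwards
os.close(fd)                       # the file now contains b"foobar"
```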
@@ -22,7 +22,7 @@ class MkDir(ControlBase):

     def __call__(self):
         path = config.config.path / filename.sanitize(self.path)
-        path.mkdir(parents=False, exist_ok=False)
+        path.mkdir(parents=True, exist_ok=False)


 class Rename(ControlBase):
@@ -112,47 +112,43 @@ class ErrorMsg(msgspec.Struct):
 ## Directory listings


-class FileEntry(msgspec.Struct):
-    key: str
-    size: int
-    mtime: int
-
-
-class DirEntry(msgspec.Struct):
-    key: str
-    size: int
-    mtime: int
-    dir: DirList
-
-    def __getitem__(self, name):
-        return self.dir[name]
-
-    def __setitem__(self, name, value):
-        self.dir[name] = value
-
-    def __contains__(self, name):
-        return name in self.dir
-
-    def __delitem__(self, name):
-        del self.dir[name]
-
-    @property
-    def props(self):
-        return {k: v for k, v in self.__struct_fields__ if k != "dir"}
-
-
-DirList = dict[str, FileEntry | DirEntry]
-
-
-class UpdateEntry(msgspec.Struct, omit_defaults=True):
-    """Updates the named entry in the tree. Fields that are set replace old values. A list of entries recurses directories."""
-
+class FileEntry(msgspec.Struct, array_like=True):
+    level: int
     name: str
     key: str
-    deleted: bool = False
-    size: int | None = None
-    mtime: int | None = None
-    dir: DirList | None = None
+    mtime: int
+    size: int
+    isfile: int
+
+    def __repr__(self):
+        return self.key or "FileEntry()"
+
+
+class Update(msgspec.Struct, array_like=True):
+    ...
+
+
+class UpdKeep(Update, tag="k"):
+    count: int
+
+
+class UpdDel(Update, tag="d"):
+    count: int
+
+
+class UpdIns(Update, tag="i"):
+    items: list[FileEntry]
+
+
+class UpdateMessage(msgspec.Struct):
+    update: list[UpdKeep | UpdDel | UpdIns]
+
+
+class Space(msgspec.Struct):
+    disk: int
+    free: int
+    usage: int
+    storage: int


 def make_dir_data(root):
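For reference (not part of the diff): with msgspec, `array_like=True` structs serialize as JSON arrays and the `tag` becomes the first array element, which keeps the watch protocol compact. A small sketch of the resulting wire format, reusing the struct definitions from the hunk above:

```python
import msgspec


class FileEntry(msgspec.Struct, array_like=True):
    level: int
    name: str
    key: str
    mtime: int
    size: int
    isfile: int


class Update(msgspec.Struct, array_like=True): ...


class UpdKeep(Update, tag="k"):
    count: int


class UpdDel(Update, tag="d"):
    count: int


class UpdIns(Update, tag="i"):
    items: list[FileEntry]


# Two entries kept, one deleted, one inserted (hypothetical example data).
update = [UpdKeep(2), UpdDel(1), UpdIns([FileEntry(1, "new.txt", "abc123", 1700000000, 42, 1)])]
wire = msgspec.json.encode({"update": update})
print(wire.decode())
# {"update":[["k",2],["d",1],["i",[[1,"new.txt","abc123",1700000000,42,1]]]]}

# The tagged union decodes back into the right struct types.
decoded = msgspec.json.decode(wire, type=dict[str, list[UpdKeep | UpdDel | UpdIns]])
```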
@@ -1,6 +1,6 @@
 import os
 import re
-from pathlib import Path, PurePath
+from pathlib import Path

 from sanic import Sanic

@@ -15,7 +15,6 @@ def run(*, dev=False):
     # Silence Sanic's warning about running in production rather than debug
     os.environ["SANIC_IGNORE_PRODUCTION_WARNING"] = "1"
     confdir = config.conffile.parent
-    wwwroot = PurePath(__file__).parent / "wwwroot"
     if opts.get("ssl"):
         # Run plain HTTP redirect/acme server on port 80
         server80.app.prepare(port=80, motd=False)
@@ -27,7 +26,7 @@ def run(*, dev=False):
         motd=False,
         dev=dev,
         auto_reload=dev,
-        reload_dir={confdir, wwwroot},
+        reload_dir={confdir},
         access_log=True,
     ) # type: ignore
     if dev:
@@ -1,20 +1,137 @@
 import asyncio
 import shutil
+import stat
 import sys
 import threading
 import time
+from os import stat_result
 from pathlib import Path, PurePosixPath

 import msgspec
+from natsort import humansorted, natsort_keygen, ns
 from sanic.log import logging

 from cista import config
 from cista.fileio import fuid
-from cista.protocol import DirEntry, FileEntry, UpdateEntry
+from cista.protocol import FileEntry, Space, UpdDel, UpdIns, UpdKeep

 pubsub = {}
-tree = {"": None}
-tree_lock = threading.Lock()
+sortkey = natsort_keygen(alg=ns.LOCALE)
+
+
+class State:
+    def __init__(self):
+        self.lock = threading.RLock()
+        self._space = Space(0, 0, 0, 0)
+        self._listing: list[FileEntry] = []
+
+    @property
+    def space(self):
+        with self.lock:
+            return self._space
+
+    @space.setter
+    def space(self, space):
+        with self.lock:
+            self._space = space
+
+    @property
+    def root(self) -> list[FileEntry]:
+        with self.lock:
+            return self._listing[:]
+
+    @root.setter
+    def root(self, listing: list[FileEntry]):
+        with self.lock:
+            self._listing = listing
+
+    def _slice(self, idx: PurePosixPath | tuple[PurePosixPath, int]):
+        relpath, relfile = idx if isinstance(idx, tuple) else (idx, 0)
+        begin, end = 0, len(self._listing)
+        level = 0
+        isfile = 0
+
+        # Special case for root
+        if not relpath.parts:
+            return slice(begin, end)
+
+        begin += 1
+        for part in relpath.parts:
+            level += 1
+            found = False
+
+            while begin < end:
+                entry = self._listing[begin]
+
+                if entry.level < level:
+                    break
+
+                if entry.level == level:
+                    if entry.name == part:
+                        found = True
+                        if level == len(relpath.parts):
+                            isfile = relfile
+                        else:
+                            begin += 1
+                        break
+                    cmp = entry.isfile - isfile or sortkey(entry.name) > sortkey(part)
+                    if cmp > 0:
+                        break
+
+                begin += 1
+
+            if not found:
+                return slice(begin, begin)
+
+        # Found the starting point, now find the end of the slice
+        for end in range(begin + 1, len(self._listing) + 1):
+            if end == len(self._listing) or self._listing[end].level <= level:
+                break
+        return slice(begin, end)
+
+    def __getitem__(self, index: PurePosixPath | tuple[PurePosixPath, int]):
+        with self.lock:
+            return self._listing[self._slice(index)]
+
+    def __setitem__(
+        self, index: tuple[PurePosixPath, int], value: list[FileEntry]
+    ) -> None:
+        rel, isfile = index
+        with self.lock:
+            if rel.parts:
+                parent = self._slice(rel.parent)
+                if parent.start == parent.stop:
+                    raise ValueError(
+                        f"Parent folder {rel.as_posix()} is missing for {rel.name}"
+                    )
+            self._listing[self._slice(index)] = value

+    def __delitem__(self, relpath: PurePosixPath):
+        with self.lock:
+            del self._listing[self._slice(relpath)]
+
+    def _index(self, rel: PurePosixPath):
+        idx = 0
+        ret = []
+
+    def _dir(self, idx: int):
+        level = self._listing[idx].level + 1
+        end = len(self._listing)
+        idx += 1
+        ret = []
+        while idx < end and (r := self._listing[idx]).level >= level:
+            if r.level == level:
+                ret.append(idx)
+        return ret, idx
+
+    def update(self, rel: PurePosixPath, value: FileEntry):
+        begin = 0
+        parents = []
+        while self._listing[begin].level < len(rel.parts):
+            parents.append(begin)
+
+
+state = State()
 rootpath: Path = None # type: ignore
 quit = False
 modified_flags = (
@@ -26,23 +143,22 @@ modified_flags = (
     "IN_MOVED_FROM",
     "IN_MOVED_TO",
 )
-disk_usage = None


 def watcher_thread(loop):
-    global disk_usage, rootpath
+    global rootpath
     import inotify.adapters

     while True:
         rootpath = config.config.path
         i = inotify.adapters.InotifyTree(rootpath.as_posix())
-        old = format_tree() if tree[""] else None
-        with tree_lock:
-            # Initialize the tree from filesystem
-            tree[""] = walk(rootpath)
-            msg = format_tree()
-        if msg != old:
-            asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
+        # Initialize the tree from filesystem
+        new = walk()
+        with state.lock:
+            old = state.root
+            if old != new:
+                state.root = new
+                broadcast(format_update(old, new), loop)

         # The watching is not entirely reliable, so do a full refresh every minute
         refreshdl = time.monotonic() + 60.0
@@ -52,9 +168,10 @@ def watcher_thread(loop):
                 return
             # Disk usage update
             du = shutil.disk_usage(rootpath)
-            if du != disk_usage:
-                disk_usage = du
-                asyncio.run_coroutine_threadsafe(broadcast(format_du()), loop)
+            space = Space(*du, storage=state.root[0].size)
+            if space != state.space:
+                state.space = space
+                broadcast(format_space(space), loop)
                 break
             # Do a full refresh?
             if time.monotonic() > refreshdl:
@@ -75,144 +192,141 @@ def watcher_thread(loop):


 def watcher_thread_poll(loop):
-    global disk_usage, rootpath
+    global rootpath

     while not quit:
         rootpath = config.config.path
-        old = format_tree() if tree[""] else None
-        with tree_lock:
-            # Initialize the tree from filesystem
-            tree[""] = walk(rootpath)
-            msg = format_tree()
-        if msg != old:
-            asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
+        new = walk()
+        with state.lock:
+            old = state.root
+            if old != new:
+                state.root = new
+                broadcast(format_update(old, new), loop)

         # Disk usage update
         du = shutil.disk_usage(rootpath)
-        if du != disk_usage:
-            disk_usage = du
-            asyncio.run_coroutine_threadsafe(broadcast(format_du()), loop)
+        space = Space(*du, storage=state.root[0].size)
+        if space != state.space:
+            state.space = space
+            broadcast(format_space(space), loop)

-        time.sleep(1.0)
+        time.sleep(2.0)


-def format_du():
-    return msgspec.json.encode(
-        {
-            "space": {
-                "disk": disk_usage.total,
-                "used": disk_usage.used,
-                "free": disk_usage.free,
-                "storage": tree[""].size,
-            },
-        },
-    ).decode()
-
-
-def format_tree():
-    root = tree[""]
-    return msgspec.json.encode({"root": root}).decode()
-
-
-def walk(path: Path) -> DirEntry | FileEntry | None:
+def walk(rel=PurePosixPath()) -> list[FileEntry]: # noqa: B008
+    path = rootpath / rel
     try:
-        s = path.stat()
-        key = fuid(s)
-        assert key, repr(key)
-        mtime = int(s.st_mtime)
-        if path.is_file():
-            return FileEntry(key, s.st_size, mtime)
-
-        tree = {
-            p.name: v
-            for p in path.iterdir()
-            if not p.name.startswith(".")
-            if (v := walk(p)) is not None
-        }
-        if tree:
-            size = sum(v.size for v in tree.values())
-            mtime = max(mtime, *(v.mtime for v in tree.values()))
-        else:
-            size = 0
-        return DirEntry(key, size, mtime, tree)
+        st = path.stat()
+    except OSError:
+        return []
+    return _walk(rel, int(not stat.S_ISDIR(st.st_mode)), st)
+
+
+def _walk(rel: PurePosixPath, isfile: int, st: stat_result) -> list[FileEntry]:
+    entry = FileEntry(
+        level=len(rel.parts),
+        name=rel.name,
+        key=fuid(st),
+        mtime=int(st.st_mtime),
+        size=st.st_size if isfile else 0,
+        isfile=isfile,
+    )
+    if isfile:
+        return [entry]
+    ret = [entry]
+    path = rootpath / rel
+    try:
+        li = []
+        for f in path.iterdir():
+            if f.name.startswith("."):
+                continue # No dotfiles
+            s = f.stat()
+            li.append((int(not stat.S_ISDIR(s.st_mode)), f.name, s))
+        for [isfile, name, s] in humansorted(li):
+            subtree = _walk(rel / name, isfile, s)
+            child = subtree[0]
+            entry.mtime = max(entry.mtime, child.mtime)
+            entry.size += child.size
+            ret.extend(subtree)
     except FileNotFoundError:
-        return None
+        pass # Things may be rapidly in motion
     except OSError as e:
         print("OS error walking path", path, e)
-        return None
+    return ret


 def update(relpath: PurePosixPath, loop):
     """Called by inotify updates, check the filesystem and broadcast any changes."""
     if rootpath is None or relpath is None:
         print("ERROR", rootpath, relpath)
-    new = walk(rootpath / relpath)
-    with tree_lock:
-        update = update_internal(relpath, new)
-        if not update:
-            return # No changes
-        msg = msgspec.json.encode({"update": update}).decode()
-    asyncio.run_coroutine_threadsafe(broadcast(msg), loop)
+    new = walk(relpath)
+    with state.lock:
+        old = state[relpath]
+        if old == new:
+            return
+        old = state.root
+        if new:
+            state[relpath, new[0].isfile] = new
+        else:
+            del state[relpath]
+        broadcast(format_update(old, state.root), loop)


-def update_internal(
-    relpath: PurePosixPath,
-    new: DirEntry | FileEntry | None,
-) -> list[UpdateEntry]:
-    path = "", *relpath.parts
-    old = tree
-    elems = []
-    for name in path:
-        if name not in old:
-            # File or folder created
-            old = None
-            elems.append((name, None))
-            if len(elems) < len(path):
-                # We got a notify for an item whose parent is not in tree
-                print("Tree out of sync DEBUG", relpath)
-                print(elems)
-                print("Current tree:")
-                print(tree[""])
-                print("Walking all:")
-                print(walk(rootpath))
-                raise ValueError("Tree out of sync")
-            break
-        old = old[name]
-        elems.append((name, old))
-    if old == new:
-        return []
-    mt = new.mtime if new else 0
-    szdiff = (new.size if new else 0) - (old.size if old else 0)
-    # Update parents
+def format_update(old, new):
+    # Make keep/del/insert diff until one of the lists ends
+    oidx, nidx = 0, 0
     update = []
-    for name, entry in elems[:-1]:
-        u = UpdateEntry(name, entry.key)
-        if szdiff:
-            entry.size += szdiff
-            u.size = entry.size
-        if mt > entry.mtime:
-            u.mtime = entry.mtime = mt
-        update.append(u)
-    # The last element is the one that changed
-    name, entry = elems[-1]
-    parent = elems[-2][1] if len(elems) > 1 else tree
-    u = UpdateEntry(name, new.key if new else entry.key)
-    if new:
-        parent[name] = new
-        if u.size != new.size:
-            u.size = new.size
-        if u.mtime != new.mtime:
-            u.mtime = new.mtime
-        if isinstance(new, DirEntry) and u.dir != new.dir:
-            u.dir = new.dir
-    else:
-        del parent[name]
-        u.deleted = True
-    update.append(u)
-    return update
+    keep_count = 0
+    while oidx < len(old) and nidx < len(new):
+        if old[oidx] == new[nidx]:
+            keep_count += 1
+            oidx += 1
+            nidx += 1
+            continue
+        if keep_count > 0:
+            update.append(UpdKeep(keep_count))
+            keep_count = 0
+
+        del_count = 0
+        rest = new[nidx:]
+        while oidx < len(old) and old[oidx] not in rest:
+            del_count += 1
+            oidx += 1
+        if del_count:
+            update.append(UpdDel(del_count))
+            continue
+
+        insert_items = []
+        rest = old[oidx:]
+        while nidx < len(new) and new[nidx] not in rest:
+            insert_items.append(new[nidx])
+            nidx += 1
+        update.append(UpdIns(insert_items))
+
+    # Diff any remaining
+    if keep_count > 0:
+        update.append(UpdKeep(keep_count))
+    if oidx < len(old):
+        update.append(UpdDel(len(old) - oidx))
+    elif nidx < len(new):
+        update.append(UpdIns(new[nidx:]))
+
+    return msgspec.json.encode({"update": update}).decode()


-async def broadcast(msg):
+def format_space(usage):
+    return msgspec.json.encode({"space": usage}).decode()
+
+
+def format_root(root):
+    return msgspec.json.encode({"root": root}).decode()
+
+
+def broadcast(msg, loop):
+    return asyncio.run_coroutine_threadsafe(abroadcast(msg), loop).result()
+
+
+async def abroadcast(msg):
     try:
         for queue in pubsub.values():
             queue.put_nowait(msg)
@@ -223,8 +337,9 @@ async def broadcast(msg):

 async def start(app, loop):
     config.load_config()
+    use_inotify = False and sys.platform == "linux"
     app.ctx.watcher = threading.Thread(
-        target=watcher_thread if sys.platform == "linux" else watcher_thread_poll,
+        target=watcher_thread if use_inotify else watcher_thread_poll,
         args=[loop],
     )
     app.ctx.watcher.start()
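A consumer-side sketch (not in the changeset) of how the keep/del/insert operations emitted by `format_update()` above can be applied to a cached copy of the flat listing; plain tuples stand in for the decoded UpdKeep/UpdDel/UpdIns messages and the function name is hypothetical:

```python
def apply_update(listing, ops):
    """Apply ("k", count) / ("d", count) / ("i", items) ops to the old listing."""
    result = []
    idx = 0  # cursor into the old listing
    for op, arg in ops:
        if op == "k":    # keep the next `arg` entries unchanged
            result.extend(listing[idx:idx + arg])
            idx += arg
        elif op == "d":  # drop the next `arg` entries
            idx += arg
        elif op == "i":  # insert new entries at the cursor
            result.extend(arg)
        else:
            raise ValueError(f"unknown update op {op!r}")
    # format_update() always covers the whole old listing with keeps and deletes.
    assert idx == len(listing), "ops should consume the entire old listing"
    return result


old = ["a", "b", "c", "d"]
new = apply_update(old, [("k", 1), ("d", 2), ("i", ["x", "y"]), ("k", 1)])
assert new == ["a", "x", "y", "d"]
```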
@@ -1,9 +1,9 @@
 <!DOCTYPE html>
 <html lang=en>
 <meta charset=UTF-8>
-<title>Cista</title>
+<title>Cista Storage</title>
 <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
-<link rel="icon" href="/favicon.ico">
+<link rel="icon" href="/src/assets/logo.svg">
 <link rel="preconnect" href="https://fonts.googleapis.com">
 <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
 <link href="https://fonts.googleapis.com/css2?family=Roboto+Mono&family=Roboto:wght@400;700&display=swap" rel="stylesheet">
@@ -1,5 +1,5 @@
 {
-  "name": "front",
+  "name": "cista-frontend",
   "version": "0.0.0",
   "private": true,
   "scripts": {
frontend/public/robots.txt (new file)

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
@@ -1,13 +1,13 @@
 <template>
   <LoginModal />
   <header>
-    <HeaderMain ref="headerMain">
+    <HeaderMain ref="headerMain" :path="path.pathList" :query="path.query">
       <HeaderSelected :path="path.pathList" />
     </HeaderMain>
     <BreadCrumb :path="path.pathList" tabindex="-1"/>
   </header>
   <main>
-    <RouterView :path="path.pathList" />
+    <RouterView :path="path.pathList" :query="path.query" />
   </main>
 </template>

@@ -15,8 +15,8 @@
 import { RouterView } from 'vue-router'
 import type { ComputedRef } from 'vue'
 import type HeaderMain from '@/components/HeaderMain.vue'
-import { onMounted, onUnmounted, ref } from 'vue'
-import { watchConnect, watchDisconnect } from '@/repositories/WS'
+import { onMounted, onUnmounted, ref, watchEffect } from 'vue'
+import { loadSession, watchConnect, watchDisconnect } from '@/repositories/WS'
 import { useDocumentStore } from '@/stores/documents'

 import { computed } from 'vue'
@@ -25,20 +25,25 @@ import Router from '@/router/index'
 interface Path {
   path: string
   pathList: string[]
+  query: string
 }
 const documentStore = useDocumentStore()
 const path: ComputedRef<Path> = computed(() => {
-  const p = decodeURIComponent(Router.currentRoute.value.path)
-  const pathList = p.split('/').filter(value => value !== '')
+  const p = decodeURIComponent(Router.currentRoute.value.path).split('//')
+  const pathList = p[0].split('/').filter(value => value !== '')
+  const query = p.slice(1).join('//')
   return {
-    path: p,
-    pathList
+    path: p[0],
+    pathList,
+    query
   }
 })
+watchEffect(() => {
+  document.title = path.value.path.replace(/\/$/, '').split('/').pop() || documentStore.server.name || 'Cista Storage'
+})
+onMounted(loadSession)
 onMounted(watchConnect)
 onUnmounted(watchDisconnect)
-// Update human-readable x seconds ago messages from mtimes
-setInterval(documentStore.updateModified, 1000)
 const headerMain = ref<typeof HeaderMain | null>(null)
 let vert = 0
 let timer: any = null
frontend/src/assets/logo.svg (new file, 258 B)

@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 512 512"><rect width="512" height="512" fill="#f80" rx="64" ry="64"/><path fill="#fff" d="M381 298h-84V167h-66L339 35l108 132h-66zm-168-84h-84v131H63l108 132 108-132h-66z"/></svg>

(58 binary image files follow in the diff with identical before/after sizes, 91 B – 1.1 KiB each; no content changes.)

@@ -46,8 +46,11 @@ const isCurrent = (index: number) => index == props.path.length ? 'location' : u
 const navigate = (index: number) => {
   const link = links[index]
   if (!link) throw Error(`No link at index ${index} (path: ${props.path})`)
+  const url = `/${longest.value.slice(0, index).join('/')}/`
+  const here = `/${longest.value.join('/')}/`
   link.focus()
-  router.replace(`/${longest.value.slice(0, index).join('/')}`)
+  if (here.startsWith(location.hash.slice(1))) router.replace(url)
+  else router.push(url)
 }
 
 const move = (dir: number) => {
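
The history rule used above, restated as a small sketch (hypothetical helper, hash-based routing assumed): breadcrumb moves that stay within the currently displayed location replace the history entry, everything else pushes a new one.

```ts
// shouldReplace('/Pictures/Holiday/', '#/Pictures/')  => true  -> router.replace
// shouldReplace('/Pictures/Holiday/', '#/Documents/') => false -> router.push
const shouldReplace = (here: string, displayedHash: string): boolean =>
  here.startsWith(displayedHash.slice(1))  // hash is e.g. "#/Pictures/"
```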

@@ -3,34 +3,11 @@
     <thead>
       <tr>
         <th class="selection">
-          <input
-            type="checkbox"
-            tabindex="-1"
-            v-model="allSelected"
-            :indeterminate="selectionIndeterminate"
-          />
-        </th>
-        <th
-          class="sortcolumn"
-          :class="{ sortactive: sort === 'name' }"
-          @click="toggleSort('name')"
-        >
-          Name
-        </th>
-        <th
-          class="sortcolumn modified right"
-          :class="{ sortactive: sort === 'modified' }"
-          @click="toggleSort('modified')"
-        >
-          Modified
-        </th>
-        <th
-          class="sortcolumn size right"
-          :class="{ sortactive: sort === 'size' }"
-          @click="toggleSort('size')"
-        >
-          Size
+          <input type="checkbox" tabindex="-1" v-model="allSelected" :indeterminate="selectionIndeterminate">
         </th>
+        <th class="sortcolumn" :class="{ sortactive: sort === 'name' }" @click="toggleSort('name')">Name</th>
+        <th class="sortcolumn modified right" :class="{ sortactive: sort === 'modified' }" @click="toggleSort('modified')">Modified</th>
+        <th class="sortcolumn size right" :class="{ sortactive: sort === 'size' }" @click="toggleSort('size')">Size</th>
        <th class="menu"></th>
      </tr>
    </thead>
@@ -38,27 +15,13 @@
      <tr v-if="editing?.key === 'new'" class="folder">
        <td class="selection"></td>
        <td class="name">
-          <FileRenameInput
-            :doc="editing"
-            :rename="mkdir"
-            :exit="
-              () => {
-                editing = null
-              }
-            "
-          />
+          <FileRenameInput :doc="editing" :rename="mkdir" :exit="() => {editing = null}" />
        </td>
-        <td class="modified right">
-          <time :datetime="new Date(editing.mtime).toISOString().replace('.000', '')">{{
-            editing.modified
-          }}</time>
-        </td>
-        <td class="size right">{{ editing.sizedisp }}</td>
+        <FileModified :doc=editing />
+        <FileSize :doc=editing />
        <td class="menu"></td>
      </tr>
-      <template
-        v-for="(doc, index) in sortedDocuments"
-        :key="doc.key">
+      <template v-for="(doc, index) in sortedDocuments" :key="doc.key">
        <tr class="folder-change" v-if="showFolderBreadcrumb(index)">
          <th colspan="5"><BreadCrumb :path="doc.loc ? doc.loc.split('/') : []" /></th>
        </tr>
@@ -82,50 +45,26 @@
          />
        </td>
        <td class="name">
-          <template v-if="editing === doc"
-            ><FileRenameInput
-              :doc="doc"
-              :rename="rename"
-              :exit="
-                () => {
-                  editing = null
-                }
-              "
-          /></template>
+          <template v-if="editing === doc">
+            <FileRenameInput :doc="doc" :rename="rename" :exit="() => {editing = null}" />
+          </template>
          <template v-else>
            <a
              :href="url_for(doc)"
              tabindex="-1"
              @contextmenu.prevent
              @focus.stop="cursor = doc"
-              @blur="ev => { if (!editing) cursor = null }"
              @keyup.left="router.back()"
              @keyup.right.stop="ev => { if (doc.dir) (ev.target as HTMLElement).click() }"
            >{{ doc.name }}</a
            >
-            <button
-              v-if="cursor == doc"
-              class="rename-button"
-              @click="() => (editing = doc)"
-            >
-              🖊️
-            </button>
+            <button v-if="cursor == doc" class="rename-button" @click="() => (editing = doc)">🖊️</button>
          </template>
        </td>
-        <td class="modified right">
-          <time
-            :data-tooltip="new Date(1000 * doc.mtime).toISOString().replace('T', '\n').replace('.000Z', ' UTC')"
-          >{{ doc.modified }}</time
-          >
-        </td>
-        <td class="size right">{{ doc.sizedisp }}</td>
+        <FileModified :doc=doc />
+        <FileSize :doc=doc />
        <td class="menu">
-          <button
-            tabindex="-1"
-            @click.stop="contextMenu($event, doc)"
-          >
-            ⋮
-          </button>
+          <button tabindex="-1" @click.stop="contextMenu($event, doc)">⋮</button>
        </td>
      </tr>
    </template>
@@ -140,7 +79,7 @@
 </template>
 
 <script setup lang="ts">
-import { ref, computed, watchEffect } from 'vue'
+import { ref, computed, watchEffect, onMounted, onUnmounted } from 'vue'
 import { useDocumentStore } from '@/stores/documents'
 import type { Document } from '@/repositories/Document'
 import FileRenameInput from './FileRenameInput.vue'
@@ -148,13 +87,10 @@ import { connect, controlUrl } from '@/repositories/WS'
 import { collator, formatSize, formatUnixDate } from '@/utils'
 import { useRouter } from 'vue-router'
 
-const props = withDefaults(
-  defineProps<{
-    path: Array<string>
-    documents: Document[]
-  }>(),
-  {}
-)
+const props = defineProps<{
+  path: Array<string>
+  documents: Document[]
+}>()
 const documentStore = useDocumentStore()
 const router = useRouter()
 const url_for = (doc: Document) => {
@@ -201,7 +137,7 @@ defineExpose({
   loc: loc.value,
   key: 'new',
   name: 'New Folder',
-  type: 'folder',
+  dir: true,
   mtime: now,
   size: 0,
   sizedisp: formatSize(0),
@@ -293,6 +229,13 @@ watchEffect(() => {
     focusBreadcrumb()
   }
 })
+// Update human-readable x seconds ago messages from mtimes
+let modifiedTimer: any = null
+const updateModified = () => {
+  for (const doc of props.documents) doc.modified = formatUnixDate(doc.mtime)
+}
+onMounted(() => { updateModified(); modifiedTimer = setInterval(updateModified, 1000) })
+onUnmounted(() => { clearInterval(modifiedTimer) })
 const mkdir = (doc: Document, name: string) => {
   const control = connect(controlUrl, {
     open() {
@@ -310,7 +253,7 @@ const mkdir = (doc: Document, name: string) => {
       editing.value = null
     } else {
       console.log('mkdir', msg)
-      router.push(`/${doc.loc}/${name}/`)
+      router.push(doc.loc ? `/${doc.loc}/${name}/` : `/${name}/`)
     }
   }
 })
@@ -401,7 +344,7 @@ table .selection {
   text-overflow: clip;
 }
 table .modified {
-  width: 8em;
+  width: 9em;
 }
 table .size {
   width: 5em;

frontend/src/components/FileModified.vue (new file, 22 lines)
@@ -0,0 +1,22 @@
+<template>
+  <td class="modified right">
+    <time :data-tooltip=tooltip :datetime=datetime>{{ doc.modified }}</time>
+  </td>
+</template>
+
+<script setup lang="ts">
+import type { Document } from '@/repositories/Document'
+import { computed } from 'vue'
+
+const datetime = computed(() =>
+  new Date(1000 * props.doc.mtime).toISOString().replace('.000Z', 'Z')
+)
+
+const tooltip = computed(() =>
+  datetime.value.replace('T', '\n').replace('Z', ' UTC')
+)
+
+const props = defineProps<{
+  doc: Document
+}>()
+</script>
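
As a worked example of the two computed values (the mtime below is made up, not from the diff):

```ts
const mtime = 1700000000  // seconds since the Unix epoch
const datetime = new Date(1000 * mtime).toISOString().replace('.000Z', 'Z')
// => "2023-11-14T22:13:20Z"       (used as the <time> datetime attribute)
const tooltip = datetime.replace('T', '\n').replace('Z', ' UTC')
// => "2023-11-14\n22:13:20 UTC"   (shown as the hover tooltip)
```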

@@ -46,8 +46,8 @@ const apply = () => {
 
 <style>
 input#FileRenameInput {
-  color: var(--primary-color);
-  background: var(--primary-background);
+  color: var(--input-color);
+  background: var(--input-background);
   border: 0;
   border-radius: 0.3rem;
   padding: 0.4rem;

frontend/src/components/FileSize.vue (new file, 43 lines)
@@ -0,0 +1,43 @@
+<template>
+  <td class="size right" :class=sizeClass>{{ doc.sizedisp }}</td>
+</template>
+
+<script setup lang="ts">
+import type { Document } from '@/repositories/Document'
+import { computed } from 'vue'
+
+const sizeClass = computed(() => {
+  const unit = props.doc.sizedisp.split('\u202F').slice(-1)[0]
+  return +unit ? "bytes" : unit
+})
+
+const props = defineProps<{
+  doc: Document
+}>()
+</script>
+
+<style scoped>
+.size.empty { color: #555 }
+.size.bytes { color: #77a }
+.size.kB { color: #474 }
+.size.MB { color: #a80 }
+.size.GB { color: #f83 }
+.size.TB, .size.PB, .size.EB, .size.huge {
+  color: #f44;
+  text-shadow: 0 0 .2em;
+}
+
+@media (prefers-color-scheme: dark) {
+  .size.empty { color: #bbb }
+  .size.bytes { color: #99d }
+  .size.kB { color: #aea }
+  .size.MB { color: #ff4 }
+  .size.GB { color: #f86 }
+  .size.TB, .size.PB, .size.EB, .size.huge { color: #f55 }
+}
+
+.cursor .size {
+  color: inherit;
+  text-shadow: none;
+}
+</style>
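
A short sketch of how the CSS class is derived (assuming, as the component does, that sizedisp separates number and unit with a narrow no-break space, U+202F):

```ts
// Plain byte counts have a purely numeric tail and get the "bytes" class;
// otherwise the unit itself (kB, MB, GB, ...) becomes the class name.
const sizeClass = (sizedisp: string): string => {
  const unit = sizedisp.split('\u202F').slice(-1)[0]
  return +unit ? 'bytes' : unit
}
// sizeClass('123')         => 'bytes'
// sizeClass('4.2\u202FkB') => 'kB'   (gets the .size.kB color defined above)
```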

@@ -5,11 +5,11 @@
     <div class="error-message" @click="documentStore.error = ''">{{ documentStore.error }}</div>
     <div class="smallgap"></div>
   </template>
-  <UploadButton />
+  <UploadButton :path="props.path" />
   <SvgButton
     name="create-folder"
     data-tooltip="New folder"
-    @click="() => documentStore.fileExplorer.newFolder()"
+    @click="() => documentStore.fileExplorer!.newFolder()"
   />
   <slot></slot>
   <div class="spacer smallgap"></div>
@@ -17,7 +17,9 @@
   <input
     ref="search"
     type="search"
-    v-model="documentStore.search"
+    :value="query"
+    @blur="ev => { if (!query) closeSearch(ev) }"
+    @input="updateSearch"
     placeholder="Search words"
     class="margin-input"
     @keyup.escape="closeSearch"
@@ -31,30 +33,42 @@
 
 <script setup lang="ts">
 import { useDocumentStore } from '@/stores/documents'
-import { ref, nextTick } from 'vue'
+import { ref, nextTick, watchEffect } from 'vue'
 import ContextMenu from '@imengyu/vue3-context-menu'
+import router from '@/router';
 
 const documentStore = useDocumentStore()
 const showSearchInput = ref<boolean>(false)
 const search = ref<HTMLInputElement | null>()
 const searchButton = ref<HTMLButtonElement | null>()
 
-const closeSearch = () => {
+const closeSearch = (ev: Event) => {
   if (!showSearchInput.value) return // Already closing
   showSearchInput.value = false
-  documentStore.search = ''
   const breadcrumb = document.querySelector('.breadcrumb') as HTMLElement
   breadcrumb.focus()
+  updateSearch(ev)
 }
-const toggleSearchInput = () => {
+const updateSearch = (ev: Event) => {
+  const q = (ev.target as HTMLInputElement).value
+  let p = props.path.join('/')
+  p = p ? `/${p}` : ''
+  const url = q ? `${p}//${q}` : (p || '/')
+  console.log("Update search", url)
+  if (!props.query && q) router.push(url)
+  else router.replace(url)
+}
+const toggleSearchInput = (ev: Event) => {
   showSearchInput.value = !showSearchInput.value
-  if (!showSearchInput.value) return closeSearch()
+  if (!showSearchInput.value) return closeSearch(ev)
   nextTick(() => {
     const input = search.value
     if (input) input.focus()
   })
 }
+watchEffect(() => {
+  if (props.query) showSearchInput.value = true
+})
 const settingsMenu = (e: Event) => {
   // show the context menu
   const items = []
@@ -69,6 +83,10 @@ const settingsMenu = (e: Event) => {
   items,
 })
 }
+const props = defineProps<{
+  path: Array<string>
+  query: string
+}>()
 
 defineExpose({
   toggleSearchInput,
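
The search URL built by updateSearch above follows the same double-slash convention as the router change in App.vue; a minimal sketch (hypothetical helper) of how path and query combine. The push/replace split means starting a search from an empty query creates one history entry, while further typing only replaces it.

```ts
const searchUrl = (pathList: string[], q: string): string => {
  let p = pathList.join('/')
  p = p ? `/${p}` : ''
  return q ? `${p}//${q}` : (p || '/')
}
// searchUrl(['Pictures'], 'sunset') => '/Pictures//sunset'
// searchUrl([], 'sunset')           => '//sunset'
// searchUrl(['Pictures'], '')       => '/Pictures'
```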

@@ -34,7 +34,7 @@ const op = (op: string, dst?: string) => {
   // @ts-ignore
   if (dst !== undefined) msg.dst = dst
   const control = connect(controlUrl, {
-    message(ev: WebSocmetMessageEvent) {
+    message(ev: MessageEvent) {
       const res = JSON.parse(ev.data)
       if ('error' in res) {
         console.error('Control socket error', msg, res.error)

frontend/src/components/UploadButton.vue (new file, 304 lines)
@@ -0,0 +1,304 @@
+<script setup lang="ts">
+import { connect, uploadUrl } from '@/repositories/WS';
+import { useDocumentStore } from '@/stores/documents'
+import { collator } from '@/utils';
+import { computed, onMounted, onUnmounted, reactive, ref } from 'vue'
+
+const fileInput = ref()
+const folderInput = ref()
+const documentStore = useDocumentStore()
+const props = defineProps({
+  path: Array<string>
+})
+
+type CloudFile = {
+  file: File
+  cloudName: string
+  cloudPos: number
+}
+function pasteHandler(event: ClipboardEvent) {
+  const items = Array.from(event.clipboardData?.items ?? [])
+  const infiles = [] as File[]
+  const dirs = [] as FileSystemDirectoryEntry[]
+  for (const item of items) {
+    if (item.kind !== 'file') continue
+    const entry = item.webkitGetAsEntry()
+    if (entry?.isFile) {
+      const file = item.getAsFile()
+      infiles.push(file)
+    } else if (entry?.isDirectory) {
+      dirs.push(entry as FileSystemDirectoryEntry)
+    }
+  }
+  if (infiles.length || dirs.length) {
+    event.preventDefault()
+    uploadFiles(infiles)
+    for (const entry of dirs) pasteDirectory(entry, `${props.path!.join('/')}/${entry.name}`)
+  }
+}
+const pasteDirectory = async (entry: FileSystemDirectoryEntry, loc: string) => {
+  const reader = entry.createReader()
+  const entries = await new Promise<any[]>(resolve => reader.readEntries(resolve))
+  const cloudfiles = [] as CloudFile[]
+  for (const entry of entries) {
+    const cloudName = `${loc}/${entry.name}`
+    if (entry.isFile) {
+      const file = await new Promise(resolve => entry.file(resolve)) as File
+      cloudfiles.push({file, cloudName, cloudPos: 0})
+    } else if (entry.isDirectory) {
+      await pasteDirectory(entry, cloudName)
+    }
+  }
+  if (cloudfiles.length) uploadCloudFiles(cloudfiles)
+}
+function uploadHandler(event: Event) {
+  event.preventDefault()
+  // @ts-ignore
+  const input = event.target as HTMLInputElement | null
+  const infiles = Array.from((input ?? (event as DragEvent).dataTransfer)?.files ?? []) as File[]
+  if (input) input.value = ''
+  if (infiles.length) uploadFiles(infiles)
+}
+
+const uploadFiles = (infiles: File[]) => {
+  const loc = props.path!.join('/')
+  let files = []
+  for (const file of infiles) {
+    files.push({
+      file,
+      cloudName: loc + '/' + (file.webkitRelativePath || file.name),
+      cloudPos: 0,
+    })
+  }
+  uploadCloudFiles(files)
+}
+const uploadCloudFiles = (files: CloudFile[]) => {
+  const dotfiles = files.filter(f => f.cloudName.includes('/.'))
+  if (dotfiles.length) {
+    documentStore.error = "Won't upload dotfiles"
+    console.log("Dotfiles omitted", dotfiles)
+    files = files.filter(f => !f.cloudName.includes('/.'))
+  }
+  if (!files.length) return
+  files.sort((a, b) => collator.compare(a.cloudName, b.cloudName))
+  // @ts-ignore
+  upqueue = [...upqueue, ...files]
+  statsAdd(files)
+  startWorker()
+}
+
+const cancelUploads = () => {
+  upqueue = []
+  statReset()
+}
+
+const uprogress_init = {
+  total: 0,
+  uploaded: 0,
+  t0: 0,
+  tlast: 0,
+  statbytes: 0,
+  statdur: 0,
+  files: [] as CloudFile[],
+  filestart: 0,
+  fileidx: 0,
+  filecount: 0,
+  filename: '',
+  filesize: 0,
+  filepos: 0,
+  status: 'idle',
+}
+const uprogress = reactive({...uprogress_init})
+const percent = computed(() => uprogress.uploaded / uprogress.total * 100)
+const speed = computed(() => {
+  let s = uprogress.statbytes / uprogress.statdur / 1e3
+  const tsince = (Date.now() - uprogress.tlast) / 1e3
+  if (tsince > 5 / s) return 0 // Less than fifth of previous speed => stalled
+  if (tsince > 1 / s) return 1 / tsince // Next block is late or not coming, decay
+  return s // "Current speed"
+})
+const speeddisp = computed(() => speed.value ? speed.value.toFixed(speed.value < 10 ? 1 : 0) + '\u202FMB/s': 'stalled')
+setInterval(() => {
+  if (Date.now() - uprogress.tlast > 3000) {
+    // Reset
+    uprogress.statbytes = 0
+    uprogress.statdur = 1
+  } else {
+    // Running average by decay
+    uprogress.statbytes *= .9
+    uprogress.statdur *= .9
+  }
+}, 100)
+const statUpdate = ({name, size, start, end}: {name: string, size: number, start: number, end: number}) => {
+  if (name !== uprogress.filename) return // If stats have been reset
+  const now = Date.now()
+  uprogress.uploaded = uprogress.filestart + end
+  uprogress.filepos = end
+  uprogress.statbytes += end - start
+  uprogress.statdur += now - uprogress.tlast
+  uprogress.tlast = now
+  // File finished?
+  if (end === size) {
+    uprogress.filestart += size
+    statNextFile()
+    if (++uprogress.fileidx >= uprogress.filecount) statReset()
+  }
+}
+const statNextFile = () => {
+  const f = uprogress.files.shift()
+  if (!f) return statReset()
+  uprogress.filepos = 0
+  uprogress.filesize = f.file.size
+  uprogress.filename = f.cloudName
+}
+const statReset = () => {
+  Object.assign(uprogress, uprogress_init)
+  uprogress.t0 = Date.now()
+  uprogress.tlast = uprogress.t0 + 1
+}
+const statsAdd = (f: CloudFile[]) => {
+  if (uprogress.files.length === 0) statReset()
+  uprogress.total += f.reduce((a, b) => a + b.file.size, 0)
+  uprogress.filecount += f.length
+  uprogress.files = [...uprogress.files, ...f]
+  statNextFile()
+}
+let upqueue = [] as CloudFile[]
+
+// TODO: Rewrite as WebSocket class
+const WSCreate = async () => await new Promise<WebSocket>(resolve => {
+  const ws = connect(uploadUrl, {
+    open(ev: Event) { resolve(ws) },
+    error(ev: Event) {
+      console.error('Upload socket error', ev)
+      documentStore.error = 'Upload socket error'
+    },
+    message(ev: MessageEvent) {
+      const res = JSON.parse(ev!.data)
+      if ('error' in res) {
+        console.error('Upload socket error', res.error)
+        documentStore.error = res.error.message
+        return
+      }
+      if (res.status === 'ack') {
+        statUpdate(res.req)
+      } else console.log('Unknown upload response', res)
+    },
+  })
+  // @ts-ignore
+  ws.sendMsg = (msg: any) => ws.send(JSON.stringify(msg))
+  // @ts-ignore
+  ws.sendData = async (data: any) => {
+    // Wait until the WS is ready to send another message
+    uprogress.status = "uploading"
+    await new Promise(resolve => {
+      const t = setInterval(() => {
+        if (ws.bufferedAmount > 1<<20) return
+        resolve(undefined)
+        clearInterval(t)
+      }, 1)
+    })
+    uprogress.status = "processing"
+    ws.send(data)
+  }
+})
+const worker = async () => {
+  const ws = await WSCreate()
+  while (upqueue.length) {
+    const f = upqueue[0]
+    const start = f.cloudPos
+    const end = Math.min(f.file.size, start + (1<<20))
+    const control = { name: f.cloudName, size: f.file.size, start, end }
+    const data = f.file.slice(start, end)
+    f.cloudPos = end
+    // Note: files may get modified during I/O
+    // @ts-ignore FIXME proper WebSocket class, avoid attaching functions to WebSocket object
+    ws.sendMsg(control)
+    // @ts-ignore
+    await ws.sendData(data)
+    if (f.cloudPos === f.file.size) upqueue.shift()
+  }
+  if (upqueue.length) startWorker()
+  uprogress.status = "idle"
+  workerRunning = false
+}
+let workerRunning: any = false
+const startWorker = () => {
+  if (workerRunning === false) workerRunning = setTimeout(() => {
+    workerRunning = true
+    worker()
+  }, 0)
+}
+
+onMounted(() => {
+  // Need to prevent both to prevent browser from opening the file
+  addEventListener('dragover', uploadHandler)
+  addEventListener('drop', uploadHandler)
+  addEventListener('paste', pasteHandler)
+})
+onUnmounted(() => {
+  removeEventListener('paste', pasteHandler)
+  removeEventListener('dragover', uploadHandler)
+  removeEventListener('drop', uploadHandler)
+})
+</script>
+<template>
+  <template>
+    <input ref="fileInput" @change="uploadHandler" type="file" multiple>
+    <input ref="folderInput" @change="uploadHandler" type="file" webkitdirectory>
+  </template>
+  <SvgButton name="add-file" data-tooltip="Upload files" @click="fileInput.click()" />
+  <SvgButton name="add-folder" data-tooltip="Upload folder" @click="folderInput.click()" />
+  <div class="uploadprogress" v-if="uprogress.total" :style="`background: linear-gradient(to right, var(--bar) 0, var(--bar) ${percent}%, var(--nobar) ${percent}%, var(--nobar) 100%);`">
+    <div class="statustext">
+      <span v-if="uprogress.filecount > 1" class="index">
+        [{{ uprogress.fileidx }}/{{ uprogress.filecount }}]
+      </span>
+      <span class="filename">{{ uprogress.filename.split('/').pop() }}
+        <span v-if="uprogress.filesize > 1e7" class="percent">
+          {{ (uprogress.filepos / uprogress.filesize * 100).toFixed(0) + '\u202F%' }}
+        </span>
+      </span>
+      <span class="position" v-if="uprogress.total > 1e7">
+        {{ (uprogress.uploaded / 1e6).toFixed(0) + '\u202F/\u202F' + (uprogress.total / 1e6).toFixed(0) + '\u202FMB' }}
+      </span>
+      <span class="speed">{{ speeddisp }}</span>
+      <button class="close" @click="cancelUploads">❌</button>
+    </div>
+  </div>
+</template>
+
+<style scoped>
+.uploadprogress {
+  --bar: var(--accent-color);
+  --nobar: var(--header-background);
+  display: flex;
+  flex-direction: column;
+  color: var(--primary-color);
+  position: fixed;
+  left: 0;
+  bottom: 0;
+  width: 100vw;
+}
+.statustext {
+  display: flex;
+  padding: 0.5rem 0;
+}
+span {
+  color: #ccc;
+  white-space: nowrap;
+  text-align: right;
+  padding: 0 0.5em;
+}
+.filename {
+  color: #fff;
+  flex: 1 1;
+  white-space: nowrap;
+  overflow: hidden;
+  text-overflow: ellipsis;
+  text-align: left;
+}
+.index { min-width: 3.5em }
+.position { min-width: 4em }
+.speed { min-width: 4em }
+</style>
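
The upload framing this component relies on, restated as a minimal sketch (not part of the diff): for each 1 MiB chunk it sends a JSON control frame {name, size, start, end} followed by the raw bytes, and the server acknowledges each chunk with {status: "ack", req}, which drives statUpdate. Backpressure (bufferedAmount) and zero-byte files are omitted here; see the worker above for the full handling.

```ts
async function uploadOneFile(ws: WebSocket, file: File, cloudName: string) {
  const CHUNK = 1 << 20  // 1 MiB, the same chunk size the worker uses
  for (let start = 0; start < file.size; start += CHUNK) {
    const end = Math.min(file.size, start + CHUNK)
    // Control frame first, then the matching slice of file data.
    ws.send(JSON.stringify({ name: cloudName, size: file.size, start, end }))
    ws.send(file.slice(start, end))
  }
}
```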

@@ -22,29 +22,16 @@ export type errorEvent = {
 
 // Raw types the backend /api/watch sends us
 
-export type FileEntry = {
-  key: FUID
-  size: number
-  mtime: number
-}
-
-export type DirEntry = {
-  key: FUID
-  size: number
-  mtime: number
-  dir: DirList
-}
-
-export type DirList = Record<string, FileEntry | DirEntry>
-
-export type UpdateEntry = {
-  name: string
-  deleted?: boolean
-  key?: FUID
-  size?: number
-  mtime?: number
-  dir?: DirList
-}
+export type FileEntry = [
+  number, // level
+  string, // name
+  FUID,
+  number, //mtime
+  number, // size
+  number, // isfile
+]
+
+export type UpdateEntry = ['k', number] | ['d', number] | ['i', Array<FileEntry>]
 
 // Helper structure for selections
 export interface SelectedItems {
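
A hypothetical listing in the new flat format (all values below are made up, including the FUID keys); nesting depth is carried by the level field instead of recursive objects:

```ts
// [level, name, key, mtime, size, isfile]
const exampleRoot = [
  [1, 'Pictures',   'f1a2b3c4', 1700000000, 4096,  0],  // directory at depth 1
  [2, 'sunset.jpg', 'd5e6f7a8', 1700000002, 54321, 1],  // file inside Pictures
]
```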

@@ -1,14 +1,29 @@
 import { useDocumentStore } from "@/stores/documents"
-import type { DirEntry, UpdateEntry, errorEvent } from "./Document"
+import type { FileEntry, UpdateEntry, errorEvent } from "./Document"
 
 export const controlUrl = '/api/control'
 export const uploadUrl = '/api/upload'
 export const watchUrl = '/api/watch'
 
-let tree = null as DirEntry | null
+let tree = [] as FileEntry[]
 let reconnectDuration = 500
 let wsWatch = null as WebSocket | null
 
+export const loadSession = () => {
+  const store = useDocumentStore()
+  try {
+    tree = JSON.parse(sessionStorage["cista-files"])
+    store.updateRoot(tree)
+    return true
+  } catch (error) {
+    return false
+  }
+}
+
+const saveSession = () => {
+  sessionStorage["cista-files"] = JSON.stringify(tree)
+}
+
 export const connect = (path: string, handlers: Partial<Record<keyof WebSocketEventMap, any>>) => {
   const webSocket = new WebSocket(new URL(path, location.origin.replace(/^http/, 'ws')))
   for (const [event, handler] of Object.entries(handlers)) webSocket.addEventListener(event, handler)
@@ -42,6 +57,7 @@ export const watchConnect = () => {
   }
   if ("server" in msg) {
     console.log('Connected to backend', msg)
+    store.server = msg.server
     store.connected = true
     reconnectDuration = 500
     store.error = ''
@@ -93,34 +109,36 @@ const handleWatchMessage = (event: MessageEvent) => {
   }
 }
 
-function handleRootMessage({ root }: { root: DirEntry }) {
+function handleRootMessage({ root }: { root: FileEntry[] }) {
   const store = useDocumentStore()
   console.log('Watch root', root)
   store.updateRoot(root)
   tree = root
+  saveSession()
 }
 
 function handleUpdateMessage(updateData: { update: UpdateEntry[] }) {
   const store = useDocumentStore()
-  console.log('Watch update', updateData.update)
+  const update = updateData.update
+  console.log('Watch update', update)
   if (!tree) return console.error('Watch update before root')
-  let node: DirEntry = tree
-  for (const elem of updateData.update) {
-    if (elem.deleted) {
-      delete node.dir[elem.name]
-      break // Deleted elements can't have further children
+  let newtree = []
+  let oidx = 0
+
+  for (const [action, arg] of update) {
+    if (action === 'k') {
+      newtree.push(...tree.slice(oidx, oidx + arg))
+      oidx += arg
     }
-    if (elem.name) {
-      // @ts-ignore
-      console.log(node, elem.name)
-      node = node.dir[elem.name] ||= {}
-    }
-    if (elem.key !== undefined) node.key = elem.key
-    if (elem.size !== undefined) node.size = elem.size
-    if (elem.mtime !== undefined) node.mtime = elem.mtime
-    if (elem.dir !== undefined) node.dir = elem.dir
+    else if (action === 'd') oidx += arg
+    else if (action === 'i') newtree.push(...arg)
+    else console.log("Unknown update action", action, arg)
   }
-  store.updateRoot(tree)
+  if (oidx != tree.length)
+    throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}, new tree ${newtree.length}`)
+  store.updateRoot(newtree)
+  tree = newtree
+  saveSession()
 }
 
 function handleError(msg: errorEvent) {
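
A worked example of the new keep/delete/insert update protocol (made-up entries, strings standing in for FileEntry tuples): the server describes the new listing as operations applied in order against the previous flat list, and the keep/delete counts must consume the old list exactly.

```ts
const oldTree = ['a', 'b', 'c', 'd']
const update = [
  ['k', 2],          // keep 'a', 'b'
  ['d', 1],          // drop 'c'
  ['i', ['x', 'y']], // insert two new entries
  ['k', 1],          // keep 'd'
]
// Applying it as handleUpdateMessage does yields ['a', 'b', 'x', 'y', 'd'];
// the consumed count 2 + 1 + 1 equals oldTree.length, so the update is in sync.
```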

@@ -1,10 +1,4 @@
-import type {
-  Document,
-  DirEntry,
-  FileEntry,
-  FUID,
-  SelectedItems
-} from '@/repositories/Document'
+import type { Document, FileEntry, FUID, SelectedItems } from '@/repositories/Document'
 import { formatSize, formatUnixDate, haystackFormat } from '@/utils'
 import { defineStore } from 'pinia'
 import { collator } from '@/utils'
@@ -26,13 +20,11 @@ export const useDocumentStore = defineStore({
   id: 'documents',
   state: () => ({
     document: [] as Document[],
-    search: "" as string,
     selected: new Set<FUID>(),
-    uploadingDocuments: [],
-    uploadCount: 0 as number,
-    fileExplorer: null,
+    fileExplorer: null as any,
     error: '' as string,
     connected: false,
+    server: {} as Record<string, any>,
     user: {
       username: '',
       privileged: false,
@@ -40,71 +32,27 @@ export const useDocumentStore = defineStore({
       isOpenLoginModal: false
     } as User
   }),
-  persist: {
-    storage: sessionStorage,
-    paths: ['document'],
-  },
   actions: {
-    updateRoot(root: DirEntry | null = null) {
-      if (!root) {
-        this.document = []
-        return
-      }
-      // Transform tree data to flat documents array
-      let loc = ""
-      const mapper = ([name, attr]: [string, FileEntry | DirEntry]) => ({
-        ...attr,
-        loc,
-        name,
-        sizedisp: formatSize(attr.size),
-        modified: formatUnixDate(attr.mtime),
-        haystack: haystackFormat(name),
-      })
-      const queue = [...Object.entries(root.dir ?? {}).map(mapper)]
+    updateRoot(root: FileEntry[]) {
       const docs = []
-      for (let doc; (doc = queue.shift()) !== undefined;) {
-        docs.push(doc)
-        if ("dir" in doc) {
-          // Recurse but replace recursive structure with boolean
-          loc = doc.loc ? `${doc.loc}/${doc.name}` : doc.name
-          queue.push(...Object.entries(doc.dir).map(mapper))
-          // @ts-ignore
-          doc.dir = true
-        }
-        // @ts-ignore
-        else doc.dir = false
+      let loc = [] as string[]
+      for (const [level, name, key, mtime, size, isfile] of root) {
+        loc = loc.slice(0, level - 1)
+        docs.push({
+          name,
+          loc: level ? loc.join('/') : '/',
+          key,
+          size,
+          sizedisp: formatSize(size),
+          mtime,
+          modified: formatUnixDate(mtime),
+          haystack: haystackFormat(name),
+          dir: !isfile,
+        })
+        loc.push(name)
       }
-      // Pre sort directory entries folders first then files, names in natural ordering
-      docs.sort((a, b) =>
-        // @ts-ignore
-        b.dir - a.dir ||
-        collator.compare(a.name, b.name)
-      )
       this.document = docs as Document[]
     },
-    updateUploadingDocuments(key: number, progress: number) {
-      for (const d of this.uploadingDocuments) {
-        if (d.key === key) d.progress = progress
-      }
-    },
-    pushUploadingDocuments(name: string) {
-      this.uploadCount++
-      const document = {
-        key: this.uploadCount,
-        name: name,
-        progress: 0
-      }
-      this.uploadingDocuments.push(document)
-      return document
-    },
-    deleteUploadingDocument(key: number) {
-      this.uploadingDocuments = this.uploadingDocuments.filter(e => e.key !== key)
-    },
-    updateModified() {
-      for (const d of this.document) {
-        if ('mtime' in d) d.modified = formatUnixDate(d.mtime)
-      }
-    },
     login(username: string, privileged: boolean) {
       this.user.username = username
       this.user.privileged = privileged
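
To make the level-based flattening concrete, a small worked example (made-up entries, assuming the root itself is sent as a level-0 entry with an empty name) of what updateRoot produces:

```ts
// Flat input: [level, name, key, mtime, size, isfile]
const root = [
  [0, '',           'k0', 1700000000, 0,     0],
  [1, 'Pictures',   'k1', 1700000001, 4096,  0],
  [2, 'sunset.jpg', 'k2', 1700000002, 54321, 1],
  [1, 'notes.txt',  'k3', 1700000003, 321,   1],
]
// Walking it as updateRoot does yields documents with these loc/name/dir values:
//   { loc: '/',        name: '',           dir: true  }  // the root itself
//   { loc: '',         name: 'Pictures',   dir: true  }
//   { loc: 'Pictures', name: 'sunset.jpg', dir: false }
//   { loc: '',         name: 'notes.txt',  dir: false }
```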