Compare commits
98 Commits: v0.2.0...d42f0f7601

Commits (SHA1):

d42f0f7601, 4c51029c9f, 4de2027959, d5e1304c0d, 54d6ea6332, c695c09ecc, d36605cd5b, fc1fb3ea5d,
32fa005c62, fabec4dd7e, ece64f48be, 1f24313d23, e3af21af91, 6938740b0f, b25d0fc14b, 5386508e28,
129250e072, c2be2ecd31, dd1d85f412, 4c7b310f82, 1250037cfd, cdc936d2d5, 4f370440d9, feaa8e315e,
14f7253ece, 9d3d27faf3, dd235e8f25, 139ff51dcd, 589e5a682c, 8114d679ef, 32b8e0702c, cc74912bb9,
c3cf4caa9a, b3eacf04f7, 047facaacb, 41fbd3d122, 40a45568c1, 8c6690ea98, 997e0b8549, 115bb5db59,
5f1eb0503a, 4aae194060, 12eabd29c3, 589b21f944, d3f584b738, 225f2b0651, b759d8324c, 119aba2b3c,
f52d58d645, 6cba674b30, 831b2716f7, 7e5901a2cf, a4f95d730b, 56082cba15, 3479a0da57, f99d92b217,
68a701538b, 05a16e3037, 52ecbc3d36, 042f1b7f42, d27cb2133a, a8ea43194d, 07fe7448cc, 783af44e26,
0d6180e8a4, bdc0bbd44f, ba36eaec1b, a435a30c88, 742b05ed66, a26dc42d88, 9002afbc7e, acdd776b92,
b3fd9637eb, 2b72508206, 8cc3ed1a04, 0d186726b5, 63bbe84859, 202f28ff15, 41772e6c18, e52379d515,
74987898c9, 859d312913, 4bc9cf4534, 754d779069, 367e4ba0ea, c2e9a4af05, 6cdc37a172, 19699564c2,
7baf8b3f9b, 47329ac04e, f4013d1196, 3672156b5e, f2b37852da, 708e54d080, d051265f40, 5cf133465e,
1c91bf2e87, 9cd6f83bec
@@ -1,7 +1,6 @@
# Web File Storage

Run directly from repository with Hatch (or use pip install as usual):

```sh
hatch run cista -l :3000 /path/to/files
```

@@ -9,17 +8,16 @@ hatch run cista -l :3000 /path/to/files
Settings incl. these arguments are stored to config file on the first startup and later `hatch run cista` is sufficient. If the `cista` script is missing, consider `pip install -e .` (within `hatch shell`) or some other trickery (known issue with installs made prior to adding the startup script).

Create your user account:

```sh
hatch run cista --user admin --privileged
```

## Build frontend

Frontend needs to be built before using and after any frontend changes:
Prebuilt frontend is provided in repository but for any changes it will need to be manually rebuilt:

```sh
cd frontend
cd cista-front
npm install
npm run build
```

@@ -1,9 +1,9 @@
<!DOCTYPE html>
<html lang=en>
<meta charset=UTF-8>
<title>Cista Storage</title>
<title>Cista</title>
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<link rel="icon" href="/src/assets/logo.svg">
<link rel="icon" href="/favicon.ico">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Roboto+Mono&family=Roboto:wght@400;700&display=swap" rel="stylesheet">

@@ -1,5 +1,5 @@
{
"name": "cista-frontend",
"name": "front",
"version": "0.0.0",
"private": true,
"scripts": {

BIN cista-front/public/favicon.ico (new binary file, 4.2 KiB)

cista-front/public/old-index.html (new executable file, 241 lines)
@@ -0,0 +1,241 @@
|
||||
<!DOCTYPE html>
|
||||
<title>Storage</title>
|
||||
<style>
|
||||
body {
|
||||
font-family: sans-serif;
|
||||
max-width: 100ch;
|
||||
margin: 0 auto;
|
||||
padding: 1em;
|
||||
background-color: #333;
|
||||
color: #eee;
|
||||
}
|
||||
td {
|
||||
text-align: right;
|
||||
padding: .5em;
|
||||
}
|
||||
td:first-child {
|
||||
text-align: left;
|
||||
}
|
||||
a {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
}
|
||||
</style>
|
||||
<div>
|
||||
<h2>Quick file upload</h2>
|
||||
<p>Uses parallel WebSocket connections for increased bandwidth /api/upload</p>
|
||||
<input type=file id=fileInput>
|
||||
<progress id=progressBar value=0 max=1></progress>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<h2>Files</h2>
|
||||
<ul id=file_list></ul>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
let files = {}
|
||||
let flatfiles = {}
|
||||
|
||||
function createWatchSocket() {
|
||||
const wsurl = new URL("/api/watch", location.href.replace(/^http/, 'ws'))
|
||||
const ws = new WebSocket(wsurl)
|
||||
ws.onmessage = event => {
|
||||
msg = JSON.parse(event.data)
|
||||
if (msg.update) {
|
||||
tree_update(msg.update)
|
||||
file_list(files)
|
||||
} else {
|
||||
console.log("Unkonwn message from watch socket", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
createWatchSocket()
|
||||
|
||||
function tree_update(msg) {
|
||||
console.log("Tree update", msg)
|
||||
let node = files
|
||||
for (const elem of msg) {
|
||||
if (elem.deleted) {
|
||||
const p = node.dir[elem.name].path
|
||||
delete node.dir[elem.name]
|
||||
delete flatfiles[p]
|
||||
break
|
||||
}
|
||||
if (elem.name !== undefined) node = node.dir[elem.name] ||= {}
|
||||
if (elem.size !== undefined) node.size = elem.size
|
||||
if (elem.mtime !== undefined) node.mtime = elem.mtime
|
||||
if (elem.dir !== undefined) node.dir = elem.dir
|
||||
}
|
||||
// Update paths and flatfiles
|
||||
files.path = "/"
|
||||
const nodes = [files]
|
||||
flatfiles = {}
|
||||
while (node = nodes.pop()) {
|
||||
flatfiles[node.path] = node
|
||||
if (node.dir === undefined) continue
|
||||
for (const name of Object.keys(node.dir)) {
|
||||
const child = node.dir[name]
|
||||
child.path = node.path + name + (child.dir === undefined ? "" : "/")
|
||||
nodes.push(child)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var collator = new Intl.Collator(undefined, {numeric: true, sensitivity: 'base'});
|
||||
|
||||
const compare_path = (a, b) => collator.compare(a.path, b.path)
|
||||
const compare_time = (a, b) => a.mtime > b.mtime
|
||||
|
||||
function file_list(files) {
|
||||
const table = document.getElementById("file_list")
|
||||
const sorted = Object.values(flatfiles).sort(compare_time)
|
||||
table.innerHTML = ""
|
||||
for (const f of sorted) {
|
||||
const {path, size, mtime} = f
|
||||
const tr = document.createElement("tr")
|
||||
const name_td = document.createElement("td")
|
||||
const size_td = document.createElement("td")
|
||||
const mtime_td = document.createElement("td")
|
||||
const a = document.createElement("a")
|
||||
table.appendChild(tr)
|
||||
tr.appendChild(name_td)
|
||||
tr.appendChild(size_td)
|
||||
tr.appendChild(mtime_td)
|
||||
name_td.appendChild(a)
|
||||
size_td.textContent = size
|
||||
mtime_td.textContent = formatUnixDate(mtime)
|
||||
a.textContent = path
|
||||
a.href = `/files${path}`
|
||||
/*a.onclick = event => {
|
||||
if (window.showSaveFilePicker) {
|
||||
event.preventDefault()
|
||||
download_ws(name, size)
|
||||
}
|
||||
}
|
||||
a.download = ""*/
|
||||
}
|
||||
}
|
||||
|
||||
function formatUnixDate(t) {
|
||||
const date = new Date(t * 1000)
|
||||
const now = new Date()
|
||||
const diff = date - now
|
||||
const formatter = new Intl.RelativeTimeFormat('en', { numeric: 'auto' })
|
||||
|
||||
if (Math.abs(diff) <= 60000) {
|
||||
return formatter.format(Math.round(diff / 1000), 'second')
|
||||
}
|
||||
|
||||
if (Math.abs(diff) <= 3600000) {
|
||||
return formatter.format(Math.round(diff / 60000), 'minute')
|
||||
}
|
||||
|
||||
if (Math.abs(diff) <= 86400000) {
|
||||
return formatter.format(Math.round(diff / 3600000), 'hour')
|
||||
}
|
||||
|
||||
if (Math.abs(diff) <= 604800000) {
|
||||
return formatter.format(Math.round(diff / 86400000), 'day')
|
||||
}
|
||||
|
||||
return date.toLocaleDateString()
|
||||
}
|
||||
|
||||
async function download_ws(name, size) {
|
||||
const fh = await window.showSaveFilePicker({
|
||||
suggestedName: name,
|
||||
})
|
||||
const writer = await fh.createWritable()
|
||||
writer.truncate(size)
|
||||
const wsurl = new URL("/api/download", location.href.replace(/^http/, 'ws'))
|
||||
const ws = new WebSocket(wsurl)
|
||||
let pos = 0
|
||||
ws.onopen = () => {
|
||||
console.log("Downloading over WebSocket", name, size)
|
||||
ws.send(JSON.stringify({name, start: 0, end: size, size}))
|
||||
}
|
||||
ws.onmessage = event => {
|
||||
if (typeof event.data === 'string') {
|
||||
const msg = JSON.parse(event.data)
|
||||
console.log("Download finished", msg)
|
||||
ws.close()
|
||||
return
|
||||
}
|
||||
console.log("Received chunk", name, pos, pos + event.data.size)
|
||||
pos += event.data.size
|
||||
writer.write(event.data)
|
||||
}
|
||||
ws.onclose = () => {
|
||||
if (pos < size) {
|
||||
console.log("Download aborted", name, pos)
|
||||
writer.truncate(pos)
|
||||
}
|
||||
writer.close()
|
||||
}
|
||||
}
|
||||
|
||||
const fileInput = document.getElementById("fileInput")
|
||||
const progress = document.getElementById("progressBar")
|
||||
const numConnections = 2
|
||||
const chunkSize = 1<<20
|
||||
const wsConnections = new Set()
|
||||
|
||||
//for (let i = 0; i < numConnections; i++) createUploadWS()
|
||||
|
||||
function createUploadWS() {
|
||||
const wsurl = new URL("/api/upload", location.href.replace(/^http/, 'ws'))
|
||||
const ws = new WebSocket(wsurl)
|
||||
ws.binaryType = 'arraybuffer'
|
||||
ws.onopen = () => {
|
||||
wsConnections.add(ws)
|
||||
console.log("Upload socket connected")
|
||||
}
|
||||
ws.onmessage = event => {
|
||||
msg = JSON.parse(event.data)
|
||||
if (msg.written) progress.value += +msg.written
|
||||
else console.log(`Error: ${msg.error}`)
|
||||
}
|
||||
ws.onclose = () => {
|
||||
wsConnections.delete(ws)
|
||||
console.log("Upload socket disconnected, reconnecting...")
|
||||
setTimeout(createUploadWS, 1000)
|
||||
}
|
||||
}
|
||||
|
||||
async function load(file, start, end) {
|
||||
const reader = new FileReader()
|
||||
const load = new Promise(resolve => reader.onload = resolve)
|
||||
reader.readAsArrayBuffer(file.slice(start, end))
|
||||
const event = await load
|
||||
return event.target.result
|
||||
}
|
||||
|
||||
async function sendChunk(file, start, end, ws) {
|
||||
const chunk = await load(file, start, end)
|
||||
ws.send(JSON.stringify({
|
||||
name: file.name,
|
||||
size: file.size,
|
||||
start: start,
|
||||
end: end
|
||||
}))
|
||||
ws.send(chunk)
|
||||
}
|
||||
|
||||
fileInput.addEventListener("change", async function() {
|
||||
const file = this.files[0]
|
||||
const numChunks = Math.ceil(file.size / chunkSize)
|
||||
progress.value = 0
|
||||
progress.max = file.size
|
||||
|
||||
console.log(wsConnections)
|
||||
for (let i = 0; i < numChunks; i++) {
|
||||
const ws = Array.from(wsConnections)[i % wsConnections.size]
|
||||
const start = i * chunkSize
|
||||
const end = Math.min(file.size, start + chunkSize)
|
||||
const res = await sendChunk(file, start, end, ws)
|
||||
}
|
||||
})
|
||||
|
||||
</script>
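
A minimal stand-alone sketch of the same upload wire format used by the page above (and by UploadButton.vue later in this diff): each chunk is announced with a JSON text frame `{name, size, start, end}` and then sent as one binary frame. The `websockets` package, the localhost URL, the example filename and the reply handling are illustrative assumptions rather than part of this commit, and login cookies are omitted.

```python
# Sketch of the cista /api/upload protocol as driven by the page above:
# per chunk, a JSON header {name, size, start, end} followed by the raw bytes.
import asyncio
import json
from pathlib import Path

import websockets  # third-party: pip install websockets

CHUNK = 1 << 20  # 1 MiB, matching chunkSize in the frontend code


async def upload(path: str, url: str = "ws://localhost:3000/api/upload"):
    file = Path(path)
    size = file.stat().st_size
    async with websockets.connect(url) as ws:
        with file.open("rb") as f:
            start = 0
            while start < size:
                data = f.read(CHUNK)
                end = start + len(data)
                # Text frame with the chunk header, then the binary chunk itself
                await ws.send(json.dumps(
                    {"name": file.name, "size": size, "start": start, "end": end}
                ))
                await ws.send(data)
                start = end
        # The page above listens for progress/ack replies; here we just print one if any arrives.
        try:
            print(await asyncio.wait_for(ws.recv(), timeout=5))
        except asyncio.TimeoutError:
            pass


asyncio.run(upload("example.bin"))  # placeholder filename
```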
|
||||
@@ -1,13 +1,13 @@
|
||||
<template>
|
||||
<LoginModal />
|
||||
<header>
|
||||
<HeaderMain ref="headerMain" :path="path.pathList" :query="path.query">
|
||||
<HeaderMain ref="headerMain">
|
||||
<HeaderSelected :path="path.pathList" />
|
||||
</HeaderMain>
|
||||
<BreadCrumb :path="path.pathList" tabindex="-1"/>
|
||||
</header>
|
||||
<main>
|
||||
<RouterView :path="path.pathList" :query="path.query" />
|
||||
<RouterView :path="path.pathList" />
|
||||
</main>
|
||||
</template>
|
||||
|
||||
@@ -15,8 +15,8 @@
|
||||
import { RouterView } from 'vue-router'
|
||||
import type { ComputedRef } from 'vue'
|
||||
import type HeaderMain from '@/components/HeaderMain.vue'
|
||||
import { onMounted, onUnmounted, ref, watchEffect } from 'vue'
|
||||
import { loadSession, watchConnect, watchDisconnect } from '@/repositories/WS'
|
||||
import { onMounted, onUnmounted, ref } from 'vue'
|
||||
import { watchConnect, watchDisconnect } from '@/repositories/WS'
|
||||
import { useDocumentStore } from '@/stores/documents'
|
||||
|
||||
import { computed } from 'vue'
|
||||
@@ -25,23 +25,16 @@ import Router from '@/router/index'
|
||||
interface Path {
|
||||
path: string
|
||||
pathList: string[]
|
||||
query: string
|
||||
}
|
||||
const documentStore = useDocumentStore()
|
||||
const path: ComputedRef<Path> = computed(() => {
|
||||
const p = decodeURIComponent(Router.currentRoute.value.path).split('//')
|
||||
const pathList = p[0].split('/').filter(value => value !== '')
|
||||
const query = p.slice(1).join('//')
|
||||
const p = decodeURIComponent(Router.currentRoute.value.path)
|
||||
const pathList = p.split('/').filter(value => value !== '')
|
||||
return {
|
||||
path: p[0],
|
||||
pathList,
|
||||
query
|
||||
path: p,
|
||||
pathList
|
||||
}
|
||||
})
|
||||
watchEffect(() => {
|
||||
document.title = path.value.path.replace(/\/$/, '').split('/').pop() || documentStore.server.name || 'Cista Storage'
|
||||
})
|
||||
onMounted(loadSession)
|
||||
onMounted(watchConnect)
|
||||
onUnmounted(watchDisconnect)
|
||||
// Update human-readable x seconds ago messages from mtimes
|
||||
|
(58 binary image assets are listed at this point in the original diff; each shows identical Before and After sizes, so the image contents are unchanged.)
@@ -46,11 +46,8 @@ const isCurrent = (index: number) => index == props.path.length ? 'location' : u
|
||||
const navigate = (index: number) => {
|
||||
const link = links[index]
|
||||
if (!link) throw Error(`No link at index ${index} (path: ${props.path})`)
|
||||
const url = `/${longest.value.slice(0, index).join('/')}/`
|
||||
const here = `/${longest.value.join('/')}/`
|
||||
link.focus()
|
||||
if (here.startsWith(location.hash.slice(1))) router.replace(url)
|
||||
else router.push(url)
|
||||
router.replace(`/${longest.value.slice(0, index).join('/')}`)
|
||||
}
|
||||
|
||||
const move = (dir: number) => {
|
||||
@@ -3,11 +3,34 @@
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="selection">
|
||||
<input type="checkbox" tabindex="-1" v-model="allSelected" :indeterminate="selectionIndeterminate">
|
||||
<input
|
||||
type="checkbox"
|
||||
tabindex="-1"
|
||||
v-model="allSelected"
|
||||
:indeterminate="selectionIndeterminate"
|
||||
/>
|
||||
</th>
|
||||
<th
|
||||
class="sortcolumn"
|
||||
:class="{ sortactive: sort === 'name' }"
|
||||
@click="toggleSort('name')"
|
||||
>
|
||||
Name
|
||||
</th>
|
||||
<th
|
||||
class="sortcolumn modified right"
|
||||
:class="{ sortactive: sort === 'modified' }"
|
||||
@click="toggleSort('modified')"
|
||||
>
|
||||
Modified
|
||||
</th>
|
||||
<th
|
||||
class="sortcolumn size right"
|
||||
:class="{ sortactive: sort === 'size' }"
|
||||
@click="toggleSort('size')"
|
||||
>
|
||||
Size
|
||||
</th>
|
||||
<th class="sortcolumn" :class="{ sortactive: sort === 'name' }" @click="toggleSort('name')">Name</th>
|
||||
<th class="sortcolumn modified right" :class="{ sortactive: sort === 'modified' }" @click="toggleSort('modified')">Modified</th>
|
||||
<th class="sortcolumn size right" :class="{ sortactive: sort === 'size' }" @click="toggleSort('size')">Size</th>
|
||||
<th class="menu"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
@@ -15,13 +38,27 @@
|
||||
<tr v-if="editing?.key === 'new'" class="folder">
|
||||
<td class="selection"></td>
|
||||
<td class="name">
|
||||
<FileRenameInput :doc="editing" :rename="mkdir" :exit="() => {editing = null}" />
|
||||
<FileRenameInput
|
||||
:doc="editing"
|
||||
:rename="mkdir"
|
||||
:exit="
|
||||
() => {
|
||||
editing = null
|
||||
}
|
||||
"
|
||||
/>
|
||||
</td>
|
||||
<FileModified :doc=editing />
|
||||
<FileSize :doc=editing />
|
||||
<td class="modified right">
|
||||
<time :datetime="new Date(editing.mtime).toISOString().replace('.000', '')">{{
|
||||
editing.modified
|
||||
}}</time>
|
||||
</td>
|
||||
<td class="size right">{{ editing.sizedisp }}</td>
|
||||
<td class="menu"></td>
|
||||
</tr>
|
||||
<template v-for="(doc, index) in sortedDocuments" :key="doc.key">
|
||||
<template
|
||||
v-for="(doc, index) in sortedDocuments"
|
||||
:key="doc.key">
|
||||
<tr class="folder-change" v-if="showFolderBreadcrumb(index)">
|
||||
<th colspan="5"><BreadCrumb :path="doc.loc ? doc.loc.split('/') : []" /></th>
|
||||
</tr>
|
||||
@@ -45,26 +82,50 @@
|
||||
/>
|
||||
</td>
|
||||
<td class="name">
|
||||
<template v-if="editing === doc">
|
||||
<FileRenameInput :doc="doc" :rename="rename" :exit="() => {editing = null}" />
|
||||
</template>
|
||||
<template v-if="editing === doc"
|
||||
><FileRenameInput
|
||||
:doc="doc"
|
||||
:rename="rename"
|
||||
:exit="
|
||||
() => {
|
||||
editing = null
|
||||
}
|
||||
"
|
||||
/></template>
|
||||
<template v-else>
|
||||
<a
|
||||
:href="url_for(doc)"
|
||||
tabindex="-1"
|
||||
@contextmenu.prevent
|
||||
@focus.stop="cursor = doc"
|
||||
@blur="ev => { if (!editing) cursor = null }"
|
||||
@keyup.left="router.back()"
|
||||
@keyup.right.stop="ev => { if (doc.dir) (ev.target as HTMLElement).click() }"
|
||||
>{{ doc.name }}</a
|
||||
>
|
||||
<button v-if="cursor == doc" class="rename-button" @click="() => (editing = doc)">🖊️</button>
|
||||
<button
|
||||
v-if="cursor == doc"
|
||||
class="rename-button"
|
||||
@click="() => (editing = doc)"
|
||||
>
|
||||
🖊️
|
||||
</button>
|
||||
</template>
|
||||
</td>
|
||||
<FileModified :doc=doc />
|
||||
<FileSize :doc=doc />
|
||||
<td class="modified right">
|
||||
<time
|
||||
:data-tooltip="new Date(1000 * doc.mtime).toISOString().replace('T', '\n').replace('.000Z', ' UTC')"
|
||||
>{{ doc.modified }}</time
|
||||
>
|
||||
</td>
|
||||
<td class="size right">{{ doc.sizedisp }}</td>
|
||||
<td class="menu">
|
||||
<button tabindex="-1" @click.stop="contextMenu($event, doc)">⋮</button>
|
||||
<button
|
||||
tabindex="-1"
|
||||
@click.stop="contextMenu($event, doc)"
|
||||
>
|
||||
⋮
|
||||
</button>
|
||||
</td>
|
||||
</tr>
|
||||
</template>
|
||||
@@ -87,10 +148,13 @@ import { connect, controlUrl } from '@/repositories/WS'
|
||||
import { collator, formatSize, formatUnixDate } from '@/utils'
|
||||
import { useRouter } from 'vue-router'
|
||||
|
||||
const props = defineProps<{
|
||||
path: Array<string>
|
||||
documents: Document[]
|
||||
}>()
|
||||
const props = withDefaults(
|
||||
defineProps<{
|
||||
path: Array<string>
|
||||
documents: Document[]
|
||||
}>(),
|
||||
{}
|
||||
)
|
||||
const documentStore = useDocumentStore()
|
||||
const router = useRouter()
|
||||
const url_for = (doc: Document) => {
|
||||
@@ -137,7 +201,7 @@ defineExpose({
|
||||
loc: loc.value,
|
||||
key: 'new',
|
||||
name: 'New Folder',
|
||||
dir: true,
|
||||
type: 'folder',
|
||||
mtime: now,
|
||||
size: 0,
|
||||
sizedisp: formatSize(0),
|
||||
@@ -246,7 +310,7 @@ const mkdir = (doc: Document, name: string) => {
|
||||
editing.value = null
|
||||
} else {
|
||||
console.log('mkdir', msg)
|
||||
router.push(doc.loc ? `/${doc.loc}/${name}/` : `/${name}/`)
|
||||
router.push(`/${doc.loc}/${name}/`)
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -337,7 +401,7 @@ table .selection {
|
||||
text-overflow: clip;
|
||||
}
|
||||
table .modified {
|
||||
width: 9em;
|
||||
width: 8em;
|
||||
}
|
||||
table .size {
|
||||
width: 5em;
|
||||
@@ -46,8 +46,8 @@ const apply = () => {
|
||||
|
||||
<style>
|
||||
input#FileRenameInput {
|
||||
color: var(--input-color);
|
||||
background: var(--input-background);
|
||||
color: var(--primary-color);
|
||||
background: var(--primary-background);
|
||||
border: 0;
|
||||
border-radius: 0.3rem;
|
||||
padding: 0.4rem;
|
||||
52
cista-front/src/components/FileViewer.vue
Normal file
@@ -0,0 +1,52 @@
|
||||
<template>
|
||||
<object
|
||||
v-if="props.type === 'pdf'"
|
||||
:data="dataURL"
|
||||
type="application/pdf"
|
||||
width="100%"
|
||||
height="100%"
|
||||
></object>
|
||||
<a-image
|
||||
v-else-if="props.type === 'image'"
|
||||
width="50%"
|
||||
:src="dataURL"
|
||||
@click="() => setVisible(true)"
|
||||
:previewMask="false"
|
||||
:preview="{
|
||||
visibleImg,
|
||||
onVisibleChange: setVisible
|
||||
}"
|
||||
/>
|
||||
<!-- Unknown case -->
|
||||
<h1 v-else>Unsupported file type</h1>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { watchEffect, ref } from 'vue'
|
||||
import Router from '@/router/index'
|
||||
import { url_document_get } from '@/repositories/Document'
|
||||
|
||||
const dataURL = ref('')
|
||||
watchEffect(() => {
|
||||
dataURL.value = new URL(
|
||||
url_document_get + Router.currentRoute.value.path,
|
||||
location.origin
|
||||
).toString()
|
||||
})
|
||||
const emit = defineEmits({
|
||||
visibleImg(value: boolean) {
|
||||
return value
|
||||
}
|
||||
})
|
||||
|
||||
function setVisible(value: boolean) {
|
||||
emit('visibleImg', value)
|
||||
}
|
||||
|
||||
const props = defineProps<{
|
||||
type?: string
|
||||
visibleImg: boolean
|
||||
}>()
|
||||
</script>
|
||||
|
||||
<style></style>
|
||||
@@ -5,11 +5,11 @@
|
||||
<div class="error-message" @click="documentStore.error = ''">{{ documentStore.error }}</div>
|
||||
<div class="smallgap"></div>
|
||||
</template>
|
||||
<UploadButton :path="props.path" />
|
||||
<UploadButton />
|
||||
<SvgButton
|
||||
name="create-folder"
|
||||
data-tooltip="New folder"
|
||||
@click="() => documentStore.fileExplorer!.newFolder()"
|
||||
@click="() => documentStore.fileExplorer.newFolder()"
|
||||
/>
|
||||
<slot></slot>
|
||||
<div class="spacer smallgap"></div>
|
||||
@@ -17,9 +17,7 @@
|
||||
<input
|
||||
ref="search"
|
||||
type="search"
|
||||
:value="query"
|
||||
@blur="ev => { if (!query) closeSearch(ev) }"
|
||||
@input="updateSearch"
|
||||
v-model="documentStore.search"
|
||||
placeholder="Search words"
|
||||
class="margin-input"
|
||||
@keyup.escape="closeSearch"
|
||||
@@ -33,42 +31,30 @@
|
||||
|
||||
<script setup lang="ts">
|
||||
import { useDocumentStore } from '@/stores/documents'
|
||||
import { ref, nextTick, watchEffect } from 'vue'
|
||||
import { ref, nextTick } from 'vue'
|
||||
import ContextMenu from '@imengyu/vue3-context-menu'
|
||||
import router from '@/router';
|
||||
|
||||
const documentStore = useDocumentStore()
|
||||
const showSearchInput = ref<boolean>(false)
|
||||
const search = ref<HTMLInputElement | null>()
|
||||
const searchButton = ref<HTMLButtonElement | null>()
|
||||
|
||||
const closeSearch = (ev: Event) => {
|
||||
const closeSearch = () => {
|
||||
if (!showSearchInput.value) return // Already closing
|
||||
showSearchInput.value = false
|
||||
documentStore.search = ''
|
||||
const breadcrumb = document.querySelector('.breadcrumb') as HTMLElement
|
||||
breadcrumb.focus()
|
||||
updateSearch(ev)
|
||||
}
|
||||
const updateSearch = (ev: Event) => {
|
||||
const q = (ev.target as HTMLInputElement).value
|
||||
let p = props.path.join('/')
|
||||
p = p ? `/${p}` : ''
|
||||
const url = q ? `${p}//${q}` : (p || '/')
|
||||
console.log("Update search", url)
|
||||
if (!props.query && q) router.push(url)
|
||||
else router.replace(url)
|
||||
}
|
||||
const toggleSearchInput = (ev: Event) => {
|
||||
const toggleSearchInput = () => {
|
||||
showSearchInput.value = !showSearchInput.value
|
||||
if (!showSearchInput.value) return closeSearch(ev)
|
||||
if (!showSearchInput.value) return closeSearch()
|
||||
nextTick(() => {
|
||||
const input = search.value
|
||||
if (input) input.focus()
|
||||
})
|
||||
}
|
||||
watchEffect(() => {
|
||||
if (props.query) showSearchInput.value = true
|
||||
})
|
||||
|
||||
const settingsMenu = (e: Event) => {
|
||||
// show the context menu
|
||||
const items = []
|
||||
@@ -83,10 +69,6 @@ const settingsMenu = (e: Event) => {
|
||||
items,
|
||||
})
|
||||
}
|
||||
const props = defineProps<{
|
||||
path: Array<string>
|
||||
query: string
|
||||
}>()
|
||||
|
||||
defineExpose({
|
||||
toggleSearchInput,
|
||||
@@ -34,7 +34,7 @@ const op = (op: string, dst?: string) => {
|
||||
// @ts-ignore
|
||||
if (dst !== undefined) msg.dst = dst
|
||||
const control = connect(controlUrl, {
|
||||
message(ev: MessageEvent) {
|
||||
message(ev: WebSocmetMessageEvent) {
|
||||
const res = JSON.parse(ev.data)
|
||||
if ('error' in res) {
|
||||
console.error('Control socket error', msg, res.error)
|
||||
@@ -138,8 +138,7 @@ const download = async () => {
|
||||
}
|
||||
}
|
||||
// Otherwise, zip and download
|
||||
const name = sel.keys.length === 1 ? sel.docs[sel.keys[0]].name : 'download'
|
||||
linkdl(`/zip/${Array.from(sel.keys).join('+')}/${name}.zip`)
|
||||
linkdl(`/zip/${Array.from(sel.keys).join('+')}/download.zip`)
|
||||
documentStore.selected.clear()
|
||||
}
|
||||
</script>
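
The selection download above simply points the browser at `/zip/<key>+<key>+.../<name>.zip`. The same endpoint can be fetched from a script; the sketch below streams it to disk with the standard library. The host, the example keys and the output name are placeholders, and any required session cookie is omitted.

```python
# Sketch: fetch a zip of selected file keys from the /zip endpoint shown above.
# Keys are joined with '+', exactly as the frontend builds the link; values are placeholders.
import shutil
import urllib.request

keys = ["b1946ac92492d234", "c0e1a2b3d4f56789"]  # placeholder FUIDs
url = f"http://localhost:3000/zip/{'+'.join(keys)}/download.zip"

with urllib.request.urlopen(url) as resp, open("download.zip", "wb") as out:
    shutil.copyfileobj(resp, out)  # stream the archive to disk as it is generated
```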
|
||||
27
cista-front/src/components/NotificationLoading.vue
Normal file
@@ -0,0 +1,27 @@
|
||||
<template>
|
||||
<template v-for="upload in documentStore.uploadingDocuments" :key="upload.key">
|
||||
<span>{{ upload.name }}</span>
|
||||
<div class="progress-container">
|
||||
<a-progress :percent="upload.progress" />
|
||||
<CloseCircleOutlined class="close-button" @click="dismissUpload(upload.key)" />
|
||||
</div>
|
||||
</template>
|
||||
</template>
|
||||
<script setup lang="ts">
|
||||
import { useDocumentStore } from '@/stores/documents'
|
||||
const documentStore = useDocumentStore()
|
||||
|
||||
function dismissUpload(key: number) {
|
||||
documentStore.deleteUploadingDocument(key)
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.progress-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
.close-button:hover {
|
||||
color: #b81414;
|
||||
}
|
||||
</style>
|
||||
96
cista-front/src/components/UploadButton.vue
Normal file
@@ -0,0 +1,96 @@
|
||||
<script setup lang="ts">
|
||||
import { useDocumentStore } from '@/stores/documents'
|
||||
import { h, ref } from 'vue'
|
||||
|
||||
const fileUploadButton = ref()
|
||||
const folderUploadButton = ref()
|
||||
const documentStore = useDocumentStore()
|
||||
const open = (placement: any) => openNotification(placement)
|
||||
|
||||
const isNotificationOpen = ref(false)
|
||||
const openNotification = (placement: any) => {
|
||||
if (!isNotificationOpen.value) {
|
||||
/*
|
||||
api.open({
|
||||
message: `Uploading documents`,
|
||||
description: h(NotificationLoading),
|
||||
placement,
|
||||
duration: 0,
|
||||
onClose: () => { isNotificationOpen.value = false }
|
||||
});*/
|
||||
isNotificationOpen.value = true
|
||||
}
|
||||
}
|
||||
|
||||
function uploadFileHandler() {
|
||||
fileUploadButton.value.click()
|
||||
}
|
||||
|
||||
async function load(file: File, start: number, end: number): Promise<ArrayBuffer> {
|
||||
const reader = new FileReader()
|
||||
const load = new Promise<Event>(resolve => (reader.onload = resolve))
|
||||
reader.readAsArrayBuffer(file.slice(start, end))
|
||||
const event = await load
|
||||
if (event.target && event.target instanceof FileReader) {
|
||||
return event.target.result as ArrayBuffer
|
||||
} else {
|
||||
throw new Error('Error loading file')
|
||||
}
|
||||
}
|
||||
|
||||
async function sendChunk(file: File, start: number, end: number) {
|
||||
const ws = documentStore.wsUpload
|
||||
if (ws) {
|
||||
const chunk = await load(file, start, end)
|
||||
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
name: file.name,
|
||||
size: file.size,
|
||||
start: start,
|
||||
end: end
|
||||
})
|
||||
)
|
||||
ws.send(chunk)
|
||||
}
|
||||
}
|
||||
|
||||
async function uploadFileChangeHandler(event: Event) {
|
||||
const target = event.target as HTMLInputElement
|
||||
const chunkSize = 1 << 20
|
||||
if (target && target.files && target.files.length > 0) {
|
||||
const file = target.files[0]
|
||||
const numChunks = Math.ceil(file.size / chunkSize)
|
||||
const document = documentStore.pushUploadingDocuments(file.name)
|
||||
open('bottomRight')
|
||||
for (let i = 0; i < numChunks; i++) {
|
||||
const start = i * chunkSize
|
||||
const end = Math.min(file.size, start + chunkSize)
|
||||
const res = await sendChunk(file, start, end)
|
||||
console.log('progress: ' + (100 * (i + 1)) / numChunks)
|
||||
console.log('Num Chunks: ' + numChunks)
|
||||
documentStore.updateUploadingDocuments(document.key, (100 * (i + 1)) / numChunks)
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<template>
|
||||
<template>
|
||||
<input
|
||||
ref="fileUploadButton"
|
||||
@change="uploadFileChangeHandler"
|
||||
class="upload-input"
|
||||
type="file"
|
||||
multiple
|
||||
/>
|
||||
<input
|
||||
ref="folderUploadButton"
|
||||
@change="uploadFileChangeHandler"
|
||||
class="upload-input"
|
||||
type="file"
|
||||
webkitdirectory
|
||||
/>
|
||||
</template>
|
||||
<SvgButton name="add-file" data-tooltip="Upload files" @click="fileUploadButton.click()" />
|
||||
<SvgButton name="add-folder" data-tooltip="Upload folder" @click="folderUploadButton.click()" />
|
||||
</template>
|
||||
@@ -22,16 +22,29 @@ export type errorEvent = {

// Raw types the backend /api/watch sends us

export type FileEntry = [
  number, // level
  string, // name
  FUID,
  number, // mtime
  number, // size
  number, // isfile
]
export type FileEntry = {
  key: FUID
  size: number
  mtime: number
}

export type UpdateEntry = ['k', number] | ['d', number] | ['i', Array<FileEntry>]
export type DirEntry = {
  key: FUID
  size: number
  mtime: number
  dir: DirList
}

export type DirList = Record<string, FileEntry | DirEntry>

export type UpdateEntry = {
  name: string
  deleted?: boolean
  key?: FUID
  size?: number
  mtime?: number
  dir?: DirList
}

// Helper structure for selections
export interface SelectedItems {
|
||||
@@ -1,29 +1,14 @@
|
||||
import { useDocumentStore } from "@/stores/documents"
|
||||
import type { FileEntry, UpdateEntry, errorEvent } from "./Document"
|
||||
import type { DirEntry, UpdateEntry, errorEvent } from "./Document"
|
||||
|
||||
export const controlUrl = '/api/control'
|
||||
export const uploadUrl = '/api/upload'
|
||||
export const watchUrl = '/api/watch'
|
||||
|
||||
let tree = [] as FileEntry[]
|
||||
let tree = null as DirEntry | null
|
||||
let reconnectDuration = 500
|
||||
let wsWatch = null as WebSocket | null
|
||||
|
||||
export const loadSession = () => {
|
||||
const store = useDocumentStore()
|
||||
try {
|
||||
tree = JSON.parse(sessionStorage["cista-files"])
|
||||
store.updateRoot(tree)
|
||||
return true
|
||||
} catch (error) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const saveSession = () => {
|
||||
sessionStorage["cista-files"] = JSON.stringify(tree)
|
||||
}
|
||||
|
||||
export const connect = (path: string, handlers: Partial<Record<keyof WebSocketEventMap, any>>) => {
|
||||
const webSocket = new WebSocket(new URL(path, location.origin.replace(/^http/, 'ws')))
|
||||
for (const [event, handler] of Object.entries(handlers)) webSocket.addEventListener(event, handler)
|
||||
@@ -57,7 +42,6 @@ export const watchConnect = () => {
|
||||
}
|
||||
if ("server" in msg) {
|
||||
console.log('Connected to backend', msg)
|
||||
store.server = msg.server
|
||||
store.connected = true
|
||||
reconnectDuration = 500
|
||||
store.error = ''
|
||||
@@ -109,36 +93,34 @@ const handleWatchMessage = (event: MessageEvent) => {
|
||||
}
|
||||
}
|
||||
|
||||
function handleRootMessage({ root }: { root: FileEntry[] }) {
|
||||
function handleRootMessage({ root }: { root: DirEntry }) {
|
||||
const store = useDocumentStore()
|
||||
console.log('Watch root', root)
|
||||
store.updateRoot(root)
|
||||
tree = root
|
||||
saveSession()
|
||||
}
|
||||
|
||||
function handleUpdateMessage(updateData: { update: UpdateEntry[] }) {
|
||||
const store = useDocumentStore()
|
||||
const update = updateData.update
|
||||
console.log('Watch update', update)
|
||||
console.log('Watch update', updateData.update)
|
||||
if (!tree) return console.error('Watch update before root')
|
||||
let newtree = []
|
||||
let oidx = 0
|
||||
|
||||
for (const [action, arg] of update) {
|
||||
if (action === 'k') {
|
||||
newtree.push(...tree.slice(oidx, oidx + arg))
|
||||
oidx += arg
|
||||
let node: DirEntry = tree
|
||||
for (const elem of updateData.update) {
|
||||
if (elem.deleted) {
|
||||
delete node.dir[elem.name]
|
||||
break // Deleted elements can't have further children
|
||||
}
|
||||
else if (action === 'd') oidx += arg
|
||||
else if (action === 'i') newtree.push(...arg)
|
||||
else console.log("Unknown update action", action, arg)
|
||||
if (elem.name) {
|
||||
// @ts-ignore
|
||||
console.log(node, elem.name)
|
||||
node = node.dir[elem.name] ||= {}
|
||||
}
|
||||
if (elem.key !== undefined) node.key = elem.key
|
||||
if (elem.size !== undefined) node.size = elem.size
|
||||
if (elem.mtime !== undefined) node.mtime = elem.mtime
|
||||
if (elem.dir !== undefined) node.dir = elem.dir
|
||||
}
|
||||
if (oidx != tree.length)
|
||||
throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}, new tree ${newtree.length}`)
|
||||
store.updateRoot(newtree)
|
||||
tree = newtree
|
||||
saveSession()
|
||||
store.updateRoot(tree)
|
||||
}
|
||||
|
||||
function handleError(msg: errorEvent) {
|
||||
@@ -1,4 +1,10 @@
|
||||
import type { Document, FileEntry, FUID, SelectedItems } from '@/repositories/Document'
|
||||
import type {
|
||||
Document,
|
||||
DirEntry,
|
||||
FileEntry,
|
||||
FUID,
|
||||
SelectedItems
|
||||
} from '@/repositories/Document'
|
||||
import { formatSize, formatUnixDate, haystackFormat } from '@/utils'
|
||||
import { defineStore } from 'pinia'
|
||||
import { collator } from '@/utils'
|
||||
@@ -20,11 +26,13 @@ export const useDocumentStore = defineStore({
|
||||
id: 'documents',
|
||||
state: () => ({
|
||||
document: [] as Document[],
|
||||
search: "" as string,
|
||||
selected: new Set<FUID>(),
|
||||
fileExplorer: null as any,
|
||||
uploadingDocuments: [],
|
||||
uploadCount: 0 as number,
|
||||
fileExplorer: null,
|
||||
error: '' as string,
|
||||
connected: false,
|
||||
server: {} as Record<string, any>,
|
||||
user: {
|
||||
username: '',
|
||||
privileged: false,
|
||||
@@ -32,29 +40,70 @@ export const useDocumentStore = defineStore({
|
||||
isOpenLoginModal: false
|
||||
} as User
|
||||
}),
|
||||
persist: {
|
||||
storage: sessionStorage,
|
||||
paths: ['document'],
|
||||
},
|
||||
actions: {
|
||||
updateRoot(root: FileEntry[]) {
|
||||
const docs = []
|
||||
let loc = [] as string[]
|
||||
for (const [level, name, key, mtime, size, isfile] of root) {
|
||||
loc = loc.slice(0, level - 1)
|
||||
docs.push({
|
||||
name,
|
||||
loc: level ? loc.join('/') : '/',
|
||||
key,
|
||||
size,
|
||||
sizedisp: formatSize(size),
|
||||
mtime,
|
||||
modified: formatUnixDate(mtime),
|
||||
haystack: haystackFormat(name),
|
||||
dir: !isfile,
|
||||
})
|
||||
loc.push(name)
|
||||
updateRoot(root: DirEntry | null = null) {
|
||||
if (!root) {
|
||||
this.document = []
|
||||
return
|
||||
}
|
||||
// Transform tree data to flat documents array
|
||||
let loc = ""
|
||||
const mapper = ([name, attr]: [string, FileEntry | DirEntry]) => ({
|
||||
...attr,
|
||||
loc,
|
||||
name,
|
||||
sizedisp: formatSize(attr.size),
|
||||
modified: formatUnixDate(attr.mtime),
|
||||
haystack: haystackFormat(name),
|
||||
})
|
||||
const queue = [...Object.entries(root.dir ?? {}).map(mapper)]
|
||||
const docs = []
|
||||
for (let doc; (doc = queue.shift()) !== undefined;) {
|
||||
docs.push(doc)
|
||||
if ("dir" in doc) {
|
||||
// Recurse but replace recursive structure with boolean
|
||||
loc = doc.loc ? `${doc.loc}/${doc.name}` : doc.name
|
||||
queue.push(...Object.entries(doc.dir).map(mapper))
|
||||
// @ts-ignore
|
||||
doc.dir = true
|
||||
}
|
||||
// @ts-ignore
|
||||
else doc.dir = false
|
||||
}
|
||||
// Pre sort directory entries folders first then files, names in natural ordering
|
||||
docs.sort((a, b) =>
|
||||
// @ts-ignore
|
||||
b.dir - a.dir ||
|
||||
collator.compare(a.name, b.name)
|
||||
)
|
||||
this.document = docs as Document[]
|
||||
},
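
The `updateRoot` action above turns the nested `DirEntry` tree into the flat `document` array the rest of the UI consumes, tagging each entry with its parent location `loc` and sorting folders ahead of files. A rough Python rendering of that traversal may make the shape easier to see; the field names follow the store code above, while the sample data and helper name are invented for illustration.

```python
# Sketch of the updateRoot flattening: walk the nested dir tree breadth-first,
# emit one flat record per entry with its parent location, then sort folders first.
tree = {
    "dir": {
        "docs": {"key": "a1", "size": 4096, "mtime": 1700000000,
                 "dir": {"report.pdf": {"key": "b2", "size": 12345, "mtime": 1700000100}}},
        "notes.txt": {"key": "c3", "size": 321, "mtime": 1699990000},
    }
}


def flatten(root: dict) -> list[dict]:
    docs = []
    queue = [("", name, attr) for name, attr in root.get("dir", {}).items()]
    while queue:
        loc, name, attr = queue.pop(0)
        is_dir = "dir" in attr
        docs.append({"loc": loc, "name": name, "dir": is_dir,
                     "size": attr["size"], "mtime": attr["mtime"]})
        if is_dir:
            childloc = f"{loc}/{name}" if loc else name
            queue.extend((childloc, n, a) for n, a in attr["dir"].items())
    # Folders first, then by name (the store uses an Intl.Collator for natural ordering)
    docs.sort(key=lambda d: (not d["dir"], d["name"].lower()))
    return docs


for d in flatten(tree):
    print(d["loc"] or "/", d["name"], "(dir)" if d["dir"] else d["size"])
```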
|
||||
updateUploadingDocuments(key: number, progress: number) {
|
||||
for (const d of this.uploadingDocuments) {
|
||||
if (d.key === key) d.progress = progress
|
||||
}
|
||||
},
|
||||
pushUploadingDocuments(name: string) {
|
||||
this.uploadCount++
|
||||
const document = {
|
||||
key: this.uploadCount,
|
||||
name: name,
|
||||
progress: 0
|
||||
}
|
||||
this.uploadingDocuments.push(document)
|
||||
return document
|
||||
},
|
||||
deleteUploadingDocument(key: number) {
|
||||
this.uploadingDocuments = this.uploadingDocuments.filter(e => e.key !== key)
|
||||
},
|
||||
updateModified() {
|
||||
for (const doc of this.document) doc.modified = formatUnixDate(doc.mtime)
|
||||
for (const d of this.document) {
|
||||
if ('mtime' in d) d.modified = formatUnixDate(d.mtime)
|
||||
}
|
||||
},
|
||||
login(username: string, privileged: boolean) {
|
||||
this.user.username = username
|
||||
@@ -16,17 +16,17 @@ import { needleFormat, localeIncludes, collator } from '@/utils';
|
||||
|
||||
const documentStore = useDocumentStore()
|
||||
const fileExplorer = ref()
|
||||
const props = defineProps<{
|
||||
const props = defineProps({
|
||||
path: Array<string>
|
||||
query: string
|
||||
}>()
|
||||
})
|
||||
const documents = computed(() => {
|
||||
if (!props.path) return []
|
||||
const loc = props.path.join('/')
|
||||
const query = props.query
|
||||
// List the current location
|
||||
if (!query) return documentStore.document.filter(doc => doc.loc === loc)
|
||||
if (!documentStore.search) return documentStore.document.filter(doc => doc.loc === loc)
|
||||
// Find up to 100 newest documents that match the search
|
||||
const needle = needleFormat(query)
|
||||
const search = documentStore.search
|
||||
const needle = needleFormat(search)
|
||||
let limit = 100
|
||||
let docs = []
|
||||
for (const doc of documentStore.recentDocuments) {
|
||||
@@ -46,7 +46,7 @@ const documents = computed(() => {
|
||||
// @ts-ignore
|
||||
(a.type === 'file') - (b.type === 'file') ||
|
||||
// @ts-ignore
|
||||
b.name.includes(query) - a.name.includes(query) ||
|
||||
b.name.includes(search) - a.name.includes(search) ||
|
||||
collator.compare(a.name, b.name)
|
||||
))
|
||||
return docs
|
||||
@@ -105,9 +105,9 @@ def _confdir(args):
|
||||
if confdir.exists() and not confdir.is_dir():
|
||||
if confdir.name != config.conffile.name:
|
||||
raise ValueError("Config path is not a directory")
|
||||
# Accidentally pointed to the db.toml, use parent
|
||||
# Accidentally pointed to the cista.toml, use parent
|
||||
confdir = confdir.parent
|
||||
config.conffile = confdir / config.conffile.name
|
||||
config.conffile = config.conffile.with_parent(confdir)
|
||||
|
||||
|
||||
def _user(args):
|
||||
|
||||
@@ -46,7 +46,6 @@ async def upload(req, ws):
|
||||
raise ValueError(f"Expected {req.end - pos} more bytes, got {d}")
|
||||
# Report success
|
||||
res = StatusMsg(status="ack", req=req)
|
||||
print("ack", res)
|
||||
await asend(ws, res)
|
||||
|
||||
|
||||
@@ -89,7 +88,7 @@ async def watch(req, ws):
|
||||
msgspec.json.encode(
|
||||
{
|
||||
"server": {
|
||||
"name": config.config.name or config.config.path.name,
|
||||
"name": "Cista", # Should be configurable
|
||||
"version": __version__,
|
||||
"public": config.config.public,
|
||||
},
|
||||
@@ -104,11 +103,11 @@ async def watch(req, ws):
|
||||
)
|
||||
uuid = token_bytes(16)
|
||||
try:
|
||||
with watching.state.lock:
|
||||
with watching.tree_lock:
|
||||
q = watching.pubsub[uuid] = asyncio.Queue()
|
||||
# Init with disk usage and full tree
|
||||
await ws.send(watching.format_space(watching.state.space))
|
||||
await ws.send(watching.format_root(watching.state.root))
|
||||
await ws.send(watching.format_du())
|
||||
await ws.send(watching.format_tree())
|
||||
# Send updates
|
||||
while True:
|
||||
await ws.send(await q.get())
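
For reference, the other side of this socket can be exercised with a few lines of Python. The sketch below connects to `/api/watch`, keeps a local copy of the tree from the root message, and applies `update` lists the same way `handleUpdateMessage` in repositories/WS.ts (and `tree_update` in old-index.html) does: walk into the named entry, overwrite whatever fields are present, delete on `deleted`. The `websockets` dependency, the URL and the message handling are illustrative assumptions; authentication is omitted and assumes a public server.

```python
# Minimal /api/watch client sketch: mirror the server's directory tree locally
# and apply the update entries it pushes (see handleUpdateMessage in this diff).
import asyncio
import json

import websockets  # third-party: pip install websockets


async def watch(url: str = "ws://localhost:3000/api/watch"):
    tree = None
    async with websockets.connect(url) as ws:
        async for raw in ws:
            msg = json.loads(raw)
            if "root" in msg:
                tree = msg["root"]               # full nested DirEntry tree
            elif "update" in msg and tree is not None:
                node = tree
                for elem in msg["update"]:       # descend one named entry at a time
                    if elem.get("deleted"):
                        node["dir"].pop(elem["name"], None)
                        break
                    if "name" in elem:
                        node = node["dir"].setdefault(elem["name"], {})
                    for field in ("key", "size", "mtime", "dir"):
                        if field in elem:
                            node[field] = elem[field]
            else:
                print("other message:", msg)     # server info, disk usage, errors, ...


asyncio.run(watch())
```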
|
||||
|
||||
141
cista/app.py
@@ -1,9 +1,7 @@
|
||||
import asyncio
|
||||
import datetime
|
||||
import mimetypes
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from pathlib import Path, PurePath, PurePosixPath
|
||||
from stat import S_IFDIR, S_IFREG
|
||||
from importlib.resources import files
|
||||
from urllib.parse import unquote
|
||||
from wsgiref.handlers import format_date_time
|
||||
|
||||
@@ -11,12 +9,12 @@ import brotli
|
||||
import sanic.helpers
|
||||
from blake3 import blake3
|
||||
from sanic import Blueprint, Sanic, empty, raw
|
||||
from sanic.exceptions import Forbidden, NotFound, ServerError
|
||||
from sanic.exceptions import Forbidden, NotFound
|
||||
from sanic.log import logging
|
||||
from stream_zip import ZIP_AUTO, stream_zip
|
||||
|
||||
from cista import auth, config, session, watching
|
||||
from cista.api import bp
|
||||
from cista.protocol import DirEntry
|
||||
from cista.util.apphelpers import handle_sanic_exception
|
||||
|
||||
# Workaround until Sanic PR #2824 is merged
|
||||
@@ -32,9 +30,7 @@ app.exception(Exception)(handle_sanic_exception)
|
||||
async def main_start(app, loop):
|
||||
config.load_config()
|
||||
await watching.start(app, loop)
|
||||
app.ctx.threadexec = ThreadPoolExecutor(
|
||||
max_workers=8, thread_name_prefix="cista-ioworker"
|
||||
)
|
||||
app.ctx.threadexec = ThreadPoolExecutor(max_workers=8)
|
||||
|
||||
|
||||
@app.after_server_stop
|
||||
@@ -47,8 +43,8 @@ async def main_stop(app, loop):
|
||||
async def use_session(req):
|
||||
req.ctx.session = session.get(req)
|
||||
try:
|
||||
req.ctx.username = req.ctx.session["username"] # type: ignore
|
||||
req.ctx.user = config.config.users[req.ctx.username]
|
||||
req.ctx.username = req.ctx.session["username"]
|
||||
req.ctx.user = config.config.users[req.ctx.session["username"]] # type: ignore
|
||||
except (AttributeError, KeyError, TypeError):
|
||||
req.ctx.username = None
|
||||
req.ctx.user = None
|
||||
@@ -79,16 +75,22 @@ def http_fileserver(app, _):
|
||||
www = {}
|
||||
|
||||
|
||||
@app.before_server_start
|
||||
async def load_wwwroot(*_ignored):
|
||||
global www
|
||||
www = await asyncio.get_event_loop().run_in_executor(None, _load_wwwroot, www)
|
||||
|
||||
|
||||
def _load_wwwroot(www):
|
||||
wwwnew = {}
|
||||
base = Path(__file__).with_name("wwwroot")
|
||||
paths = [PurePath()]
|
||||
base = files("cista") / "wwwroot"
|
||||
paths = ["."]
|
||||
while paths:
|
||||
path = paths.pop(0)
|
||||
current = base / path
|
||||
for p in current.iterdir():
|
||||
if p.is_dir():
|
||||
paths.append(p.relative_to(base))
|
||||
paths.append(current / p.parts[-1])
|
||||
continue
|
||||
name = p.relative_to(base).as_posix()
|
||||
mime = mimetypes.guess_type(name)[0] or "application/octet-stream"
|
||||
@@ -119,35 +121,15 @@ def _load_wwwroot(www):
|
||||
if len(br) >= len(data):
|
||||
br = False
|
||||
wwwnew[name] = data, br, headers
|
||||
if not wwwnew:
|
||||
raise ServerError(
|
||||
"Web frontend missing. Did you forget npm run build?",
|
||||
extra={"wwwroot": str(base)},
|
||||
quiet=True,
|
||||
)
|
||||
return wwwnew
|
||||
|
||||
|
||||
@app.before_server_start
|
||||
async def start(app):
|
||||
await load_wwwroot(app)
|
||||
if app.debug:
|
||||
app.add_task(refresh_wwwroot())
|
||||
|
||||
|
||||
async def load_wwwroot(app):
|
||||
global www
|
||||
www = await asyncio.get_event_loop().run_in_executor(
|
||||
app.ctx.threadexec, _load_wwwroot, www
|
||||
)
|
||||
|
||||
|
||||
@app.add_task
|
||||
async def refresh_wwwroot():
|
||||
while True:
|
||||
await asyncio.sleep(0.5)
|
||||
try:
|
||||
wwwold = www
|
||||
await load_wwwroot(app)
|
||||
await load_wwwroot()
|
||||
changes = ""
|
||||
for name in sorted(www):
|
||||
attr = www[name]
|
||||
@@ -163,6 +145,7 @@ async def refresh_wwwroot():
|
||||
print("Error loading wwwroot", e)
|
||||
if not app.debug:
|
||||
return
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
|
||||
@app.route("/<path:path>", methods=["GET", "HEAD"])
|
||||
@@ -177,70 +160,75 @@ async def wwwroot(req, path=""):
|
||||
return empty(304, headers=headers)
|
||||
# Brotli compressed?
|
||||
if br and "br" in req.headers.accept_encoding.split(", "):
|
||||
headers = {**headers, "content-encoding": "br"}
|
||||
headers = {
|
||||
**headers,
|
||||
"content-encoding": "br",
|
||||
}
|
||||
data = br
|
||||
return raw(data, headers=headers)
|
||||
|
||||
|
||||
def get_files(wanted: set) -> list[tuple[PurePosixPath, Path]]:
|
||||
loc = PurePosixPath()
|
||||
idx = 0
|
||||
ret = []
|
||||
level: int | None = None
|
||||
parent: PurePosixPath | None = None
|
||||
with watching.state.lock:
|
||||
root = watching.state.root
|
||||
while idx < len(root):
|
||||
f = root[idx]
|
||||
loc = PurePosixPath(*loc.parts[: f.level - 1]) / f.name
|
||||
if parent is not None and f.level <= level:
|
||||
level = parent = None
|
||||
if f.key in wanted:
|
||||
level, parent = f.level, loc.parent
|
||||
if parent is not None:
|
||||
wanted.discard(f.key)
|
||||
ret.append((loc.relative_to(parent), watching.rootpath / loc))
|
||||
idx += 1
|
||||
return ret
|
||||
import datetime
|
||||
from collections import deque
|
||||
from pathlib import Path
|
||||
from stat import S_IFREG
|
||||
|
||||
from stream_zip import ZIP_AUTO, stream_zip
|
||||
|
||||
|
||||
@app.get("/zip/<keys>/<zipfile:ext=zip>")
|
||||
async def zip_download(req, keys, zipfile, ext):
|
||||
"""Download a zip archive of the given keys"""
|
||||
|
||||
wanted = set(keys.split("+"))
|
||||
files = get_files(wanted)
|
||||
with watching.tree_lock:
|
||||
q = deque([([], None, watching.tree[""].dir)])
|
||||
files = []
|
||||
while q:
|
||||
locpar, relpar, d = q.pop()
|
||||
for name, attr in d.items():
|
||||
loc = [*locpar, name]
|
||||
rel = None
|
||||
if relpar or attr.key in wanted:
|
||||
rel = [*relpar, name] if relpar else [name]
|
||||
wanted.remove(attr.key)
|
||||
if isinstance(attr, DirEntry):
|
||||
q.append((loc, rel, attr.dir))
|
||||
elif rel:
|
||||
files.append(
|
||||
(
|
||||
"/".join(rel),
|
||||
Path(watching.rootpath.joinpath(*loc)),
|
||||
attr.mtime,
|
||||
attr.size,
|
||||
)
|
||||
)
|
||||
|
||||
if not files:
|
||||
raise NotFound(
|
||||
"No files found",
|
||||
context={"keys": keys, "zipfile": f"{zipfile}.{ext}", "wanted": wanted},
|
||||
context={"keys": keys, "zipfile": zipfile, "wanted": wanted},
|
||||
)
|
||||
if wanted:
|
||||
raise NotFound("Files not found", context={"missing": wanted})
|
||||
|
||||
def local_files(files):
|
||||
for rel, p in files:
|
||||
s = p.stat()
|
||||
size = s.st_size
|
||||
modified = datetime.datetime.fromtimestamp(s.st_mtime, datetime.UTC)
|
||||
name = rel.as_posix()
|
||||
if p.is_dir():
|
||||
yield f"{name}/", modified, S_IFDIR | 0o755, ZIP_AUTO(size), iter(b"")
|
||||
else:
|
||||
yield name, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p, size)
|
||||
for rel, p, mtime, size in files:
|
||||
if not p.is_file():
|
||||
raise NotFound(f"File not found {rel}")
|
||||
|
||||
def contents(name, size):
|
||||
def local_files(files):
|
||||
for rel, p, mtime, size in files:
|
||||
modified = datetime.datetime.fromtimestamp(mtime, datetime.UTC)
|
||||
yield rel, modified, S_IFREG | 0o644, ZIP_AUTO(size), contents(p)
|
||||
|
||||
def contents(name):
|
||||
with name.open("rb") as f:
|
||||
while size > 0 and (chunk := f.read(min(size, 1 << 20))):
|
||||
size -= len(chunk)
|
||||
while chunk := f.read(65536):
|
||||
yield chunk
|
||||
assert size == 0
|
||||
|
||||
def worker():
|
||||
try:
|
||||
for chunk in stream_zip(local_files(files)):
|
||||
asyncio.run_coroutine_threadsafe(queue.put(chunk), loop).result()
|
||||
asyncio.run_coroutine_threadsafe(queue.put(chunk), loop)
|
||||
except Exception:
|
||||
logging.exception("Error streaming ZIP")
|
||||
raise
|
||||
@@ -253,10 +241,7 @@ async def zip_download(req, keys, zipfile, ext):
|
||||
thread = loop.run_in_executor(app.ctx.threadexec, worker)
|
||||
|
||||
# Stream the response
|
||||
res = await req.respond(
|
||||
content_type="application/zip",
|
||||
headers={"cache-control": "no-store"},
|
||||
)
|
||||
res = await req.respond(content_type="application/zip")
|
||||
while chunk := await queue.get():
|
||||
await res.send(chunk)
|
||||
|
||||
|
||||
@@ -68,10 +68,10 @@ def verify(request, *, privileged=False):
|
||||
if request.ctx.user:
|
||||
if request.ctx.user.privileged:
|
||||
return
|
||||
raise Forbidden("Access Forbidden: Only for privileged users", quiet=True)
|
||||
raise Forbidden("Access Forbidden: Only for privileged users")
|
||||
elif config.config.public or request.ctx.user:
|
||||
return
|
||||
raise Unauthorized("Login required", "cookie", quiet=True)
|
||||
raise Unauthorized("Login required", "cookie", context={"redirect": "/login"})
|
||||
|
||||
|
||||
bp = Blueprint("auth")
|
||||
|
||||
@@ -14,7 +14,6 @@ class Config(msgspec.Struct):
|
||||
listen: str
|
||||
secret: str = secrets.token_hex(12)
|
||||
public: bool = False
|
||||
name: str = ""
|
||||
users: dict[str, User] = {}
|
||||
links: dict[str, Link] = {}
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ class MkDir(ControlBase):
|
||||
|
||||
def __call__(self):
|
||||
path = config.config.path / filename.sanitize(self.path)
|
||||
path.mkdir(parents=True, exist_ok=False)
|
||||
path.mkdir(parents=False, exist_ok=False)
|
||||
|
||||
|
||||
class Rename(ControlBase):
|
||||
@@ -112,43 +112,47 @@ class ErrorMsg(msgspec.Struct):
|
||||
## Directory listings
|
||||
|
||||
|
||||
class FileEntry(msgspec.Struct, array_like=True):
|
||||
level: int
|
||||
class FileEntry(msgspec.Struct):
|
||||
key: str
|
||||
size: int
|
||||
mtime: int
|
||||
|
||||
|
||||
class DirEntry(msgspec.Struct):
|
||||
key: str
|
||||
size: int
|
||||
mtime: int
|
||||
dir: DirList
|
||||
|
||||
def __getitem__(self, name):
|
||||
return self.dir[name]
|
||||
|
||||
def __setitem__(self, name, value):
|
||||
self.dir[name] = value
|
||||
|
||||
def __contains__(self, name):
|
||||
return name in self.dir
|
||||
|
||||
def __delitem__(self, name):
|
||||
del self.dir[name]
|
||||
|
||||
@property
|
||||
def props(self):
|
||||
return {k: v for k, v in self.__struct_fields__ if k != "dir"}
|
||||
|
||||
|
||||
DirList = dict[str, FileEntry | DirEntry]
|
||||
|
||||
|
||||
class UpdateEntry(msgspec.Struct, omit_defaults=True):
|
||||
"""Updates the named entry in the tree. Fields that are set replace old values. A list of entries recurses directories."""
|
||||
|
||||
name: str
|
||||
key: str
|
||||
mtime: int
|
||||
size: int
|
||||
isfile: int
|
||||
|
||||
def __repr__(self):
|
||||
return self.key or "FileEntry()"
|
||||
|
||||
|
||||
class Update(msgspec.Struct, array_like=True):
|
||||
...
|
||||
|
||||
|
||||
class UpdKeep(Update, tag="k"):
|
||||
count: int
|
||||
|
||||
|
||||
class UpdDel(Update, tag="d"):
|
||||
count: int
|
||||
|
||||
|
||||
class UpdIns(Update, tag="i"):
|
||||
items: list[FileEntry]
|
||||
|
||||
|
||||
class UpdateMessage(msgspec.Struct):
|
||||
update: list[UpdKeep | UpdDel | UpdIns]
|
||||
|
||||
|
||||
class Space(msgspec.Struct):
|
||||
disk: int
|
||||
free: int
|
||||
usage: int
|
||||
storage: int
|
||||
deleted: bool = False
|
||||
size: int | None = None
|
||||
mtime: int | None = None
|
||||
dir: DirList | None = None
|
||||
|
||||
|
||||
def make_dir_data(root):
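
As a quick illustration of how these structs serialize, the sketch below builds a tiny tree with the new `FileEntry`/`DirEntry`/`UpdateEntry` shapes and encodes it with msgspec, roughly what the watch endpoint sends over the wire. The struct definitions are trimmed, hypothetical copies of the ones above and the example values are invented; only encoding is shown, since the browser side decodes plain JSON.

```python
# Sketch: encoding the new tree/update message shapes with msgspec
# (trimmed copies of the structs above; example values are invented).
from __future__ import annotations

import msgspec


class FileEntry(msgspec.Struct):
    key: str
    size: int
    mtime: int


class DirEntry(msgspec.Struct):
    key: str
    size: int
    mtime: int
    dir: dict[str, FileEntry | DirEntry]


class UpdateEntry(msgspec.Struct, omit_defaults=True):
    name: str
    deleted: bool = False
    key: str | None = None
    size: int | None = None
    mtime: int | None = None
    dir: dict[str, FileEntry | DirEntry] | None = None


root = DirEntry(key="root", size=4096, mtime=1700000000, dir={
    "notes.txt": FileEntry(key="c3", size=321, mtime=1699990000),
})
print(msgspec.json.encode({"root": root}).decode())

update = [UpdateEntry(name="notes.txt", size=640, mtime=1700000500)]
print(msgspec.json.encode({"update": update}).decode())
```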
|
||||
|
||||
@@ -30,10 +30,7 @@ def run(*, dev=False):
|
||||
reload_dir={confdir, wwwroot},
|
||||
access_log=True,
|
||||
) # type: ignore
|
||||
if dev:
|
||||
Sanic.serve()
|
||||
else:
|
||||
Sanic.serve_single()
|
||||
Sanic.serve()
|
||||
|
||||
|
||||
def check_cert(certdir, domain):
|
||||
|
||||