Compare commits: 2978e0c968 ... v0.2.0
1 commit: 9854dd01cc
@@ -120,6 +120,9 @@ class FileEntry(msgspec.Struct, array_like=True):
    size: int
    isfile: int

    def __repr__(self):
        return self.key or "FileEntry()"


class Update(msgspec.Struct, array_like=True):
    ...
@@ -137,6 +140,10 @@ class UpdIns(Update, tag="i"):
    items: list[FileEntry]


class UpdateMessage(msgspec.Struct):
    update: list[UpdKeep | UpdDel | UpdIns]


class Space(msgspec.Struct):
    disk: int
    free: int
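For reference, array_like=True makes msgspec encode these structs as compact JSON arrays, and the tag on each Update subclass becomes the first array element. A minimal sketch of the resulting wire format; the FileEntry field names/order and the count field names are assumptions inferred from the tests below, not taken from the real cista.protocol module:

import msgspec

class FileEntry(msgspec.Struct, array_like=True):
    # Assumed field order (level, name, key, mtime, size, isfile), matching the
    # six positional values used in tests/test_watching.py below.
    level: int
    name: str
    key: str
    mtime: int
    size: int
    isfile: int

class Update(msgspec.Struct, array_like=True):
    ...

class UpdKeep(Update, tag="k"):
    count: int  # field name is illustrative

class UpdDel(Update, tag="d"):
    count: int  # field name is illustrative

class UpdIns(Update, tag="i"):
    items: list[FileEntry]

class UpdateMessage(msgspec.Struct):
    update: list[UpdKeep | UpdDel | UpdIns]

msg = UpdateMessage(update=[UpdKeep(2), UpdDel(1), UpdIns([FileEntry(1, "foo", "foo", 0, 0, 0)])])
print(msgspec.json.encode(msg).decode())
# {"update":[["k",2],["d",1],["i",[[1,"foo","foo",0,0,0]]]]}

This compact array form is what the frontend destructures as [action, arg] in handleUpdateMessage further down.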
@@ -50,37 +50,42 @@ class State:
        begin, end = 0, len(self._listing)
        level = 0
        isfile = 0
        while level < len(relpath.parts):
            # Enter a subdirectory

        # Special case for root
        if not relpath.parts:
            return slice(begin, end)

        begin += 1
        for part in relpath.parts:
            level += 1
            begin += 1
            if level == len(relpath.parts):
                isfile = relfile
            name = relpath.parts[level - 1]
            namesort = sortkey(name)
            r = self._listing[begin]
            assert r.level == level
            # Iterate over items at this level
            while (
                begin < end
                and r.name != name
                and r.isfile <= isfile
                and sortkey(r.name) < namesort
            ):
                # Skip contents
            found = False

            while begin < end:
                entry = self._listing[begin]

                if entry.level < level:
                    break

                if entry.level == level:
                    if entry.name == part:
                        found = True
                        if level == len(relpath.parts):
                            isfile = relfile
                        else:
                            begin += 1
                        break
                    cmp = entry.isfile - isfile or sortkey(entry.name) > sortkey(part)
                    if cmp > 0:
                        break

                begin += 1
                while begin < end and self._listing[begin].level > level:
                    begin += 1
                # Not found?
                if begin == end or self._listing[begin].level < level:
                    return slice(begin, begin)
                r = self._listing[begin]
            # Not found?
            if begin == end or r.name != name:

            if not found:
                return slice(begin, begin)
            # Found an item, now find its end
            for end in range(begin + 1, len(self._listing)):
                if self._listing[end].level <= level:

        # Found the starting point, now find the end of the slice
        for end in range(begin + 1, len(self._listing) + 1):
            if end == len(self._listing) or self._listing[end].level <= level:
                break
        return slice(begin, end)
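The method above operates on a flat, pre-sorted listing in which every entry records its depth as level; looking up a path returns the slice that covers the matching entry plus everything nested under it, or an empty slice at the would-be insertion point. A small sketch of that layout, using the same fixture as the tests below (only level and name shown):

# index  level  name     _slice(...) result
#   0      0    ""       slice(0, 6)  - root spans the whole listing
#   1      1    "bar"    slice(1, 3)  - "bar" plus its child "baz"
#   2      2    "baz"
#   3      1    "foo"    slice(3, 4)
#   4      1    "xxx"    slice(4, 6)
#   5      2    "yyy"    slice(5, 6)
# A missing path yields an empty slice, e.g. "zzz" -> slice(6, 6).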
@@ -148,11 +153,12 @@ def watcher_thread(loop):
    rootpath = config.config.path
    i = inotify.adapters.InotifyTree(rootpath.as_posix())
    # Initialize the tree from filesystem
    old, new = state.root, walk()
    if old != new:
        with state.lock:
    new = walk()
    with state.lock:
        old = state.root
        if old != new:
            state.root = new
            broadcast(format_root(new), loop)
            broadcast(format_update(old, new), loop)

    # The watching is not entirely reliable, so do a full refresh every minute
    refreshdl = time.monotonic() + 60.0
@@ -190,10 +196,10 @@ def watcher_thread_poll(loop):

    while not quit:
        rootpath = config.config.path
        old = state.root
        new = walk()
        if old != new:
            with state.lock:
        with state.lock:
            old = state.root
            if old != new:
                state.root = new
                broadcast(format_update(old, new), loop)
@@ -283,13 +289,11 @@ def format_update(old, new):

        del_count = 0
        rest = new[nidx:]
        while old[oidx] not in rest:
        while oidx < len(old) and old[oidx] not in rest:
            del_count += 1
            oidx += 1

        if del_count:
            update.append(UpdDel(del_count))
            oidx += 1
            continue

        insert_items = []
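format_update emits a sequence of keep/delete/insert operations that the client replays against its previous listing. A minimal sketch of that replay in Python, mirroring the handleUpdateMessage logic further down (the helper name apply_update is hypothetical):

def apply_update(tree: list, update: list) -> list:
    # Each op is ["k", n] (keep n entries), ["d", n] (drop n entries),
    # or ["i", items] (insert new entries), as produced by format_update.
    newtree = []
    oidx = 0
    for action, arg in update:
        if action == "k":
            newtree.extend(tree[oidx : oidx + arg])
            oidx += arg
        elif action == "d":
            oidx += arg
        elif action == "i":
            newtree.extend(arg)
    assert oidx == len(tree), "update out of sync with old tree"
    return newtree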
@@ -333,8 +337,9 @@ async def abroadcast(msg):

async def start(app, loop):
    config.load_config()
    use_inotify = False and sys.platform == "linux"
    app.ctx.watcher = threading.Thread(
        target=watcher_thread if sys.platform == "linux" else watcher_thread_poll,
        target=watcher_thread if use_inotify else watcher_thread_poll,
        args=[loop],
    )
    app.ctx.watcher.start()
@@ -1,52 +0,0 @@
<template>
  <object
    v-if="props.type === 'pdf'"
    :data="dataURL"
    type="application/pdf"
    width="100%"
    height="100%"
  ></object>
  <a-image
    v-else-if="props.type === 'image'"
    width="50%"
    :src="dataURL"
    @click="() => setVisible(true)"
    :previewMask="false"
    :preview="{
      visibleImg,
      onVisibleChange: setVisible
    }"
  />
  <!-- Unknown case -->
  <h1 v-else>Unsupported file type</h1>
</template>

<script setup lang="ts">
import { watchEffect, ref } from 'vue'
import Router from '@/router/index'
import { url_document_get } from '@/repositories/Document'

const dataURL = ref('')
watchEffect(() => {
  dataURL.value = new URL(
    url_document_get + Router.currentRoute.value.path,
    location.origin
  ).toString()
})
const emit = defineEmits({
  visibleImg(value: boolean) {
    return value
  }
})

function setVisible(value: boolean) {
  emit('visibleImg', value)
}

const props = defineProps<{
  type?: string
  visibleImg: boolean
}>()
</script>

<style></style>
@@ -9,7 +9,7 @@
    <SvgButton
      name="create-folder"
      data-tooltip="New folder"
      @click="() => documentStore.fileExplorer.newFolder()"
      @click="() => documentStore.fileExplorer!.newFolder()"
    />
    <slot></slot>
    <div class="spacer smallgap"></div>
@@ -42,15 +42,15 @@ const showSearchInput = ref<boolean>(false)
const search = ref<HTMLInputElement | null>()
const searchButton = ref<HTMLButtonElement | null>()

const closeSearch = ev => {
const closeSearch = (ev: Event) => {
  if (!showSearchInput.value) return // Already closing
  showSearchInput.value = false
  const breadcrumb = document.querySelector('.breadcrumb') as HTMLElement
  breadcrumb.focus()
  updateSearch(ev)
}
const updateSearch = ev => {
  const q = ev.target.value
const updateSearch = (ev: Event) => {
  const q = (ev.target as HTMLInputElement).value
  let p = props.path.join('/')
  p = p ? `/${p}` : ''
  const url = q ? `${p}//${q}` : (p || '/')
@@ -58,9 +58,9 @@ const updateSearch = ev => {
  if (!props.query && q) router.push(url)
  else router.replace(url)
}
const toggleSearchInput = () => {
const toggleSearchInput = (ev: Event) => {
  showSearchInput.value = !showSearchInput.value
  if (!showSearchInput.value) return closeSearch()
  if (!showSearchInput.value) return closeSearch(ev)
  nextTick(() => {
    const input = search.value
    if (input) input.focus()
@@ -34,7 +34,7 @@ const op = (op: string, dst?: string) => {
  // @ts-ignore
  if (dst !== undefined) msg.dst = dst
  const control = connect(controlUrl, {
    message(ev: WebSocmetMessageEvent) {
    message(ev: MessageEvent) {
      const res = JSON.parse(ev.data)
      if ('error' in res) {
        console.error('Control socket error', msg, res.error)
@@ -1,27 +0,0 @@
<template>
  <template v-for="upload in documentStore.uploadingDocuments" :key="upload.key">
    <span>{{ upload.name }}</span>
    <div class="progress-container">
      <a-progress :percent="upload.progress" />
      <CloseCircleOutlined class="close-button" @click="dismissUpload(upload.key)" />
    </div>
  </template>
</template>
<script setup lang="ts">
import { useDocumentStore } from '@/stores/documents'
const documentStore = useDocumentStore()

function dismissUpload(key: number) {
  documentStore.deleteUploadingDocument(key)
}
</script>

<style scoped>
.progress-container {
  display: flex;
  align-items: center;
}
.close-button:hover {
  color: #b81414;
}
</style>
@@ -11,29 +11,38 @@ const props = defineProps({
  path: Array<string>
})

type CloudFile = {
  file: File
  cloudName: string
  cloudPos: number
}

function uploadHandler(event: Event) {
  event.preventDefault()
  event.stopPropagation()
  // @ts-ignore
  let infiles = Array.from(event.dataTransfer?.files || event.target.files) as File[]
  const infiles = Array.from(event.dataTransfer?.files || event.target.files) as File[]
  if (!infiles.length) return
  const loc = props.path!.join('/')
  for (const f of infiles) {
    f.cloudName = loc + '/' + (f.webkitRelativePath || f.name)
    f.cloudPos = 0
  let files = []
  for (const file of infiles) {
    files.push({
      file,
      cloudName: loc + '/' + (file.webkitRelativePath || file.name),
      cloudPos: 0,
    })
  }
  const dotfiles = infiles.filter(f => f.cloudName.includes('/.'))
  const dotfiles = files.filter(f => f.cloudName.includes('/.'))
  if (dotfiles.length) {
    documentStore.error = "Won't upload dotfiles"
    console.log("Dotfiles omitted", dotfiles)
    infiles = infiles.filter(f => !f.cloudName.includes('/.'))
    files = files.filter(f => !f.cloudName.includes('/.'))
  }
  if (!infiles.length) return
  infiles.sort((a, b) => collator.compare(a.cloudName, b.cloudName))
  if (!files.length) return
  files.sort((a, b) => collator.compare(a.cloudName, b.cloudName))
  // @ts-ignore
  upqueue = upqueue.concat(infiles)
  statsAdd(infiles)
  upqueue = [...upqueue, ...files]
  statsAdd(files)
  startWorker()
}
@@ -49,13 +58,14 @@ const uprogress_init = {
  tlast: 0,
  statbytes: 0,
  statdur: 0,
  files: [],
  files: [] as CloudFile[],
  filestart: 0,
  fileidx: 0,
  filecount: 0,
  filename: '',
  filesize: 0,
  filepos: 0,
  status: 'idle',
}
const uprogress = reactive({...uprogress_init})
const percent = computed(() => uprogress.uploaded / uprogress.total * 100)
@@ -78,7 +88,7 @@ setInterval(() => {
    uprogress.statdur *= .9
  }
}, 100)
const statUpdate = ({name, size, start, end}) => {
const statUpdate = ({name, size, start, end}: {name: string, size: number, start: number, end: number}) => {
  if (name !== uprogress.filename) return // If stats have been reset
  const now = Date.now()
  uprogress.uploaded = uprogress.filestart + end
@@ -97,7 +107,7 @@ const statNextFile = () => {
  const f = uprogress.files.shift()
  if (!f) return statReset()
  uprogress.filepos = 0
  uprogress.filesize = f.size
  uprogress.filesize = f.file.size
  uprogress.filename = f.cloudName
}
const statReset = () => {
@@ -105,14 +115,14 @@ const statReset = () => {
  uprogress.t0 = Date.now()
  uprogress.tlast = uprogress.t0 + 1
}
const statsAdd = (f: Array<File>) => {
const statsAdd = (f: CloudFile[]) => {
  if (uprogress.files.length === 0) statReset()
  uprogress.total += f.reduce((a, b) => a + b.size, 0)
  uprogress.total += f.reduce((a, b) => a + b.file.size, 0)
  uprogress.filecount += f.length
  uprogress.files = uprogress.files.concat(f)
  uprogress.files = [...uprogress.files, ...f]
  statNextFile()
}
let upqueue = [] as File[]
let upqueue = [] as CloudFile[]

// TODO: Rewrite as WebSocket class
const WSCreate = async () => await new Promise<WebSocket>(resolve => {
@@ -155,17 +165,19 @@ const worker = async () => {
  const ws = await WSCreate()
  while (upqueue.length) {
    const f = upqueue[0]
    if (f.cloudPos === f.size) {
    if (f.cloudPos === f.file.size) {
      upqueue.shift()
      continue
    }
    const start = f.cloudPos
    const end = Math.min(f.size, start + (1<<20))
    const control = { name: f.cloudName, size: f.size, start, end }
    const data = f.slice(start, end)
    const end = Math.min(f.file.size, start + (1<<20))
    const control = { name: f.cloudName, size: f.file.size, start, end }
    const data = f.file.slice(start, end)
    f.cloudPos = end
    // Note: files may get modified during I/O
    // @ts-ignore FIXME proper WebSocket class, avoid attaching functions to WebSocket object
    ws.sendMsg(control)
    // @ts-ignore
    await ws.sendData(data)
  }
  if (upqueue.length) startWorker()
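For orientation, the worker above streams each file in 1 MiB pieces: a JSON control message { name, size, start, end } followed by the raw bytes for that range. A rough Python sketch of the same framing from a client's point of view; the websockets library and the ws_url endpoint are illustrative assumptions, not part of this commit:

import asyncio
import json
from pathlib import Path

import websockets  # assumed client library for this sketch

CHUNK = 1 << 20  # 1 MiB, same chunk size as the worker above

async def upload(ws_url: str, path: Path, cloud_name: str):
    size = path.stat().st_size
    async with websockets.connect(ws_url) as ws:
        with path.open("rb") as f:
            start = 0
            while start < size:
                end = min(size, start + CHUNK)
                # Control frame first, then the matching byte range.
                await ws.send(json.dumps({"name": cloud_name, "size": size, "start": start, "end": end}))
                await ws.send(f.read(end - start))
                start = end

# asyncio.run(upload("wss://example.invalid/upload", Path("video.mp4"), "folder/video.mp4"))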
@@ -109,7 +109,7 @@ const handleWatchMessage = (event: MessageEvent) => {
  }
}

function handleRootMessage({ root }: { root: DirEntry }) {
function handleRootMessage({ root }: { root: FileEntry[] }) {
  const store = useDocumentStore()
  console.log('Watch root', root)
  store.updateRoot(root)
@@ -126,16 +126,16 @@ function handleUpdateMessage(updateData: { update: UpdateEntry[] }) {
  let oidx = 0

  for (const [action, arg] of update) {
    if (action === 'k') {
      newtree.push(...tree.slice(oidx, oidx + arg))
      oidx += arg
    }
    else if (action === 'd') oidx += arg
    else if (action === 'i') newtree.push(...arg)
    else console.log("Unknown update action", action, arg)
    if (action === 'k') {
      newtree.push(...tree.slice(oidx, oidx + arg))
      oidx += arg
    }
    else if (action === 'd') oidx += arg
    else if (action === 'i') newtree.push(...arg)
    else console.log("Unknown update action", action, arg)
  }
  if (oidx != tree.length)
    throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}`)
    throw Error(`Tree update out of sync, number of entries mismatch: got ${oidx}, expected ${tree.length}, new tree ${newtree.length}`)
  store.updateRoot(newtree)
  tree = newtree
  saveSession()
@@ -4,7 +4,6 @@ import { defineStore } from 'pinia'
import { collator } from '@/utils'
import { logoutUser } from '@/repositories/User'
import { watchConnect } from '@/repositories/WS'
import { format } from 'path'

type FileData = { id: string; mtime: number; size: number; dir: DirectoryData }
type DirectoryData = {
@@ -22,9 +21,7 @@ export const useDocumentStore = defineStore({
  state: () => ({
    document: [] as Document[],
    selected: new Set<FUID>(),
    uploadingDocuments: [],
    uploadCount: 0 as number,
    fileExplorer: null,
    fileExplorer: null as any,
    error: '' as string,
    connected: false,
    server: {} as Record<string, any>,
@@ -54,7 +51,6 @@ export const useDocumentStore = defineStore({
      })
      loc.push(name)
    }
    console.log("Documents", docs)
    this.document = docs as Document[]
  },
  updateModified() {
tests/test_watching.py (new file, 136 lines)
@@ -0,0 +1,136 @@
from pathlib import PurePosixPath

import msgspec
import pytest

from cista.protocol import FileEntry, UpdateMessage, UpdDel, UpdIns, UpdKeep
from cista.watching import State, format_update


def decode(data: str):
    return msgspec.json.decode(data, type=UpdateMessage).update


# Helper function to create a list of FileEntry objects
def f(count, start=0):
    return [FileEntry(i, str(i), str(i), 0, 0, 0) for i in range(start, start + count)]


def test_identical_lists():
    old_list = f(3)
    new_list = old_list.copy()
    expected = [UpdKeep(3)]
    assert decode(format_update(old_list, new_list)) == expected


def test_completely_different_lists():
    old_list = f(3)
    new_list = f(3, 3)  # Different entries
    expected = [UpdDel(3), UpdIns(new_list)]
    assert decode(format_update(old_list, new_list)) == expected


def test_insertions():
    old_list = f(3)
    new_list = old_list[:2] + f(1, 10) + old_list[2:]
    expected = [UpdKeep(2), UpdIns(f(1, 10)), UpdKeep(1)]
    assert decode(format_update(old_list, new_list)) == expected


def test_deletions():
    old_list = f(3)
    new_list = [old_list[0], old_list[2]]
    expected = [UpdKeep(1), UpdDel(1), UpdKeep(1)]
    assert decode(format_update(old_list, new_list)) == expected


def test_mixed_operations():
    old_list = f(4)
    new_list = [old_list[0], old_list[2], *f(1, 10)]
    expected = [UpdKeep(1), UpdDel(1), UpdKeep(1), UpdDel(1), UpdIns(f(1, 10))]
    assert decode(format_update(old_list, new_list)) == expected


def test_empty_old_list():
    old_list = []
    new_list = f(3)
    expected = [UpdIns(new_list)]
    assert decode(format_update(old_list, new_list)) == expected


def test_empty_new_list():
    old_list = f(3)
    new_list = []
    expected = [UpdDel(3)]
    assert decode(format_update(old_list, new_list)) == expected


def test_longer_lists():
    old_list = f(6)
    new_list = f(1, 6) + old_list[1:3] + old_list[4:5] + f(2, 7)
    expected = [
        UpdDel(1),
        UpdIns(f(1, 6)),
        UpdKeep(2),
        UpdDel(1),
        UpdKeep(1),
        UpdDel(1),
        UpdIns(f(2, 7)),
    ]
    assert decode(format_update(old_list, new_list)) == expected


def sortkey(name):
    # Define the sorting key for names here
    return name.lower()


@pytest.fixture()
def state():
    entries = [
        FileEntry(0, "", "root", 0, 0, 0),
        FileEntry(1, "bar", "bar", 0, 0, 0),
        FileEntry(2, "baz", "bar/baz", 0, 0, 0),
        FileEntry(1, "foo", "foo", 0, 0, 0),
        FileEntry(1, "xxx", "xxx", 0, 0, 0),
        FileEntry(2, "yyy", "xxx/yyy", 0, 0, 1),
    ]
    s = State()
    s._listing = entries
    return s


def test_existing_directory(state):
    path = PurePosixPath("bar")
    expected_slice = slice(1, 3)  # Includes 'bar' and 'baz'
    assert state._slice(path) == expected_slice


def test_existing_file(state):
    path = PurePosixPath("xxx/yyy")
    expected_slice = slice(5, 6)  # Only includes 'yyy'
    assert state._slice(path) == expected_slice


def test_nonexistent_directory(state):
    path = PurePosixPath("zzz")
    expected_slice = slice(6, 6)  # 'zzz' would be inserted at end
    assert state._slice(path) == expected_slice


def test_nonexistent_file(state):
    path = (PurePosixPath("bar/mmm"), 1)
    expected_slice = slice(3, 3)  # A file would be inserted after 'baz' under 'bar'
    assert state._slice(path) == expected_slice


def test_root_directory(state):
    path = PurePosixPath()
    expected_slice = slice(0, 6)  # Entire tree
    assert state._slice(path) == expected_slice


def test_directory_with_subdirs_and_files(state):
    path = PurePosixPath("xxx")
    expected_slice = slice(4, 6)  # Includes 'xxx' and 'yyy'
    assert state._slice(path) == expected_slice