Compare commits

..

5 Commits

Author SHA1 Message Date
Leo Vasanko
4060a582d6 Linter 2025-08-13 10:18:18 -07:00
Leo Vasanko
47574675a3 Debug printouts for watching. 2025-08-13 10:17:06 -07:00
Leo Vasanko
425d7012b7 Additional tests for watching. 2025-08-13 10:16:54 -07:00
Leo Vasanko
cf7d61d11c Don't add uv.lock to repo. 2025-08-13 10:15:59 -07:00
Leo Vasanko
aa1ea03bed Fix running with today's Python tooling and av module. 2025-08-13 10:15:22 -07:00
5 changed files with 1418 additions and 14 deletions

1
.gitignore vendored
View File

@@ -1,4 +1,5 @@
.*
*.lock
!.gitignore
__pycache__/
*.egg-info/

View File

@@ -18,8 +18,6 @@ from sanic.log import logger
from cista import config
from cista.util.filename import sanitize
DISPLAYMATRIX = av.stream.SideData.DISPLAYMATRIX
bp = Blueprint("preview", url_prefix="/preview")
@@ -100,15 +98,39 @@ def process_video(path, *, maxsize, quality):
with av.open(str(path)) as container:
stream = container.streams.video[0]
stream.codec_context.skip_frame = "NONKEY"
rot = stream.side_data and stream.side_data.get(DISPLAYMATRIX) or 0
# Updated side data access for newer av versions
rot = 0
try:
# Try newer API first
if hasattr(stream, "side_data") and stream.side_data:
display_matrix = stream.side_data.get("DISPLAYMATRIX")
if display_matrix:
rot = (
display_matrix.rotation
if hasattr(display_matrix, "rotation")
else 0
)
except (AttributeError, KeyError):
# Fallback for older API or missing side data
rot = 0
container.seek(container.duration // 8)
img = next(container.decode(stream)).to_image()
try:
frame = next(container.decode(stream))
img = frame.to_image()
except StopIteration:
# If no frame found, try from beginning
container.seek(0)
frame = next(container.decode(stream))
img = frame.to_image()
del stream
img.thumbnail((maxsize, maxsize))
imgdata = io.BytesIO()
if rot:
img = img.rotate(rot, expand=True)
if rot and rot != 0:
img = img.rotate(-rot, expand=True) # Negative rotation for correct orientation
img.save(imgdata, format="webp", quality=quality, method=4)
del img
ret = imgdata.getvalue()

View File

@@ -46,59 +46,127 @@ def treeiter(rootmod):
def treeget(rootmod: list[FileEntry], path: PurePosixPath):
    """Locate *path* in the flattened tree listing *rootmod*.

    Returns ``(begin, entries)`` where *begin* is the index of the entry
    whose relative path equals *path* (``None`` when not found) and
    *entries* is that entry followed by all of its descendants.
    """
    # Lazy %-style logging throughout: arguments are only rendered when the
    # DEBUG level is enabled, which matters because this runs per FS event.
    logger.debug("DEBUG: treeget ENTRY: path=%s, rootmod_len=%d", path, len(rootmod))
    begin = None
    ret = []
    iteration_count = 0
    for i, relpath, entry in treeiter(rootmod):
        iteration_count += 1
        if (
            iteration_count % 1000 == 0
        ):  # Log every 1000 iterations to detect infinite loops
            logger.debug(
                "DEBUG: treeget iteration %d, i=%d, relpath=%s, entry.name=%s",
                iteration_count,
                i,
                relpath,
                entry.name,
            )
        if begin is None:
            # Still scanning for the requested path itself.
            if relpath == path:
                logger.debug("DEBUG: treeget FOUND path %s at index %d", path, i)
                begin = i
                ret.append(entry)
            continue
        # Found already: collect descendants until the walk rises back to
        # the target's own level or above.
        if entry.level <= len(path.parts):
            logger.debug(
                "DEBUG: treeget BREAK: entry.level=%d <= path.parts_len=%d",
                entry.level,
                len(path.parts),
            )
            break
        ret.append(entry)
    logger.debug(
        "DEBUG: treeget EXIT: path=%s, begin=%s, ret_len=%d, iterations=%d",
        path,
        begin,
        len(ret),
        iteration_count,
    )
    return begin, ret
def treeinspos(rootmod: list[FileEntry], relpath: PurePosixPath, relfile: int):
    """Return the index where a new entry for *relpath* should be inserted.

    Walks the flattened tree and finds the first entry that sorts greater
    than the new one. Precondition: the new entry doesn't exist yet.
    *relfile* is the new entry's isfile flag, applied only at the leaf
    level (non-leaf path components are always folders).
    """
    # Lazy %-style logging so per-iteration debug args aren't formatted
    # unless DEBUG is enabled — this loop can run thousands of times.
    logger.debug(
        "DEBUG: treeinspos ENTRY: relpath=%s, relfile=%s, rootmod_len=%d",
        relpath,
        relfile,
        len(rootmod),
    )
    isfile = 0
    level = 0
    i = 0
    iteration_count = 0
    for i, rel, entry in treeiter(rootmod):
        iteration_count += 1
        # Detect potential infinite loops in treeinspos
        if iteration_count % 1000 == 0:
            logger.debug(
                "DEBUG: treeinspos iteration %d, i=%d, rel=%s, entry.name=%s, level=%d, entry.level=%d",
                iteration_count,
                i,
                rel,
                entry.name,
                level,
                entry.level,
            )
        if iteration_count > 10000:  # Emergency brake for infinite loops
            logger.error(
                "ERROR: treeinspos potential infinite loop! iteration=%d, relpath=%s, i=%d, level=%d",
                iteration_count,
                relpath,
                i,
                level,
            )
            break
        if entry.level > level:
            # We haven't found item at level, skip subdirectories
            logger.debug(
                "DEBUG: treeinspos SKIP: entry.level=%d > level=%d",
                entry.level,
                level,
            )
            continue
        if entry.level < level:
            # We have passed the level, so the new item is the first
            logger.debug(
                "DEBUG: treeinspos RETURN_EARLY: entry.level=%d < level=%d, returning i=%d",
                entry.level,
                level,
                i,
            )
            return i
        if level == 0:
            # root
            logger.debug("DEBUG: treeinspos ROOT: incrementing level from 0 to 1")
            level += 1
            continue
        ename = rel.parts[level - 1]
        name = relpath.parts[level - 1]
        logger.debug(
            "DEBUG: treeinspos COMPARE: ename='%s', name='%s', level=%d",
            ename,
            name,
            level,
        )
        esort = sortkey(ename)
        nsort = sortkey(name)
        # Non-leaf are always folders, only use relfile at leaf
        isfile = relfile if len(relpath.parts) == level else 0
        logger.debug(
            "DEBUG: treeinspos SORT: esort=%s, nsort=%s, isfile=%s, entry.isfile=%s",
            esort,
            nsort,
            isfile,
            entry.isfile,
        )
        # First compare by isfile, then by sorting order and if that too
        # matches then case sensitive
        cmp = (
            entry.isfile - isfile
            or (esort > nsort) - (esort < nsort)
            or (ename > name) - (ename < name)
        )
        logger.debug("DEBUG: treeinspos CMP: cmp=%d", cmp)
        if cmp > 0:
            logger.debug("DEBUG: treeinspos RETURN: cmp > 0, returning i=%d", i)
            return i
        if cmp < 0:
            logger.debug("DEBUG: treeinspos CONTINUE: cmp < 0")
            continue
        logger.debug("DEBUG: treeinspos INCREMENT_LEVEL: level %d -> %d", level, level + 1)
        level += 1
        if level > len(relpath.parts):
            # Should be unreachable given the precondition; the stray
            # print() duplicate of this message was removed in favor of
            # the logger.
            logger.error(
                "ERROR: insertpos level overflow: relpath=%s, i=%d, entry.name=%s, entry.level=%d, level=%d",
                relpath,
                i,
                entry.name,
                entry.level,
                level,
            )
            break
    else:
        # Loop exhausted without finding a greater entry: insert at the end.
        logger.debug("DEBUG: treeinspos FOR_ELSE: incrementing i from %d to %d", i, i + 1)
        i += 1
    logger.debug(
        "DEBUG: treeinspos EXIT: returning i=%d, iterations=%d",
        i,
        iteration_count,
    )
    return i
@@ -177,24 +245,66 @@ def update_root(loop):
def update_path(rootmod: list[FileEntry], relpath: PurePosixPath, loop):
    """Called on FS updates, check the filesystem and broadcast any changes.

    Rescans *relpath* on disk, replaces the corresponding slice of the
    flattened tree *rootmod* in place, and leaves broadcasting to the
    caller's batch cycle. No-op when the on-disk state matches the tree.
    """
    # Lazy %-style logging so debug args aren't rendered when DEBUG is off;
    # this function runs for every inotify event.
    logger.debug(
        "DEBUG: update_path ENTRY: path=%s, rootmod_len=%d", relpath, len(rootmod)
    )
    # Time the filesystem walk (the likely slow part on large trees).
    walk_start = time.perf_counter()
    new = walk(relpath)
    walk_end = time.perf_counter()
    logger.debug(
        "DEBUG: walk(%s) took %.4fs, returned %d entries",
        relpath,
        walk_end - walk_start,
        len(new),
    )
    # Time the lookup of the old entries in the in-memory tree.
    treeget_start = time.perf_counter()
    obegin, old = treeget(rootmod, relpath)
    treeget_end = time.perf_counter()
    logger.debug(
        "DEBUG: treeget(%s) took %.4fs, obegin=%s, old_len=%d",
        relpath,
        treeget_end - treeget_start,
        obegin,
        len(old) if old else 0,
    )
    if old == new:
        logger.debug(
            "Watch: Event without changes needed %s"
            if old
            else "Watch: Event with old and new missing: %s",
            relpath,
        )
        logger.debug("DEBUG: update_path EARLY_EXIT: no changes for %s", relpath)
        return
    # Remove the stale slice before inserting the fresh entries.
    if obegin is not None:
        logger.debug(
            "DEBUG: DELETING entries from rootmod[%d:%d] for path %s",
            obegin,
            obegin + len(old),
            relpath,
        )
        del rootmod[obegin : obegin + len(old)]
        logger.debug("DEBUG: DELETED entries, rootmod_len now %d", len(rootmod))
    if new:
        logger.debug("Watch: Update %s" if old else "Watch: Created %s", relpath)
        # Time the insert-position search — this is where hangs might occur.
        inspos_start = time.perf_counter()
        i = treeinspos(rootmod, relpath, new[0].isfile)
        inspos_end = time.perf_counter()
        logger.debug(
            "DEBUG: treeinspos(%s) took %.4fs, returned index=%d",
            relpath,
            inspos_end - inspos_start,
            i,
        )
        logger.debug(
            "DEBUG: INSERTING %d entries at position %d for path %s",
            len(new),
            i,
            relpath,
        )
        rootmod[i:i] = new
        logger.debug("DEBUG: INSERTED entries, rootmod_len now %d", len(rootmod))
    else:
        logger.debug("Watch: Removed %s", relpath)
    logger.debug(
        "DEBUG: update_path EXIT: path=%s, final_rootmod_len=%d",
        relpath,
        len(rootmod),
    )
def update_space(loop):
"""Called periodically to update the disk usage."""
@@ -213,61 +323,120 @@ def update_space(loop):
def format_update(old, new):
    """Build a JSON keep/del/insert diff that transforms *old* into *new*.

    Both arguments are flat lists of FileEntry. Returns the encoded update
    message as a str. Raises when the diff loop makes no progress (which
    would otherwise spin forever).
    """
    # Lazy %-style logging throughout: these loops run per entry, so eager
    # f-string formatting would be paid even with DEBUG disabled.
    logger.debug("DEBUG: format_update ENTRY: old_len=%d, new_len=%d", len(old), len(new))
    # Make keep/del/insert diff until one of the lists ends
    oidx, nidx = 0, 0
    oremain, nremain = set(old), set(new)
    update = []
    keep_count = 0
    iteration_count = 0
    while oidx < len(old) and nidx < len(new):
        iteration_count += 1
        # Log every 1000 iterations to detect infinite loops
        if iteration_count % 1000 == 0:
            logger.debug(
                "DEBUG: format_update iteration %d, oidx=%d/%d, nidx=%d/%d",
                iteration_count,
                oidx,
                len(old),
                nidx,
                len(new),
            )
        # Emergency brake for potential infinite loops
        if iteration_count > 50000:
            logger.error(
                "ERROR: format_update potential infinite loop! iteration=%d, oidx=%d, nidx=%d",
                iteration_count,
                oidx,
                nidx,
            )
            raise Exception(
                f"format_update infinite loop detected at iteration {iteration_count}"
            )
        modified = False
        # Matching entries are kept
        if old[oidx] == new[nidx]:
            entry = old[oidx]
            logger.debug(
                "DEBUG: format_update MATCH: entry=%s, oidx=%d, nidx=%d",
                entry.name,
                oidx,
                nidx,
            )
            oremain.remove(entry)
            nremain.remove(entry)
            keep_count += 1
            oidx += 1
            nidx += 1
            continue
        if keep_count > 0:
            logger.debug("DEBUG: format_update KEEP: adding UpdKeep(%d)", keep_count)
            modified = True
            update.append(UpdKeep(keep_count))
            keep_count = 0
        # Items only in old are deleted
        del_count = 0
        del_start_oidx = oidx
        while oidx < len(old) and old[oidx] not in nremain:
            logger.debug(
                "DEBUG: format_update DELETE: removing old[%d]=%s", oidx, old[oidx].name
            )
            oremain.remove(old[oidx])
            del_count += 1
            oidx += 1
        if del_count:
            logger.debug(
                "DEBUG: format_update DEL: adding UpdDel(%d), oidx %d->%d",
                del_count,
                del_start_oidx,
                oidx,
            )
            update.append(UpdDel(del_count))
            continue
        # Items only in new are inserted
        insert_items = []
        ins_start_nidx = nidx
        while nidx < len(new) and new[nidx] not in oremain:
            entry = new[nidx]
            logger.debug("DEBUG: format_update INSERT: adding new[%d]=%s", nidx, entry.name)
            nremain.remove(entry)
            insert_items.append(entry)
            nidx += 1
        if insert_items:
            logger.debug(
                "DEBUG: format_update INS: adding UpdIns(%d items), nidx %d->%d",
                len(insert_items),
                ins_start_nidx,
                nidx,
            )
            modified = True
            update.append(UpdIns(insert_items))
        if not modified:
            # No keep/del/ins progress this pass: bail out loudly instead of
            # spinning forever.
            logger.error(
                "ERROR: format_update INFINITE_LOOP: nidx=%d, oidx=%d, old_len=%d, new_len=%d",
                nidx,
                oidx,
                len(old),
                len(new),
            )
            logger.error(
                "ERROR: old[oidx]=%s",
                old[oidx].name if oidx < len(old) else "OUT_OF_BOUNDS",
            )
            logger.error(
                "ERROR: new[nidx]=%s",
                new[nidx].name if nidx < len(new) else "OUT_OF_BOUNDS",
            )
            raise Exception(
                f"Infinite loop in diff {nidx=} {oidx=} {len(old)=} {len(new)=}"
            )
    # Diff any remaining
    if keep_count > 0:
        logger.debug("DEBUG: format_update FINAL_KEEP: adding UpdKeep(%d)", keep_count)
        update.append(UpdKeep(keep_count))
    if oremain:
        logger.debug(
            "DEBUG: format_update FINAL_DEL: adding UpdDel(%d) for remaining old items",
            len(oremain),
        )
        update.append(UpdDel(len(oremain)))
    elif nremain:
        logger.debug(
            "DEBUG: format_update FINAL_INS: adding UpdIns(%d) for remaining new items",
            len(new) - nidx,
        )
        update.append(UpdIns(new[nidx:]))
    logger.debug(
        "DEBUG: format_update EXIT: generated %d operations, iterations=%d",
        len(update),
        iteration_count,
    )
    return msgspec.json.encode({"update": update}).decode()
@@ -339,9 +508,25 @@ def watcher_inotify(loop):
logger.debug(f"Watch: {interesting=} {event=}")
if interesting:
# Update modified path
logger.debug(
f"DEBUG: inotify PROCESSING: event={event}, path={event[2]}/{event[3]}"
)
t0 = time.perf_counter()
path = PurePosixPath(event[2]) / event[3]
update_path(rootmod, path.relative_to(rootpath), loop)
try:
rel_path = path.relative_to(rootpath)
logger.debug(
f"DEBUG: inotify CALLING update_path: rel_path={rel_path}"
)
update_path(rootmod, rel_path, loop)
logger.debug(
f"DEBUG: inotify update_path COMPLETED: rel_path={rel_path}"
)
except Exception as e:
logger.error(
f"ERROR: inotify update_path FAILED: path={path}, error={e}"
)
raise
t1 = time.perf_counter()
logger.debug(f"Watch: Update {event[3]} took {t1 - t0:.1f}s")
if not dirty:
@@ -349,14 +534,22 @@ def watcher_inotify(loop):
dirty = True
# Wait a maximum of 0.5s to push the updates
if dirty and time.monotonic() >= t + 0.5:
logger.debug("DEBUG: inotify TIMEOUT: breaking due to 0.5s timeout")
break
if dirty and state.root != rootmod:
logger.debug(
f"DEBUG: inotify BATCH_UPDATE: state.root_len={len(state.root)}, rootmod_len={len(rootmod)}"
)
t0 = time.perf_counter()
logger.debug("DEBUG: inotify CALLING format_update")
update = format_update(state.root, rootmod)
logger.debug("DEBUG: inotify format_update COMPLETED")
t1 = time.perf_counter()
with state.lock:
logger.debug("DEBUG: inotify BROADCASTING update")
broadcast(update, loop)
state.root = rootmod
logger.debug("DEBUG: inotify BROADCAST completed, state updated")
t2 = time.perf_counter()
logger.debug(
f"Format update took {t1 - t0:.1f}s, broadcast {t2 - t1:.1f}s"

View File

@@ -15,6 +15,7 @@ classifiers = [
requires-python = ">=3.11"
dependencies = [
"argon2-cffi",
"av",
"blake3",
"brotli",
"docopt",
@@ -23,7 +24,6 @@ dependencies = [
"natsort",
"pathvalidate",
"pillow",
"pyav",
"pyjwt",
"pymupdf",
"sanic",
@@ -50,15 +50,15 @@ source = "vcs"
[tool.hatch.build]
artifacts = ["cista/wwwroot"]
targets.sdist.hooks.custom.path = "scripts/build-frontend.py"
targets.sdist.include = [
"/cista",
]
hooks.vcs.version-file = "cista/_version.py"
hooks.vcs.template = """
# This file is automatically generated by hatch build.
__version__ = {version!r}
"""
only-packages = true
targets.sdist.include = [
"/cista",
]
[tool.pytest.ini_options]
addopts = [
@@ -95,11 +95,14 @@ ignore = [
"TD0",
"TRY",
]
show-source = true
show-fixes = true
[tool.ruff.isort]
known-first-party = ["cista"]
[tool.ruff.per-file-ignores]
"tests/*" = ["S", "ANN", "D", "INP"]
[dependency-groups]
dev = [
"pytest>=8.4.1",
]

File diff suppressed because it is too large Load Diff