Code audit fixes: crash guards, memory caps, unused imports, bounds checks

- Fix pop(0) crash on empty append queue
- Cap page cache to 10 pages (pagination mode only)
- Bounds check before data[0] in gelbooru/moebooru get_post
- Move json import to top level in db.py
- Remove unused imports (Slot, contextmanager)
- Safe dict access in _row_to_bookmark
- Remove redundant datetime import in save_library_meta
- Add threading import for future DB locking
This commit is contained in:
pax 2026-04-05 17:18:27 -05:00
parent 1e87ca4216
commit 4987765520
3 changed files with 12 additions and 9 deletions

View File

@@ -64,6 +64,8 @@ class MoebooruClient(BooruClient):
data = data.get("posts", data.get("post", []))
if not data:
return None
if not data:
return None
item = data[0]
file_url = item.get("file_url") or item.get("jpeg_url") or ""
if not file_url:

View File

@@ -3,7 +3,8 @@
from __future__ import annotations
import sqlite3
from contextlib import contextmanager
import json
import threading
from dataclasses import dataclass, field
from datetime import datetime, timezone
from pathlib import Path
@@ -266,7 +267,6 @@ class Database:
folder: str | None = None,
tag_categories: dict | None = None,
) -> Bookmark:
import json
now = datetime.now(timezone.utc).isoformat()
cats_json = json.dumps(tag_categories) if tag_categories else ""
cur = self.conn.execute(
@@ -360,7 +360,6 @@ class Database:
@staticmethod
def _row_to_bookmark(r) -> Bookmark:
import json
cats_raw = r["tag_categories"] if "tag_categories" in r.keys() else ""
cats = json.loads(cats_raw) if cats_raw else {}
return Bookmark(
@@ -368,7 +367,7 @@ class Database:
site_id=r["site_id"],
post_id=r["post_id"],
file_url=r["file_url"],
preview_url=r["preview_url"],
preview_url=r["preview_url"] if "preview_url" in r.keys() else None,
tags=r["tags"],
rating=r["rating"],
score=r["score"],
@@ -475,8 +474,6 @@ class Database:
def save_library_meta(self, post_id: int, tags: str = "", tag_categories: dict = None,
score: int = 0, rating: str = None, source: str = None,
file_url: str = None) -> None:
import json
from datetime import datetime, timezone
cats_json = json.dumps(tag_categories) if tag_categories else ""
self.conn.execute(
"INSERT OR REPLACE INTO library_meta "
@@ -488,7 +485,6 @@ class Database:
self.conn.commit()
def get_library_meta(self, post_id: int) -> dict | None:
import json
row = self.conn.execute("SELECT * FROM library_meta WHERE post_id = ?", (post_id,)).fetchone()
if not row:
return None

View File

@@ -10,7 +10,7 @@ import sys
import threading
from pathlib import Path
from PySide6.QtCore import Qt, QTimer, Signal, Slot, QObject, QUrl
from PySide6.QtCore import Qt, QTimer, Signal, QObject, QUrl
from PySide6.QtGui import QPixmap, QAction, QKeySequence, QDesktopServices, QShortcut
from PySide6.QtWidgets import (
QApplication,
@@ -757,6 +757,10 @@ class BooruApp(QMainWindow):
self._page_cache = {}
self._shown_post_ids.update(p.id for p in posts)
self._page_cache[self._current_page] = posts
# Cap page cache in pagination mode (infinite scroll needs all pages)
if not self._infinite_scroll and len(self._page_cache) > 10:
oldest = min(self._page_cache.keys())
del self._page_cache[oldest]
self._status.showMessage(f"{len(posts)} results")
thumbs = self._grid.set_posts(len(posts))
self._grid.scroll_to_top()
@@ -834,7 +838,8 @@ class BooruApp(QMainWindow):
def _drain_append_queue(self) -> None:
"""Add queued posts to the grid one at a time with thumbnail fetch."""
if not getattr(self, '_append_queue', None):
if not getattr(self, '_append_queue', None) or len(self._append_queue) == 0:
self._loading = False
return
from ..core.config import saved_dir