Compare commits
No commits in common. "main" and "v0.2.5" have entirely different histories.
55
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
55
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
@ -1,55 +0,0 @@
|
|||||||
name: Bug Report
|
|
||||||
description: Something broken or misbehaving
|
|
||||||
title: "[BUG] "
|
|
||||||
labels: ["bug"]
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
id: summary
|
|
||||||
attributes:
|
|
||||||
label: Summary
|
|
||||||
description: What's broken?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: repro
|
|
||||||
attributes:
|
|
||||||
label: Steps to reproduce
|
|
||||||
value: |
|
|
||||||
1.
|
|
||||||
2.
|
|
||||||
3.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: expected
|
|
||||||
attributes:
|
|
||||||
label: Expected vs actual behavior
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: os
|
|
||||||
attributes:
|
|
||||||
label: OS
|
|
||||||
options: [Linux, Windows, Other]
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: version
|
|
||||||
attributes:
|
|
||||||
label: booru-viewer version / commit
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: python
|
|
||||||
attributes:
|
|
||||||
label: Python & PySide6 version
|
|
||||||
- type: dropdown
|
|
||||||
id: backend
|
|
||||||
attributes:
|
|
||||||
label: Booru backend
|
|
||||||
options: [Danbooru, Gelbooru, Safebooru, e621, Other]
|
|
||||||
- type: textarea
|
|
||||||
id: logs
|
|
||||||
attributes:
|
|
||||||
label: Logs / traceback
|
|
||||||
render: shell
|
|
||||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
8
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -1,8 +0,0 @@
|
|||||||
blank_issues_enabled: false
|
|
||||||
contact_links:
|
|
||||||
- name: Questions and general discussion
|
|
||||||
url: https://github.com/pxlwh/booru-viewer/discussions
|
|
||||||
about: For usage questions, setup help, and general chat that isn't a bug
|
|
||||||
- name: Gitea mirror
|
|
||||||
url: https://git.pax.moe/pax/booru-viewer
|
|
||||||
about: Primary development repo — same codebase, also accepts issues
|
|
||||||
22
.github/ISSUE_TEMPLATE/docs.yaml
vendored
22
.github/ISSUE_TEMPLATE/docs.yaml
vendored
@ -1,22 +0,0 @@
|
|||||||
name: Documentation Issue
|
|
||||||
description: Typos, unclear sections, missing docs, broken links
|
|
||||||
title: "[DOCS] "
|
|
||||||
labels: ["documentation"]
|
|
||||||
body:
|
|
||||||
- type: input
|
|
||||||
id: file
|
|
||||||
attributes:
|
|
||||||
label: File or page
|
|
||||||
description: README.md, themes/README.md, HYPRLAND.md, KEYBINDS.md, in-app help, etc.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: problem
|
|
||||||
attributes:
|
|
||||||
label: What's wrong or missing?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: suggestion
|
|
||||||
attributes:
|
|
||||||
label: Suggested fix or addition
|
|
||||||
28
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
28
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
@ -1,28 +0,0 @@
|
|||||||
name: Feature Request
|
|
||||||
description: Suggest a new feature or enhancement
|
|
||||||
title: "[FEAT] "
|
|
||||||
labels: ["enhancement"]
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
id: problem
|
|
||||||
attributes:
|
|
||||||
label: Problem
|
|
||||||
description: What's the use case or pain point?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: proposal
|
|
||||||
attributes:
|
|
||||||
label: Proposed solution
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: alternatives
|
|
||||||
attributes:
|
|
||||||
label: Alternatives considered
|
|
||||||
- type: checkboxes
|
|
||||||
id: scope
|
|
||||||
attributes:
|
|
||||||
label: Scope check
|
|
||||||
options:
|
|
||||||
- label: I've checked this isn't already implemented or tracked
|
|
||||||
70
.github/ISSUE_TEMPLATE/hyprland_wayland.yaml
vendored
70
.github/ISSUE_TEMPLATE/hyprland_wayland.yaml
vendored
@ -1,70 +0,0 @@
|
|||||||
name: Hyprland / Wayland Issue
|
|
||||||
description: Compositor-specific issues (window positioning, popout math, Waybar, multi-monitor)
|
|
||||||
title: "[HYPR] "
|
|
||||||
labels: ["hyprland", "wayland"]
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
id: summary
|
|
||||||
attributes:
|
|
||||||
label: What's happening?
|
|
||||||
description: Describe the compositor-specific behavior you're seeing
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: compositor
|
|
||||||
attributes:
|
|
||||||
label: Compositor
|
|
||||||
options: [Hyprland, Sway, KDE/KWin Wayland, GNOME/Mutter Wayland, Other Wayland, Other]
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: compositor_version
|
|
||||||
attributes:
|
|
||||||
label: Compositor version
|
|
||||||
description: e.g. Hyprland v0.42.0
|
|
||||||
- type: dropdown
|
|
||||||
id: monitors
|
|
||||||
attributes:
|
|
||||||
label: Monitor setup
|
|
||||||
options: [Single monitor, Dual monitor, 3+ monitors, Mixed scaling, Mixed refresh rates]
|
|
||||||
- type: dropdown
|
|
||||||
id: area
|
|
||||||
attributes:
|
|
||||||
label: What area is affected?
|
|
||||||
options:
|
|
||||||
- Main window geometry / position
|
|
||||||
- Popout window positioning
|
|
||||||
- Popout aspect-ratio lock
|
|
||||||
- Popout anchor (resize pivot)
|
|
||||||
- Context menu / popup positioning
|
|
||||||
- Waybar exclusive zone handling
|
|
||||||
- Fullscreen (F11)
|
|
||||||
- Privacy screen overlay
|
|
||||||
- Other
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: envvars
|
|
||||||
attributes:
|
|
||||||
label: Relevant env vars set
|
|
||||||
description: BOORU_VIEWER_NO_HYPR_RULES, BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK, etc.
|
|
||||||
placeholder: "BOORU_VIEWER_NO_HYPR_RULES=1"
|
|
||||||
render: shell
|
|
||||||
- type: textarea
|
|
||||||
id: windowrules
|
|
||||||
attributes:
|
|
||||||
label: Any windowrules targeting booru-viewer?
|
|
||||||
description: Paste relevant rules from your compositor config
|
|
||||||
render: shell
|
|
||||||
- type: textarea
|
|
||||||
id: hyprctl
|
|
||||||
attributes:
|
|
||||||
label: hyprctl output (if applicable)
|
|
||||||
description: "`hyprctl monitors -j`, `hyprctl clients -j` filtered to booru-viewer"
|
|
||||||
render: json
|
|
||||||
- type: input
|
|
||||||
id: version
|
|
||||||
attributes:
|
|
||||||
label: booru-viewer version / commit
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
72
.github/ISSUE_TEMPLATE/performance.yaml
vendored
72
.github/ISSUE_TEMPLATE/performance.yaml
vendored
@ -1,72 +0,0 @@
|
|||||||
name: Performance Issue
|
|
||||||
description: Slowdowns, lag, high memory/CPU, UI freezes (distinct from broken features)
|
|
||||||
title: "[PERF] "
|
|
||||||
labels: ["performance"]
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
id: summary
|
|
||||||
attributes:
|
|
||||||
label: What's slow?
|
|
||||||
description: Describe what feels sluggish and what you'd expect
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: area
|
|
||||||
attributes:
|
|
||||||
label: What area?
|
|
||||||
options:
|
|
||||||
- Grid scroll / infinite scroll
|
|
||||||
- Thumbnail loading
|
|
||||||
- Search / API requests
|
|
||||||
- Image preview / pan-zoom
|
|
||||||
- Video playback
|
|
||||||
- Popout open / close
|
|
||||||
- Popout navigation
|
|
||||||
- Settings / dialogs
|
|
||||||
- Startup
|
|
||||||
- Other
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: repro
|
|
||||||
attributes:
|
|
||||||
label: Steps to reproduce
|
|
||||||
value: |
|
|
||||||
1.
|
|
||||||
2.
|
|
||||||
3.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: timings
|
|
||||||
attributes:
|
|
||||||
label: Approximate timings
|
|
||||||
description: How long does the slow operation take? How long would you expect?
|
|
||||||
- type: input
|
|
||||||
id: library_size
|
|
||||||
attributes:
|
|
||||||
label: Library / bookmark size
|
|
||||||
description: Number of saved files and/or bookmarks, if relevant
|
|
||||||
- type: dropdown
|
|
||||||
id: os
|
|
||||||
attributes:
|
|
||||||
label: OS
|
|
||||||
options: [Linux, Windows, Other]
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: hardware
|
|
||||||
attributes:
|
|
||||||
label: Hardware (CPU / RAM / GPU)
|
|
||||||
- type: textarea
|
|
||||||
id: logs
|
|
||||||
attributes:
|
|
||||||
label: Relevant DEBUG logs
|
|
||||||
description: Launch with Ctrl+L open and reproduce — paste anything that looks slow
|
|
||||||
render: shell
|
|
||||||
- type: input
|
|
||||||
id: version
|
|
||||||
attributes:
|
|
||||||
label: booru-viewer version / commit
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
26
.github/ISSUE_TEMPLATE/site_support.yaml
vendored
26
.github/ISSUE_TEMPLATE/site_support.yaml
vendored
@ -1,26 +0,0 @@
|
|||||||
name: Site Support Request
|
|
||||||
description: Request support for a new booru backend
|
|
||||||
title: "[SITE] "
|
|
||||||
labels: ["site-support"]
|
|
||||||
body:
|
|
||||||
- type: input
|
|
||||||
id: site
|
|
||||||
attributes:
|
|
||||||
label: Site name and URL
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
id: api
|
|
||||||
attributes:
|
|
||||||
label: API type
|
|
||||||
options: [Danbooru-compatible, Gelbooru-compatible, Moebooru, Shimmie2, Unknown, Other]
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: api_docs
|
|
||||||
attributes:
|
|
||||||
label: Link to API documentation (if any)
|
|
||||||
- type: textarea
|
|
||||||
id: notes
|
|
||||||
attributes:
|
|
||||||
label: Auth, rate limits, or quirks worth knowing
|
|
||||||
30
.github/ISSUE_TEMPLATE/theme_submission.yaml
vendored
30
.github/ISSUE_TEMPLATE/theme_submission.yaml
vendored
@ -1,30 +0,0 @@
|
|||||||
name: Theme Submission
|
|
||||||
description: Submit a palette for inclusion
|
|
||||||
title: "[THEME] "
|
|
||||||
labels: ["theme"]
|
|
||||||
body:
|
|
||||||
- type: input
|
|
||||||
id: name
|
|
||||||
attributes:
|
|
||||||
label: Theme name
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: palette
|
|
||||||
attributes:
|
|
||||||
label: Palette file contents
|
|
||||||
description: Paste the full @palette block or the complete .qss file
|
|
||||||
render: css
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
id: screenshot
|
|
||||||
attributes:
|
|
||||||
label: Screenshot URL
|
|
||||||
- type: checkboxes
|
|
||||||
id: license
|
|
||||||
attributes:
|
|
||||||
label: Licensing
|
|
||||||
options:
|
|
||||||
- label: I'm okay with this being distributed under the project's license
|
|
||||||
required: true
|
|
||||||
39
.github/ISSUE_TEMPLATE/ux_feedback.yaml
vendored
39
.github/ISSUE_TEMPLATE/ux_feedback.yaml
vendored
@ -1,39 +0,0 @@
|
|||||||
name: UX Feedback
|
|
||||||
description: Non-bug UX suggestions, workflow friction, small polish
|
|
||||||
title: "[UX] "
|
|
||||||
labels: ["ux"]
|
|
||||||
body:
|
|
||||||
- type: textarea
|
|
||||||
id: context
|
|
||||||
attributes:
|
|
||||||
label: What were you trying to do?
|
|
||||||
description: The workflow or action where the friction happened
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: friction
|
|
||||||
attributes:
|
|
||||||
label: What felt awkward or wrong?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
id: suggestion
|
|
||||||
attributes:
|
|
||||||
label: What would feel better?
|
|
||||||
description: Optional — a rough idea is fine
|
|
||||||
- type: dropdown
|
|
||||||
id: area
|
|
||||||
attributes:
|
|
||||||
label: Area
|
|
||||||
options:
|
|
||||||
- Grid / thumbnails
|
|
||||||
- Preview pane
|
|
||||||
- Popout window
|
|
||||||
- Top bar / filters
|
|
||||||
- Search
|
|
||||||
- Bookmarks
|
|
||||||
- Library
|
|
||||||
- Settings
|
|
||||||
- Keyboard shortcuts
|
|
||||||
- Theming
|
|
||||||
- Other
|
|
||||||
669
CHANGELOG.md
669
CHANGELOG.md
@ -1,125 +1,11 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
## [Unreleased]
|
|
||||||
|
|
||||||
### Added
|
|
||||||
- Settings → Cache: **Clear Tag Cache** button — wipes the per-site `tag_types` rows (including the `__batch_api_probe__` sentinel) so Gelbooru/Moebooru backends re-probe and re-populate tag categories from scratch. Useful when a stale cache from an earlier build leaves some category types mis-labelled or missing
|
|
||||||
|
|
||||||
### Changed
|
|
||||||
- Thumbnail drag-start threshold raised from 10px to 30px to match the rubber band's gate — small mouse wobbles on a thumb no longer trigger a file drag
|
|
||||||
- Settings → Cache layout: Clear Tag Cache moved into row 1 alongside Clear Thumbnails and Clear Image Cache as a 3-wide non-destructive row; destructive Clear Everything + Evict stay in row 2
|
|
||||||
|
|
||||||
### Fixed
|
|
||||||
- Grid blanked out after splitter drag or tile/float toggle until the next scroll — `ThumbnailGrid.resizeEvent` now re-runs `_recycle_offscreen` against the new geometry so thumbs whose pixmap was evicted by a column-count shift get refreshed into view. **Behavior change:** no more blank grid after resize
|
|
||||||
- Status bar overwrote the per-post info set by `_on_post_selected` with `"N results — Loaded"` the moment the image finished downloading, hiding tag counts / post ID until the user re-clicked; `on_image_done` now preserves the incoming `info` string
|
|
||||||
- `category_fetcher._do_ensure` no longer permanently flips `_batch_api_works` to False when a transient network error drops a tag-API request mid-call; the unprobed path now routes through `_probe_batch_api`, which distinguishes clean 200-with-zero-matches (structurally broken, flip) from timeout/HTTP-error (transient, retry next call)
|
|
||||||
- Bookmark→library save and bookmark Save As now plumb the active site's `CategoryFetcher` through to the filename template, so `%artist%`/`%character%` tokens render correctly instead of silently dropping out when saving a post that wasn't previewed first
|
|
||||||
- Info panel no longer silently drops tags that failed to land in a cached category — any tag from `post.tag_list` not rendered under a known category section now appears in an "Other" bucket, so partial cache coverage can't make individual tags invisible
|
|
||||||
- `BooruClient._request` retries now cover `httpx.RemoteProtocolError` and `httpx.ReadError` in addition to the existing timeout/connect/network set — an overloaded booru that drops the TCP connection mid-response no longer fails the whole search on the first try
|
|
||||||
- VRAM retained when no video is playing — `stop()` now frees the GL render context (textures + FBOs) instead of just dropping the hwdec surface pool. Context is recreated lazily on next `play_file()` via `ensure_gl_init()` (~5ms, invisible behind network fetch)
|
|
||||||
|
|
||||||
### Refactored
|
|
||||||
- `category_fetcher` batch tag-API params are now built by a shared `_build_tag_api_params` helper instead of duplicated across `fetch_via_tag_api` and `_probe_batch_api`
|
|
||||||
- `detect.detect_site_type` — removed the leftover `if True:` indent marker; no behavior change
|
|
||||||
- `core.http.make_client` — single constructor for the three `httpx.AsyncClient` instances (cache download pool, API pool, detect probe). Each call site still keeps its own singleton and connection pool; only the construction is shared
|
|
||||||
- Silent `except: pass` sites in `popout/window`, `video_player`, and `window_state` now carry one-line comments naming the absorbed failure and the graceful fallback (or were downgraded to `log.debug(..., exc_info=True)`). No behavior change
|
|
||||||
- Popout docstrings purged of in-flight-refactor commit markers (`skeleton`, `14a`, `14b`, `future commit`) that referred to now-landed state-machine extraction; load-bearing commit 14b reference kept in `_dispatch_and_apply` as it still protects against reintroducing the bug
|
|
||||||
- `core/cache.py` tempfile cleanup: `BaseException` catch now documents why it's intentionally broader than `Exception`
|
|
||||||
- `api/e621` and `api/moebooru` JSON parse guards narrowed from bare `except` to `ValueError`
|
|
||||||
- `gui/media/video_player.py` — `import time` hoisted to module top
|
|
||||||
- `gui/post_actions.is_in_library` — dead `try/except` stripped
|
|
||||||
|
|
||||||
### Removed
|
|
||||||
- Unused `Favorite` alias in `core/db.py` — callers migrated to `Bookmark` in 0.2.5, nothing referenced the fallback anymore
|
|
||||||
|
|
||||||
## v0.2.7
|
|
||||||
|
|
||||||
### Fixed
|
|
||||||
- Popout always reopened as floating even when tiled at close — Hyprland tiled state is now persisted and restored via `settiled` on reopen
|
|
||||||
- Video stutter on network streams — `cache_pause_initial` was blocking first frame, reverted cache_pause changes and kept larger demuxer buffer
|
|
||||||
- Rubber band selection state getting stuck across interrupted drags
|
|
||||||
- LIKE wildcards in `search_library_meta` not being escaped
|
|
||||||
- Copy File to Clipboard broken in preview pane and popout; added Copy Image URL action
|
|
||||||
- Thumbnail cleanup and Post ID sort broken for templated filenames in library
|
|
||||||
- Save/unsave bookmark UX — no flash on toggle, correct dot indicators
|
|
||||||
- Autocomplete broken for multi-tag queries
|
|
||||||
- Search not resetting to page 1 on new query
|
|
||||||
- Fade animation cleanup crashing `FlowLayout.clear`
|
|
||||||
- Privacy toggle not preserving video pause state
|
|
||||||
- Bookmarks grid not refreshing on unsave
|
|
||||||
- `_cached_path` not set for streaming videos
|
|
||||||
- Standard icon column showing in QMessageBox dialogs
|
|
||||||
- Popout aspect lock for bookmarks now reads actual image dimensions instead of guessing
|
|
||||||
- GPU resource leak on Mesa/Intel drivers — `mpv_render_context_free` now runs with the owning GL context current (NVIDIA tolerated the bug, other drivers did not)
|
|
||||||
- Popout teardown `AttributeError` when `centralWidget()` or `QApplication.instance()` returned `None` during init/shutdown race
|
|
||||||
- Category fetcher rejects XML responses containing `<!DOCTYPE` or `<!ENTITY` before parsing, blocking XXE and billion-laughs payloads from user-configured sites
|
|
||||||
- VRAM not released on popout close — `video_player` now drops the hwdec surface pool on stop and popout runs explicit mpv cleanup before teardown
|
|
||||||
- Popout open animation was being suppressed by the `no_anim` aspect-lock workaround — first fit after open now lets Hyprland's `windowsIn`/`popin` play; subsequent navigation fits still suppress anim to avoid resize flicker
|
|
||||||
- Thumbnail grid blanking out after Hyprland tiled resize until a scroll/click — viewport is now force-updated at the end of `ThumbnailGrid.resizeEvent` so the Qt Wayland buffer stays in sync with the new geometry
|
|
||||||
- Library video thumbnails captured from a black opening frame — mpv now seeks to 10% before the first frame decode so title cards, fade-ins, and codec warmup no longer produce a black thumbnail (delete `~/.cache/booru-viewer/thumbnails/library/` to regenerate existing entries)
|
|
||||||
|
|
||||||
### Changed
|
|
||||||
- Uncached videos now download via httpx in parallel with mpv streaming — file is cached immediately for copy/paste without waiting for playback to finish
|
|
||||||
- Library video thumbnails use mpv instead of ffmpeg — drops the ffmpeg dependency entirely
|
|
||||||
- Save/Unsave from Library mutually exclusive in context menus, preview pane, and popout
|
|
||||||
- S key guard consistent with B/F behavior
|
|
||||||
- Tag count limits removed from info panel
|
|
||||||
- Ctrl+S and Ctrl+D menu shortcuts removed (conflict-prone)
|
|
||||||
- Thumbnail fade-in shortened from 200ms to 80ms
|
|
||||||
- Default demuxer buffer reduced to 50MiB; streaming URLs still get 150MiB
|
|
||||||
- Minimum width set on thumbnail grid
|
|
||||||
- Popout overlay hover zone enlarged
|
|
||||||
- Settings dialog gets an Apply button; thumbnail size and flip layout apply live
|
|
||||||
- Tab selection preserved on view switch
|
|
||||||
- Scroll delta accumulated for volume control and zoom (smoother with hi-res scroll wheels)
|
|
||||||
- Force Fusion widget style when no `custom.qss` is present
|
|
||||||
- Dark Fusion palette applied as fallback when no system Qt theme file (`Trolltech.conf`) is detected; KDE/GNOME users keep their own palette
|
|
||||||
- **Behavior change:** popout re-fits window to current content's aspect and resets zoom when leaving a tiled layout to a different-aspect image or video; previously restored the old floating geometry with the wrong aspect lock
|
|
||||||
|
|
||||||
### Performance
|
|
||||||
- Thumbnails re-decoded from disk on size change instead of holding full pixmaps in memory
|
|
||||||
- Off-screen thumbnail pixmaps recycled (decoded on demand from cached path)
|
|
||||||
- Lookup sets cached across infinite scroll appends; invalidated on bookmark/save
|
|
||||||
- `auto_evict_cache` throttled to once per 30s
|
|
||||||
- Stale prefetch spirals cancelled on new click
|
|
||||||
- Single-pass directory walk in cache eviction functions
|
|
||||||
- GTK dialog platform detection cached instead of recreating Database per call
|
|
||||||
|
|
||||||
### Removed
|
|
||||||
- Dead code: `core/images.py`
|
|
||||||
- `TODO.md`
|
|
||||||
- Unused imports across `main_window`, `grid`, `settings`, `dialogs`, `sites`, `search_controller`, `video_player`, `info_panel`
|
|
||||||
- Dead `mid` variable in `grid.paintEvent`, dead `get_connection_log` import in `settings._build_network_tab`
|
|
||||||
|
|
||||||
## v0.2.6
|
|
||||||
|
|
||||||
### Security: 2026-04-10 audit remediation
|
|
||||||
|
|
||||||
Closes 12 of the 16 findings from the read-only audit at `docs/SECURITY_AUDIT.md`. Two High, four Medium, four Low, and two Informational findings fixed; the four skipped Informational items are documented at the bottom. Each fix is its own commit on the `security/audit-2026-04-10` branch with an `Audit-Ref:` trailer.
|
|
||||||
|
|
||||||
- **#1 SSRF (High)**: every httpx client now installs an event hook that resolves the target host and rejects loopback, RFC1918, link-local (including the 169.254.169.254 cloud-metadata endpoint), CGNAT, unique-local v6, and multicast. Hook fires on every redirect hop, not just the initial request. **Behavior change:** user-configured boorus pointing at private/loopback addresses now fail with `blocked request target ...` instead of being probed. Test Connection on a local booru will be rejected.
|
|
||||||
- **#2 mpv (High)**: the embedded mpv instance is constructed with `ytdl=no`, `load_scripts=no`, and `demuxer_lavf_o=protocol_whitelist=file,http,https,tls,tcp`, plus `input_conf=/dev/null` on POSIX. Closes the yt-dlp delegation surface (CVE-prone extractors invoked on attacker-supplied URLs) and the `concat:`/`subfile:` local-file-read gadget via ffmpeg's lavf demuxer. **Behavior change:** any `file_url` whose host is only handled by yt-dlp (youtube.com, reddit.com, ...) no longer plays. Boorus do not legitimately serve such URLs, so in practice this only affects hostile responses.
|
|
||||||
- **#3 Credential logging (Medium)**: `login`, `api_key`, `user_id`, and `password_hash` are now stripped from URLs and params before any logging path emits them. Single redaction helper in `core/api/_safety.py`, called from the booru-base request hook and from each per-client `log.debug` line.
|
|
||||||
- **#4 DB + data dir permissions (Medium)**: on POSIX, `~/.local/share/booru-viewer/` is now `0o700` and `booru.db` (plus the `-wal`/`-shm` sidecars) is `0o600`. **Behavior change:** existing installs are tightened on next launch. Windows is unchanged — NTFS ACLs handle this separately.
|
|
||||||
- **#5 Lock leak (Medium)**: the per-URL coalesce lock table is capped at 4096 entries with LRU eviction. Eviction skips currently-held locks so a coroutine mid-`async with` can't be ripped out from under itself.
|
|
||||||
- **#6 HTML injection (Medium)**: `post.source` is escaped before insertion into the info-panel rich text. Non-http(s) sources (including `javascript:` and `data:`) render as plain escaped text without an `<a>` tag, so they can't become click targets.
|
|
||||||
- **#7 Windows reserved names (Low)**: `render_filename_template` now prefixes filenames whose stem matches a reserved Windows device name (`CON`, `PRN`, `AUX`, `NUL`, `COM1-9`, `LPT1-9`) with `_`, regardless of host platform. Cross-OS library copies stay safe.
|
|
||||||
- **#8 PIL bomb cap (Low)**: `Image.MAX_IMAGE_PIXELS=256M` moved from `core/cache.py` (where it was a side-effect of import order) to `core/__init__.py`, so any `booru_viewer.core.*` import installs the cap first.
|
|
||||||
- **#9 Dependency bounds (Low)**: upper bounds added to runtime deps in `pyproject.toml` (`httpx<1.0`, `Pillow<12.0`, `PySide6<7.0`, `python-mpv<2.0`). Lock-file generation deferred — see `TODO.md`.
|
|
||||||
- **#10 Early content validation (Low)**: `_do_download` now accumulates the first 16 bytes of the response and validates magic bytes before committing to writing the rest. A hostile server omitting Content-Type previously could burn up to `MAX_DOWNLOAD_BYTES` (500MB) of bandwidth before the post-download check rejected.
|
|
||||||
- **#14 Category fetcher body cap (Informational)**: HTML body the regex walks over in `CategoryFetcher.fetch_post` is truncated at 2MB. Defense in depth — the regex is linear-bounded but a multi-MB hostile body still pegs CPU.
|
|
||||||
- **#16 Logging hook gap (Informational)**: e621 and detect_site_type clients now install the `_log_request` hook so their requests appear in the connection log alongside the base client. Absorbed into the #1 wiring commits since both files were already being touched.
|
|
||||||
|
|
||||||
**Skipped (Wontfix), with reason:**
|
|
||||||
- **#11 64-bit hash truncation**: not exploitable in practice (audit's own words). Fix would change every cache path and require a migration.
|
|
||||||
- **#12 Referer leak through CDN redirects**: intentional — booru CDNs gate downloads on Referer matching. Documented; not fixed.
|
|
||||||
- **#13 hyprctl batch joining**: user is trusted in the threat model and Hyprland controls the field. Informational only.
|
|
||||||
- **#15 dead code in `core/images.py`**: code quality, not security. Out of scope under the no-refactor constraint. Logged in `TODO.md`.
|
|
||||||
|
|
||||||
## v0.2.5
|
## v0.2.5
|
||||||
|
|
||||||
Full UI overhaul (icon buttons, compact top bar, responsive video controls), popout resize-pivot anchor, layout flip, and the main_window.py controller decomposition.
|
Full UI overhaul (icon buttons, compact top bar, responsive video controls), popout resize-pivot anchor, layout flip, and the main_window.py controller decomposition.
|
||||||
|
|
||||||
|
## Changes since v0.2.4
|
||||||
|
|
||||||
### Refactor: main_window.py controller decomposition
|
### Refactor: main_window.py controller decomposition
|
||||||
|
|
||||||
`main_window.py` went from a 3,318-line god-class to a 1,164-line coordinator plus 7 controller modules. Every other subsystem in the codebase had already been decomposed (popout state machine, library save, category fetcher) — BooruApp was the last monolith. 11 commits, pure refactor, no behavior change. Design doc at `docs/MAIN_WINDOW_REFACTOR.md`.
|
`main_window.py` went from a 3,318-line god-class to a 1,164-line coordinator plus 7 controller modules. Every other subsystem in the codebase had already been decomposed (popout state machine, library save, category fetcher) — BooruApp was the last monolith. 11 commits, pure refactor, no behavior change. Design doc at `docs/MAIN_WINDOW_REFACTOR.md`.
|
||||||
@ -200,6 +86,8 @@ All 12 bundled QSS themes were trimmed and regenerated:
|
|||||||
|
|
||||||
Library filename templates, tag category fetching for all backends, and a popout video streaming overhaul. 50+ commits since v0.2.3.
|
Library filename templates, tag category fetching for all backends, and a popout video streaming overhaul. 50+ commits since v0.2.3.
|
||||||
|
|
||||||
|
## Changes since v0.2.3
|
||||||
|
|
||||||
### New: library filename templates
|
### New: library filename templates
|
||||||
|
|
||||||
Save files with custom names instead of bare post IDs. Templates use `%id%`, `%artist%`, `%character%`, `%copyright%`, `%general%`, `%meta%`, `%species%`, `%md5%`, `%rating%`, `%score%`, `%ext%` tokens. Set in Settings > Paths.
|
Save files with custom names instead of bare post IDs. Templates use `%id%`, `%artist%`, `%character%`, `%copyright%`, `%general%`, `%meta%`, `%species%`, `%md5%`, `%rating%`, `%score%`, `%ext%` tokens. Set in Settings > Paths.
|
||||||
@ -232,6 +120,8 @@ Tag categories (Artist, Character, Copyright, General, Meta, Species) now work a
|
|||||||
- HTML parser two-pass rewrite: Pass 1 finds tag-type elements by class, Pass 2 extracts tag names from `tags=NAME` URL parameters in search links. Works on Rule34, Safebooru.org, and Moebooru.
|
- HTML parser two-pass rewrite: Pass 1 finds tag-type elements by class, Pass 2 extracts tag names from `tags=NAME` URL parameters in search links. Works on Rule34, Safebooru.org, and Moebooru.
|
||||||
- `save_post_file` ensures categories before template render so `%artist%` / `%character%` tokens resolve on Gelbooru-style sites.
|
- `save_post_file` ensures categories before template render so `%artist%` / `%character%` tokens resolve on Gelbooru-style sites.
|
||||||
- On-demand fetch model for Rule34 / Safebooru.org / Moebooru: ~200ms HTML scrape on first click, instant from cache on re-click.
|
- On-demand fetch model for Rule34 / Safebooru.org / Moebooru: ~200ms HTML scrape on first click, instant from cache on re-click.
|
||||||
|
- Tag cache auto-prunes at 50k rows to prevent unbounded DB growth over months of browsing.
|
||||||
|
- Canonical category display order: Artist > Character > Copyright > Species > General > Meta > Lore (matches Danbooru/e621 inline order across all booru types).
|
||||||
|
|
||||||
### Improved: popout video streaming
|
### Improved: popout video streaming
|
||||||
|
|
||||||
@@ -247,540 +137,21 @@ Click-to-first-frame latency on uncached video posts with the popout open is rou
|
|||||||
|
|
||||||
- **Popout close preserves video position.** `closeEvent` now snapshots `position_ms` before dispatching `CloseRequested` (whose `StopMedia` effect destroys mpv's `time_pos`). The embedded preview resumes at the correct position instead of restarting from 0.
|
- **Popout close preserves video position.** `closeEvent` now snapshots `position_ms` before dispatching `CloseRequested` (whose `StopMedia` effect destroys mpv's `time_pos`). The embedded preview resumes at the correct position instead of restarting from 0.
|
||||||
- **Library popout aspect lock for images.** Library items' Post objects were constructed without width/height, so the popout got 0/0 and `_fit_to_content` returned early without setting `keep_aspect_ratio`. Now reads actual pixel dimensions via `QPixmap` before constructing the Post.
|
- **Library popout aspect lock for images.** Library items' Post objects were constructed without width/height, so the popout got 0/0 and `_fit_to_content` returned early without setting `keep_aspect_ratio`. Now reads actual pixel dimensions via `QPixmap` before constructing the Post.
|
||||||
|
- **Library tag search for templated filenames.** The tag search filter used `f.stem.isdigit()` to extract post_id — templated filenames were invisible to search. Now resolves post_id via `get_library_post_id_by_filename` with digit-stem fallback.
|
||||||
|
- **Library thumbnail lookup for templated filenames.** Thumbnails were saved by post_id but looked up by file stem. Templated files showed wrong or missing thumbnails. Now resolves post_id before thumbnail lookup.
|
||||||
|
- **Saved-dot indicator in primary search handler.** `_on_search_done` still used the old filesystem walk with `stem.isdigit()` — last surviving digit-stem callsite. Replaced with `get_saved_post_ids()` DB query.
|
||||||
|
- **Library delete meta cleanup.** Library tab single-delete and multi-delete called `.unlink()` directly, bypassing `delete_from_library`. Orphan `library_meta` rows leaked. Now resolves post_id and calls `remove_library_meta` after unlinking.
|
||||||
|
- **Partial cache compose.** `try_compose_from_cache` now populates `post.tag_categories` with whatever IS cached (for immediate partial display) but returns True only at 100% coverage. Prevents single cached tags from blocking the fetch path.
|
||||||
|
|
||||||
|
### UI
|
||||||
|
|
||||||
|
- Swapped Score and Media Type filter positions in the top toolbar. Dropdowns (Rating, Media Type) are now adjacent; Score sits between Media Type and Page.
|
||||||
|
- Tightened thumbnail spacing in the grid from 8px to 2px.
|
||||||
|
- Thumbnail size capped at 200px in Settings.
|
||||||
|
|
||||||
### Other
|
### Other
|
||||||
|
|
||||||
- README updated, unused Windows screenshots dropped from the repo.
|
- README updated for v0.2.4, unused Windows screenshots dropped from the repo.
|
||||||
- Tightened thumbnail spacing in the grid from 8px to 2px.
|
- New `docs/SAVE_AND_CATEGORIES.md` architecture reference with diagrams covering the unified save flow, CategoryFetcher dispatch, cache table, and per-booru resolution matrix.
|
||||||
- Max thumbnail size at 200px.
|
|
||||||
|
|
||||||
## v0.2.3
|
---
|
||||||
|
|
||||||
A refactor + cleanup release. The two largest source files (`gui/app.py` 3608 lines + `gui/preview.py` 2273 lines) are gone, replaced by a module-per-concern layout. The popout viewer's internal state was rebuilt as an explicit state machine with the historical race bugs locked out structurally instead of by suppression windows. The slider drag-back race that no one had named is finally fixed. A handful of latent bugs got caught and resolved on the way through.
|
|
||||||
|
|
||||||
### Structural refactor: gui/app.py + gui/preview.py split
|
|
||||||
|
|
||||||
The two largest source files were doing too much. `gui/app.py` was 3608 lines mixing async dispatch, signal wiring, tab switching, popout coordination, splitter persistence, context menus, bulk actions, batch download, fullscreen, privacy, and a dozen other concerns. `gui/preview.py` was 2273 lines holding the embedded preview, the popout, the image viewer, the video player, an OpenGL surface, and a click-to-seek slider. Both files had reached the point where almost every commit cited "the staging surface doesn't split cleanly" as the reason for bundling unrelated fixes.
|
|
||||||
|
|
||||||
This release pays that cost down with a structural carve into 12 module-per-concern files plus 2 oversize-by-design god-class files. 14 commits, every commit byte-identical except for relative-import depth corrections, app runnable at every commit boundary.
|
|
||||||
|
|
||||||
- **`gui/app.py` (3608 lines) gone.** Carved into:
|
|
||||||
- `app_runtime.py`: `run()`, `_apply_windows_dark_mode()`, `_load_user_qss()` (`@palette` preprocessor), `_BASE_POPOUT_OVERLAY_QSS`. The QApplication setup, custom QSS load, icon resolution, BooruApp instantiation, and exec loop.
|
|
||||||
- `main_window.py`: `BooruApp(QMainWindow)`, ~3200 lines. The class is one indivisible unit because every method shares instance attributes with every other method. Splitting it across files would have required either inheritance, composition, or method-as-attribute injection, and none of those were worth introducing for a refactor that was supposed to be a pure structural move with no logic changes.
|
|
||||||
- `info_panel.py`: `InfoPanel(QWidget)` toggleable info panel.
|
|
||||||
- `log_handler.py`: `LogHandler(logging.Handler, QObject)` Qt-aware logger adapter.
|
|
||||||
- `async_signals.py`: `AsyncSignals(QObject)` signal hub for async worker results.
|
|
||||||
- `search_state.py`: `SearchState` dataclass.
|
|
||||||
- **`gui/preview.py` (2273 lines) gone.** Carved into:
|
|
||||||
- `preview_pane.py`: `ImagePreview(QWidget)` embedded preview pane.
|
|
||||||
- `popout/window.py`: `FullscreenPreview(QMainWindow)` popout. Initially a single 1136-line file; further carved by the popout state machine refactor below.
|
|
||||||
- `media/constants.py`: `VIDEO_EXTENSIONS`, `_is_video()`.
|
|
||||||
- `media/image_viewer.py`: `ImageViewer(QWidget)` zoom/pan image viewer.
|
|
||||||
- `media/mpv_gl.py`: `_MpvGLWidget` + `_MpvOpenGLSurface`.
|
|
||||||
- `media/video_player.py`: `VideoPlayer(QWidget)` + `_ClickSeekSlider`.
|
|
||||||
- `popout/viewport.py`: `Viewport(NamedTuple)` + `_DRIFT_TOLERANCE`.
|
|
||||||
- **Re-export shim pattern.** Each move added a `from .new_location import MovedClass # re-export for refactor compat` line at the bottom of the old file so existing imports kept resolving the same class object during the migration. The final cleanup commit updated the importer call sites to canonical paths and deleted the now-empty `app.py` and `preview.py`.
|
|
||||||
|
|
||||||
### Bug fixes surfaced by the refactor
|
|
||||||
|
|
||||||
The refactor's "manually verify after every commit" rule exposed 10 latent bugs that had been lurking in the original god-files. Every one of these is a preexisting issue, not something the refactor caused.
|
|
||||||
|
|
||||||
- **Browse multi-select reshape.** Split library and bookmark actions into four distinct entries (Save All / Unsave All / Bookmark All / Remove All Bookmarks), each shown only when the selection actually contains posts the action would affect. The original combined action did both library and bookmark operations under a misleading bookmark-only label, with no way to bulk-unsave without also stripping bookmarks. The reshape resolves the actual need.
|
|
||||||
- **Infinite scroll page_size clamp.** One-character fix at `_on_reached_bottom`'s `search_append.emit` call site (`collected` becomes `collected[:limit]`) to mirror the non-infinite path's slice in `_do_search`. The backfill loop's `>=` break condition allowed the last full batch to push collected past the configured page size.
|
|
||||||
- **Batch download: incremental saved-dot updates and browse-tab-only gating.** Two-part fix. (1) Stash the chosen destination, light saved-dots incrementally as each file lands when the destination is inside `saved_dir()`. (2) Disable the Batch Download menu and Ctrl+D shortcut on the Bookmarks and Library tabs, where it didn't make sense.
|
|
||||||
- **F11 round-trip preserves zoom and position.** Two preservation bugs. (1) `ImageViewer.resizeEvent` no longer clobbers the user's explicit zoom and pan on F11 enter/exit; it uses `event.oldSize()` to detect whether the user was at fit-to-view at the previous size and only re-fits in that case. (2) The popout's F11 enter writes the current Hyprland window state directly into its viewport tracking so F11 exit lands at the actual pre-fullscreen position regardless of how the user got there (drag, drag+nav, drag+F11). The previous drift detection only fired during a fit and missed the "drag then F11 with no nav between" sequence.
|
|
||||||
- **Remove O keybind for Open in Default App.** Five-line block deleted from the main keypress handler. Right-click menu actions stay; only the keyboard shortcut is gone.
|
|
||||||
- **Privacy screen resumes video on un-hide.** `_toggle_privacy` now calls `resume()` on the active video player on the privacy-off branch, mirroring the existing `pause()` calls on the privacy-on branch. The popout's privacy overlay also moved from "hide the popout window" to "raise an in-place black overlay over the popout's central widget" because Wayland's hide → show round-trip drops window position when the compositor unmaps and remaps; an in-place overlay sidesteps the issue.
|
|
||||||
- **VideoPlayer mute state preservation.** When the popout opens, the embedded preview's mute state was synced into the popout's `VideoPlayer` before the popout's mpv instance was created (mpv is wired lazily on first `set_media`). The sync silently disappeared because the `is_muted` setter only forwarded to mpv if mpv existed. Now there's a `_pending_mute` field that the setter writes to unconditionally; `_ensure_mpv` replays it into the freshly-created mpv. Same pattern as the existing volume-from-slider replay.
|
|
||||||
- **Search count + end-of-results instrumentation.** `_do_search` and `_on_reached_bottom` now log per-filter drop counts (`bl_tags`, `bl_posts`, `dedup`), `api_returned`, `kept`, and the `at_end` decision at DEBUG level. Distinguishes "API ran out of posts" from "client-side filters trimmed the page" for the next reproduction. This is instrumentation, not a fix; the underlying intermittent end-of-results bug is still under investigation.
|
|
||||||
|
|
||||||
### Popout state machine refactor
|
|
||||||
|
|
||||||
In the past two weeks, five popout race fixes had landed (`baa910a`, `5a44593`, `7d19555`, `fda3b10`, `31d02d3`), each correct in isolation but fitting the same pattern: a perf round shifted timing, a latent race surfaced, a defensive layer was added. The pattern was emergent from the popout's signal-and-callback architecture, not from any one specific bug. Every defensive layer added a timestamp-based suppression window that the next race fix would have to navigate around.
|
|
||||||
|
|
||||||
This release rebuilds the popout's internal state as an explicit state machine. The 1136-line `FullscreenPreview` god-class became a thin Qt adapter on top of a pure-Python state machine, with the historical race fixes enforced structurally instead of by suppression windows. 16 commits.
|
|
||||||
|
|
||||||
The state machine has 6 states (`AwaitingContent`, `DisplayingImage`, `LoadingVideo`, `PlayingVideo`, `SeekingVideo`, `Closing`), 17 events, and 14 effects. The pure-Python core lives in `popout/state.py` and `popout/effects.py` and imports nothing from PySide6, mpv, or httpx. The Qt-side adapter in `popout/window.py` translates Qt events into state machine events and applies the returned effects to widgets; it never makes decisions about what to do.
|
|
||||||
|
|
||||||
The race fixes that were timestamp windows in the previous code are now structural transitions:
|
|
||||||
|
|
||||||
- **EOF race.** `VideoEofReached` is only legal in `PlayingVideo`. In every other state (most importantly `LoadingVideo`, where the stale-eof race lived), the event is dropped at the dispatch boundary without changing state or emitting effects. Replaces the 250ms `_eof_ignore_until` timestamp window that the previous code used to suppress stale eof events from a previous video's stop.
|
|
||||||
- **Double-load race.** `NavigateRequested` from a media-bearing state transitions to `AwaitingContent` once. A second `NavigateRequested` while still in `AwaitingContent` re-emits the navigate signal but does not re-stop or re-load. The state machine never produces two `LoadVideo` / `LoadImage` effects for the same navigation cycle, regardless of how many `NavigateRequested` events the eventFilter dispatches.
|
|
||||||
- **Persistent viewport.** The viewport (center + long_side) is a state machine field, only mutated by user-action events (`WindowMoved`, `WindowResized`, or `HyprlandDriftDetected`). Never overwritten by reading the previous fit's output. Replaces the per-nav drift accumulation that the previous "recompute viewport from current state" shortcut produced.
|
|
||||||
- **F11 round-trip.** Entering fullscreen snapshots the current viewport into a separate `pre_fullscreen_viewport` field. Exiting restores from the snapshot. The pre-fullscreen viewport is the captured value at the moment of entering, regardless of how the user got there.
|
|
||||||
- **Seek slider pin.** `SeekingVideo` state holds the user's click target. The slider rendering reads from the state machine: while in `SeekingVideo`, the displayed value is the click target; otherwise it's mpv's actual `time_pos`. `SeekCompleted` (from mpv's `playback-restart` event) transitions back to `PlayingVideo`. No timestamp window.
|
|
||||||
- **Pending mute.** The mute / volume / loop_mode values are state machine fields. `MuteToggleRequested` flips the field regardless of which state the machine is in. The `PlayingVideo` entry handler emits `[ApplyMute, ApplyVolume, ApplyLoopMode]` so the persistent values land in the freshly-loaded video on every load cycle.
|
|
||||||
|
|
||||||
The Qt adapter's interface to `main_window.py` was also cleaned up. Previously `main_window.py` reached into `_fullscreen_window._video.X`, `_fullscreen_window._stack.currentIndex()`, `_fullscreen_window._bookmark_btn.setVisible(...)`, and similar private-attribute access at ~25 sites. Those are gone. Nine new public methods on `FullscreenPreview` replace them: `is_video_active`, `set_toolbar_visibility`, `sync_video_state`, `get_video_state`, `seek_video_to`, `connect_media_ready_once`, `pause_media`, `force_mpv_pause`, `stop_media`. Existing methods (`set_media`, `update_state`, `set_post_tags`, `privacy_hide`, `privacy_show`) are preserved unchanged.
|
|
||||||
|
|
||||||
A new debug environment variable `BOORU_VIEWER_STRICT_STATE=1` raises an `InvalidTransition` exception on illegal (state, event) pairs in the state machine. Default release mode drops + logs at debug.
|
|
||||||
|
|
||||||
### Slider drag-back race fixed
|
|
||||||
|
|
||||||
The slider's `_seek` method used `mpv.seek(pos / 1000.0, 'absolute')` (keyframe-only seek). On videos with sparse keyframes (typical 1-5s GOP), mpv lands on the nearest keyframe at-or-before the click position, which is up to 5 seconds behind where the user actually clicked. The 500ms pin window from the earlier fix sweep papered over this for half a second, but afterwards the slider visibly dragged back to mpv's keyframe-rounded position and crawled forward.
|
|
||||||
|
|
||||||
- **`'absolute' → 'absolute+exact'`** in `VideoPlayer._seek`. Aligns the slider with `seek_to_ms` and `_seek_relative`, which were already using exact seek. mpv decodes from the previous keyframe forward to the EXACT target position before reporting it via `time_pos`. Costs 30-100ms more per seek but lands at the exact click position. No more drag-back. Affects both the embedded preview and the popout because they share the `VideoPlayer` class.
|
|
||||||
- **Legacy 500ms pin window removed.** Now redundant after the exact-seek fix. The supporting fields (`_seek_target_ms`, `_seek_pending_until`, `_seek_pin_window_secs`) are gone, `_seek` is one line, `_poll`'s slider write is unconditional after the `isSliderDown()` check.
|
|
||||||
|
|
||||||
### Grid layout fix
|
|
||||||
|
|
||||||
The grid was collapsing by a column when switching to a post in some scenarios. Two compounding issues.
|
|
||||||
|
|
||||||
- **The flow layout's wrap loop was vulnerable to per-cell width drift.** Walked each thumb summing `widget.width() + THUMB_SPACING` and wrapped on `x + item_w > self.width()`. If `THUMB_SIZE` was changed at runtime via Settings, existing thumbs kept their old `setFixedSize` value while new ones from infinite-scroll backfill got the new value. Mixed widths break a width-summing wrap loop.
|
|
||||||
- **The `columns` property had an off-by-one** at column boundaries because it omitted the leading margin from `w // (THUMB_SIZE + THUMB_SPACING)`. A row that fits N thumbs needs `THUMB_SPACING + N * step` pixels, not `N * step`. The visible symptom was that keyboard Up/Down navigation step was off-by-one in the boundary range.
|
|
||||||
- **Fix.** The flow layout now computes column count once via `(width - THUMB_SPACING) // step` and positions thumbs by `(col, row)` index, with no per-widget `widget.width()` reads. The `columns` property uses the EXACT same formula so keyboard nav matches the visual layout at every window width. Affects all three tabs (Browse / Bookmarks / Library) since they all use the same `ThumbnailGrid`.
|
|
||||||
|
|
||||||
### Other fixes
|
|
||||||
|
|
||||||
These two landed right after v0.2.2 was tagged but before the structural refactor started.
|
|
||||||
|
|
||||||
- **Popout video load performance.** mpv URL streaming for uncached videos via a new `video_stream` signal that hands the remote URL to mpv directly instead of waiting for the cache download to finish. mpv fast-load options `vd_lavc_fast` and `vd_lavc_skiploopfilter=nonkey`. GL pre-warm at popout open via a `showEvent` calling `ensure_gl_init` so the first video click doesn't pay for context creation. Identical-rect skip in `_fit_to_content` so back-to-back same-aspect navigation doesn't redundantly dispatch hyprctl. Plus three race-defense layers: pause-on-activate at the top of `_on_post_activated`, the 250ms stale-eof suppression window in VideoPlayer that the state machine refactor later subsumed, and removed redundant `_update_fullscreen` calls from `_navigate_fullscreen` and `_on_video_end_next` that were re-loading the previous post's path with a stale value.
|
|
||||||
- **Double-activation race fix in `_navigate_preview`.** Removed a redundant `_on_post_activated` call from all five view types (browse, bookmarks normal, bookmarks wrap-edge, library normal, library wrap-edge). `_select(idx)` already chains through `post_selected` which already calls `_on_post_activated`, so calling it explicitly again was a duplicate that fired the activation handler twice per keyboard nav.
|
|
||||||
|
|
||||||
## v0.2.2
|
|
||||||
|
|
||||||
A hardening + decoupling release. Bookmark folders and library folders are no longer the same thing under the hood, the `core/` layers get a defensive hardening pass, the async/DB layers get a real concurrency refactor, and the README finally articulates what this project is.
|
|
||||||
|
|
||||||
### Bookmarks ↔ Library decoupling
|
|
||||||
|
|
||||||
- **Bookmark folders and library folders are now independent namespaces.** Used to share identity through `_db.get_folders()` — the same string was both a row in `favorite_folders` and a directory under `saved_dir`. The cross-bleed produced a duplicate-on-move bug and made "Save to Library" silently re-file the bookmark. Now they're two stores: bookmark folders are DB-backed labels for organizing your bookmark list, library folders are real subdirectories of `saved/` for organizing files on disk.
|
|
||||||
- **`library_folders()`** in `core.config` is the new source of truth for every Save-to-Library menu — reads filesystem subdirs of `saved_dir` directly.
|
|
||||||
- **`find_library_files(post_id)`** is the new "is this saved?" / delete primitive — walks the library shallowly by post id.
|
|
||||||
- **Move-aware Save to Library.** If the post is already in another library folder, atomic `Path.rename()` into the destination instead of re-copying from cache. Also fixes the duplicate-on-move bug.
|
|
||||||
- **Library tab right-click: Move to Folder submenu** for both single and multi-select, using `Path.rename` for atomic moves.
|
|
||||||
- **Bookmarks tab: − Folder button** next to + Folder for deleting the selected bookmark folder. DB-only, library filesystem untouched.
|
|
||||||
- **Browse tab right-click: "Bookmark as" submenu** when a post is not yet bookmarked (Unfiled / your bookmark folders / + New); flat "Remove Bookmark" when already bookmarked.
|
|
||||||
- **Embedded preview Bookmark button** got the same submenu shape via a new `bookmark_to_folder` signal + `set_bookmark_folders_callback`.
|
|
||||||
- **Popout Bookmark and Save buttons** both got the submenu treatment; works in both Browse and Bookmarks tab modes.
|
|
||||||
- **Popout in library mode** keeps the Save button visible as Unsave; the rest of the toolbar (Bookmark / BL Tag / BL Post) is hidden since they don't apply.
|
|
||||||
- **Popout state drift fixed.** `_update_fullscreen_state` now mirrors the embedded preview's `_is_bookmarked` / `_is_saved` instead of re-querying DB+filesystem, eliminating a state race during async bookmark adds.
|
|
||||||
- **"Unsorted" renamed to "Unfiled"** everywhere user-facing. Library Unfiled and bookmarks Unfiled now share one label.
|
|
||||||
- `favorite_folders` table preserved for backward compatibility — no migration required.
|
|
||||||
|
|
||||||
### Concurrency refactor
|
|
||||||
|
|
||||||
The earlier worker pattern of `threading.Thread + asyncio.run` was a real loop-affinity bug. The first throwaway loop a worker constructed would bind the shared httpx clients, and the next call from the persistent loop would fail with "Event loop is closed". This release routes everything through one loop and adds the locking and cleanup that should have been there from the start.
|
|
||||||
|
|
||||||
- **`core/concurrency.py`** is a new module: `set_app_loop()` / `get_app_loop()` / `run_on_app_loop()`. Every async piece of work in the GUI now schedules through one persistent loop, registered at startup by `BooruApp`.
|
|
||||||
- **`gui/sites.py` SiteDialog** Detect and Test buttons now route through `run_on_app_loop` instead of spawning a daemon thread. Results marshal back via Qt Signals with `QueuedConnection`. The dialog tracks in-flight futures and cancels them on close so a mid-detect dialog dismissal doesn't poke a destroyed QObject.
|
|
||||||
- **`gui/bookmarks.py` thumbnail loader** got the same swap. The existing `thumb_ready` signal already marshaled correctly.
|
|
||||||
- **Lazy-init lock on shared httpx clients.** `BooruClient._shared_client`, `E621Client._e621_client`, and `cache._shared_client` all use a fast-path / locked-slow-path lazy init. Concurrent first-callers can no longer both build a client and leak one.
|
|
||||||
- **`E621Client` UA-change leftover tracking.** When the User-Agent changes (api_user edit) and a new client is built, the old one is stashed in `_e621_to_close` and drained at shutdown instead of leaking.
|
|
||||||
- **`aclose_shared` on shutdown.** `BooruApp.closeEvent` now runs an `_close_all` coroutine via `run_coroutine_threadsafe(...).result(timeout=5)` before stopping the loop. Connection pools, keepalive sockets, and TLS state release cleanly instead of being abandoned.
|
|
||||||
- **`Database._write_lock` (RLock) + new `_write()` context manager.** Every write method now serializes through one lock so the asyncio thread and the Qt main thread can't interleave multi-statement writes. RLock so a writing method can call another writing method on the same thread without self-deadlocking. Reads stay lock-free under WAL.
|
|
||||||
|
|
||||||
### Defensive hardening
|
|
||||||
|
|
||||||
- **DB transactions.** `delete_site`, `add_search_history`, `remove_folder`, `rename_folder`, and `_migrate` now wrap their multi-statement bodies in `with self.conn:` so a crash mid-method can't leave orphan rows.
|
|
||||||
- **`add_bookmark` lastrowid fix.** When `INSERT OR IGNORE` collides on `(site_id, post_id)`, `lastrowid` is stale; the method now re-`SELECT`s the existing id. Was returning `Bookmark(id=0)` silently, which then no-op'd `update_bookmark_cache_path` on the next bookmark.
|
|
||||||
- **LIKE wildcard escape.** `get_bookmarks` LIKE clauses now `ESCAPE '\\'` so user search literals stop acting as SQL wildcards (the underscore in `cat_ear` no longer matches any single character, so `catsear` stops being a false hit).
|
|
||||||
- **Path traversal guard on folder names.** New `_validate_folder_name` rejects `..`, path separators, and leading `.`/`~` at write time. `saved_folder_dir()` resolves the candidate and refuses anything that doesn't `relative_to` the saved-images base.
|
|
||||||
- **Download size cap and streaming.** `download_image` enforces a 500 MB hard cap against the advertised Content-Length and the running total inside the chunk loop (servers can lie). Payloads ≥ 50 MB stream to a tempfile and atomic `os.replace` instead of buffering in RAM.
|
|
||||||
- **Per-URL coalesce lock.** `defaultdict[str, asyncio.Lock]` keyed by URL hash so concurrent callers downloading the same URL don't race `write_bytes`.
|
|
||||||
- **`Image.MAX_IMAGE_PIXELS = 256M`** with `DecompressionBombError` handling in both PIL converters.
|
|
||||||
- **Ugoira zip-bomb caps.** Frame count and cumulative uncompressed size checked from `ZipInfo` headers before any decompression.
|
|
||||||
- **`_convert_animated_to_gif` failure cache.** Writes a `.convfailed` sentinel sibling on failure to break the re-decode-every-paint loop for malformed animated PNGs/WebPs.
|
|
||||||
- **`_is_valid_media` distinguishes IO errors from "definitely invalid".** Returns `True` (don't delete) on `OSError` so a transient EBUSY/permissions hiccup no longer triggers a delete + re-download loop.
|
|
||||||
- **Hostname suffix matching for Referer.** Was using substring `in` matching, which meant `imgblahgelbooru.attacker.com` falsely mapped to `gelbooru.com`. Now uses proper suffix check.
|
|
||||||
- **`_request` retries on `httpx.NetworkError` and `httpx.ConnectError`** in addition to `TimeoutException`. A single DNS hiccup or RST no longer blows up the whole search.
|
|
||||||
- **`test_connection` no longer echoes the response body** in error strings. It was a body-leak gadget when used via `detect_site_type`'s redirect-following client.
|
|
||||||
- **Exception logging across `detect`, `search`, and `autocomplete`** in every API client. Previously every failure was a silent `return []`; now every swallowed exception logs at WARNING with type, message, and (where relevant) the response body prefix.
|
|
||||||
- **`main_gui.py`** `file_dialog_platform` DB probe failure now prints to stderr instead of vanishing.
|
|
||||||
- **Folder name validation surfaced as `QMessageBox.warning`** in `gui/bookmarks.py` and `gui/app.py` instead of crashing when a user types something the validator rejects.
|
|
||||||
|
|
||||||
### Popout overlay fix
|
|
||||||
|
|
||||||
- **`WA_StyledBackground` set on `_slideshow_toolbar` and `_slideshow_controls`.** Plain `QWidget` parents silently ignore QSS `background:` declarations without this attribute, which is why the popout overlay strip was rendering fully transparent (buttons styled, but the bar behind them showing the letterbox color).
|
|
||||||
- **Base popout overlay style baked into the QSS loader.** `_BASE_POPOUT_OVERLAY_QSS` is prepended before the user's `custom.qss` so themes that don't define overlay rules still get a usable translucent black bar with white text. Bundled themes still override on the same selectors.
|
|
||||||
|
|
||||||
### Popout aspect-ratio handling
|
|
||||||
|
|
||||||
The popout viewer's aspect handling had been patch-thrashing for ~20 commits since 0.2.0. A cold-context audit mapped 13 distinct failure modes still live in the code; this release closes the four highest-impact ones.
|
|
||||||
|
|
||||||
- **Width-anchor ratchet broken.** The previous `_fit_to_content` was width-anchored: `start_w = self.width()` read the current window width and derived height from aspect, with a back-derive if height exceeded the cap. Width was the only stable reference, and because portrait content has aspect < 1 and the height cap (90% of screen) was tighter than the width cap (100%), every portrait visit ran the back-derive and permanently shrunk the window. Going P→L→P→L→P on a 1080p screen produced a visibly smaller landscape on each loop.
|
|
||||||
- **New `Viewport(center_x, center_y, long_side)` model.** Three numbers, no aspect. Aspect is recomputed from content on every nav. The new `_compute_window_rect(viewport, content_aspect, screen)` is a pure static method: symmetric across portrait/landscape (`long_side` becomes width for landscape and height for portrait), proportional clamp shrinks both edges by the same factor when either would exceed its 0.90 ceiling, no asymmetric clamp constants, no back-derive step.
|
|
||||||
- **Viewport derived per-call from existing state.** No persistent field, no `moveEvent`/`resizeEvent` hooks needed for the basic ratchet fix. Three priority sources: pending one-shots (first fit after open or F11 exit) → current Hyprland window position+size → current Qt geometry. The Hyprland-current source captures whatever the user has dragged the popout to, so the next nav respects manual resizes.
|
|
||||||
- **First-fit aspect-lock race fixed.** `_fit_to_content` used to call `_is_hypr_floating` which returned `None` for both "not Hyprland" and "Hyprland but the window isn't visible to hyprctl yet". The latter happens on the very first popout open because the `wm:openWindow` event hasn't been processed when `set_media` fires. The method then fell through to a plain Qt resize and skipped the `keep_aspect_ratio` setprop, so the first image always opened unlocked and only subsequent navigations got the right shape. Now inlines the env-var check, distinguishes the two `None` cases, and retries on Hyprland with a 40ms backoff (capped at 5 attempts / 200ms total) when the window isn't registered yet.
|
|
||||||
- **Non-Hyprland top-left drift fixed.** The Qt fallback branch used to call `self.resize(w, h)`, which anchors top-left and lets bottom-right drift. The popout center walked toward the upper-left of the screen across navigations on Qt-driven WMs. Now uses `self.setGeometry(QRect(x, y, w, h))` with the computed top-left so the center stays put.
|
|
||||||
|
|
||||||
### Image fill in popout and embedded preview
|
|
||||||
|
|
||||||
- **`ImageViewer._fit_to_view` no longer caps zoom at native pixel size.** Used `min(scale_w, scale_h, 1.0)` so a smaller image in a larger window centered with letterbox space around it. The `1.0` cap is gone — images scale up to fill the available view, matching how the video player fills its widget. Combined with the popout's `keep_aspect_ratio`, the window matches the image's aspect AND the image fills it cleanly. Tiled popouts with mismatched aspect still letterbox (intentional — the layout owns the window shape).
|
|
||||||
|
|
||||||
### Main app flash and popout resize speed
|
|
||||||
|
|
||||||
- **Suppress dl_progress widget when the popout is open.** The download progress bar at the bottom of the right splitter was unconditionally `show()`'d on every grid click, including when the popout was open and the right splitter had been collapsed to give the grid full width. The show/hide pulse forced a layout pass on the right splitter that briefly compressed the main grid before the download finished and `hide()` fired. Visible flash on every click in the main app, even when clicking the same post that was already loaded (because `download_image` still runs against the cache). Three callsites now skip the widget entirely when the popout is visible. The status bar still updates with `Loading #X...` so the user has feedback in the main window.
|
|
||||||
- **Cache `_hyprctl_get_window` across one fit call.** `_fit_to_content` used to call `hyprctl clients -j` three times per popout navigation: once at the top for the floating check, once inside `_derive_viewport_for_fit` for the position/size read, and once inside `_hyprctl_resize_and_move` for the address lookup. Each call is a ~3ms `subprocess.run` that blocks the Qt event loop, totalling ~9ms of UI freeze per nav. The two helpers now accept an optional `win=None` parameter; `_fit_to_content` fetches the window dict once and threads it down. Per-fit subprocess count drops from 3 to 1 (~6ms saved per navigation), making rapid clicking and aspect-flip transitions feel snappier.
|
|
||||||
- **Show download progress on the active thumbnail when the embedded preview is hidden.** After the dl_progress suppression above landed, the user lost all visible download feedback in the main app whenever the popout was open. `_on_post_activated` now decides per call whether to use the dl_progress widget at the bottom of the right splitter or fall back to drawing the download progress on the active thumbnail in the main grid via the existing prefetch-progress paint path (`set_prefetch_progress(0.0..1.0)` to fill, `set_prefetch_progress(-1)` to clear). The decision is captured at function entry as `preview_hidden = not (self._preview.isVisible() and self._preview.width() > 0)` and closed over by the `_progress` callback and the `_load` coroutine, so the indicator that starts on a download stays on the same target even if the user opens or closes the popout mid-download. Generalizes to any reason the preview is hidden, not just popout-open: a user who has dragged the main splitter to collapse the preview gets the thumbnail indicator now too.
|
|
||||||
|
|
||||||
### Popout overlay stays hidden across navigation
|
|
||||||
|
|
||||||
- **Stop auto-showing the popout overlay on every `set_media`.** `FullscreenPreview.set_media` ended with an unconditional `self._show_overlay()` call, which meant the floating toolbar and video controls bar popped back into view on every left/right/hjkl navigation between posts. Visually noisy and not what the overlay is for — it's supposed to be a hover-triggered surface, not a per-post popup. Removed the call. The overlay is still shown by `__init__` default state (`_ui_visible = True`, so the user sees it for ~2 seconds on first popout open and the auto-hide timer hides it after that), by `eventFilter` mouse-move-into-top/bottom-edge-zone (the intended hover trigger, unchanged), by volume scroll on the video stack (unchanged), and by `Ctrl+H` toggle (unchanged). After this, the only way the overlay appears mid-session is hover or `Ctrl+H` — navigation through posts no longer flashes it back into view.
|
|
||||||
|
|
||||||
### Discord screen-share audio capture
|
|
||||||
|
|
||||||
- **`ao=pulse` in the mpv constructor.** mpv defaults to `ao=pipewire` (native PipeWire audio output) on Linux. Discord's screen-share-with-audio capture on Linux only enumerates clients connected via the libpulse API; native PipeWire clients are invisible to it. Visible symptom: video plays locally fine but audio is silently dropped from any Discord screen share. Firefox works because Firefox uses libpulse to talk to PipeWire's pulseaudio compat layer. Setting `ao="pulse,wasapi,"` in the MPV constructor (comma-separated priority list, mpv tries each in order) routes mpv through the same pulseaudio compat layer Firefox uses. `pulse` works on Linux; `wasapi` is the Windows fallback; trailing empty falls through to mpv's compiled-in default. No platform branch needed — mpv silently skips audio outputs that aren't available. Verified by inspection: with the fix, mpv's sink-input has `module-stream-restore.id = "sink-input-by-application-name:booru-viewer"` (the pulse-protocol form, identical to Firefox) instead of `"sink-input-by-application-id:booru-viewer"` (the native-pipewire form). References: [mpv #11100](https://github.com/mpv-player/mpv/issues/11100), [edisionnano/Screenshare-with-audio-on-Discord-with-Linux](https://github.com/edisionnano/Screenshare-with-audio-on-Discord-with-Linux).
|
|
||||||
- **`audio_client_name="booru-viewer"` in the mpv constructor.** mpv now registers in pulseaudio/pipewire introspection as `booru-viewer` instead of the default "mpv Media Player". Sets `application.name`, `application.id`, `application.icon_name`, `node.name`, and `device.description` to `booru-viewer` so capture tools group mpv's audio under the same identity as the Qt application.
|
|
||||||
|
|
||||||
### Docs
|
|
||||||
|
|
||||||
- **README repositioning.** New "Why booru-viewer" section between Screenshots and Features that names ahoviewer, Grabber, and Hydrus, lays out the labor axis (who does the filing) and the desktop axis (Hyprland/Wayland targeting), and explains the bookmark/library two-tier model with the browser-bookmark analogy.
|
|
||||||
- **New tagline** that does positioning instead of category description.
|
|
||||||
- **Bookmarks and Library Features sections split** to remove the previous intertwining; each now describes its own folder concept clearly.
|
|
||||||
- **Backup recipe** in Data Locations explaining the `saved/` + `booru.db` split and the recovery path.
|
|
||||||
- **Theming section** notes that each bundled theme ships in `*-rounded.qss` and `*-square.qss` variants.
|
|
||||||
|
|
||||||
### Fixes & polish
|
|
||||||
|
|
||||||
- **Drop the unused "Size: WxH" line from the InfoPanel** — bookmarks and library never had width/height plumbed and the field just showed 0×0.
|
|
||||||
- **Tighter combo and button padding across all 12 bundled themes.** `QPushButton` padding 2px 8px → 2px 6px, `QComboBox` padding 2px 6px → 2px 4px, `QComboBox::drop-down` width 18px → 14px. Saves 8px non-text width per combo and 4px per button.
|
|
||||||
- **Library sort combo: new "Post ID" entry** with a numeric stem sort that handles non-digit stems gracefully. Fits in 75px instead of needing 90px after the padding tightening.
|
|
||||||
- **Score and page spinboxes 50px → 40px** in the top toolbar to recover horizontal space. The internal range (0–99999) is unchanged; values >9999 will visually clip at the right edge but the stored value is preserved.
|
|
||||||
|
|
||||||
## v0.2.1
|
|
||||||
|
|
||||||
A theme + persistence + ricer-friendliness release. The whole stylesheet system was rebuilt around a runtime preprocessor with `@palette` / `${name}` vars, every bundled theme was rewritten end-to-end, and 12 theme variants ship instead of 6. Lots of UI state now survives a restart, and Hyprland ricers get an explicit opt-out for the in-code window management.
|
|
||||||
|
|
||||||
This release does not ship a fresh Windows installer — the previous v0.2.0 installer remains the latest installable binary. Run from source to get 0.2.1, or wait for the next release.
|
|
||||||
|
|
||||||
### Theming System
|
|
||||||
|
|
||||||
- **`@palette` / `${name}` preprocessor** — themes start with a `/* @palette */` header block listing color slots, the body uses `${name}` placeholders that the app substitutes at load time. Edit the 17-slot palette block at the top of any theme to recolor the entire app — no hunting through hex literals.
|
|
||||||
- **All 6 bundled themes rewritten** with comprehensive Fusion-style QSS covering every widget the app uses, every state (hover, focus, disabled, checked), every control variant
|
|
||||||
- **Two corner-radius variants per theme** — `*-rounded.qss` (4px radius, default Fusion-style look) and `*-square.qss` (every border-radius stripped except radio buttons, which stay circular)
|
|
||||||
- **Native Fusion sizing** — themed widgets shrunk to match Qt+Fusion defaults, toolbar row height is now ~23px instead of 30px, matching what `no-custom.qss` renders
|
|
||||||
- **Bundled themes** — catppuccin-mocha, nord, gruvbox, solarized-dark, tokyo-night, everforest. 12 files total (6 themes × 2 variants)
|
|
||||||
|
|
||||||
### QSS-Targetable Surfaces
|
|
||||||
|
|
||||||
Many things hardcoded in Python paint code can now be overridden from a `custom.qss` without touching the source:
|
|
||||||
|
|
||||||
- **InfoPanel tag category colors** — `qproperty-tagArtistColor`, `tagCharacterColor`, `tagCopyrightColor`, `tagSpeciesColor`, `tagMetaColor`, `tagLoreColor`
|
|
||||||
- **ThumbnailWidget selection paint** — `qproperty-selectionColor`, `multiSelectColor`, `hoverColor`, `idleColor` (in addition to existing `savedColor` and `bookmarkedColor`)
|
|
||||||
- **VideoPlayer letterbox color** — `qproperty-letterboxColor`. mpv paints the area around the video frame in this color instead of hardcoded black. Defaults to `QPalette.Window` so KDE color schemes, qt6ct, Windows dark/light mode, and any system Qt theme automatically produce a matching letterbox
|
|
||||||
- **Popout overlay bars** — translucent background for the floating top toolbar and bottom controls bar via the `overlay_bg` palette slot
|
|
||||||
- **Library count label states** — `QLabel[libraryCountState="..."]` attribute selector distinguishes "N files" / "no items match" / "directory unreachable" with QSS-controlled colors instead of inline red
|
|
||||||
|
|
||||||
### Hyprland Integration
|
|
||||||
|
|
||||||
- **Two opt-out env vars** for users with their own windowrules:
|
|
||||||
- `BOORU_VIEWER_NO_HYPR_RULES=1` — disables every in-code hyprctl dispatch except the popout's keep_aspect_ratio lock
|
|
||||||
- `BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1` — independently disables the popout's aspect ratio enforcement
|
|
||||||
- **Popout overlays themed** — top toolbar and bottom controls bar now look themed instead of hardcoded translucent black, respect the `@palette` `overlay_bg` slot
|
|
||||||
- **Popout video letterbox tracks the theme's bg color** via the new `qproperty-letterboxColor`
|
|
||||||
- **Wayland app_id** set via `setDesktopFileName("booru-viewer")` so compositors can target windows by class — `windowrule = float, class:^(booru-viewer)$` — instead of by the volatile window title
|
|
||||||
|
|
||||||
### State Persistence
|
|
||||||
|
|
||||||
- **Main window** — geometry, floating mode, tiled mode (Hyprland)
|
|
||||||
- **Splitter sizes** — main splitter (grid vs preview), right splitter (preview vs dl_progress vs info panel)
|
|
||||||
- **Info panel visibility**
|
|
||||||
- **Cache spinbox** auto-derived dialog min height (no more clipping when dragging the settings dialog small)
|
|
||||||
- **Popout window** position, dimensions, and F11 fullscreen state restored via Hyprland floating cache prime
|
|
||||||
|
|
||||||
### UX
|
|
||||||
|
|
||||||
- **Live debounced search** in bookmarks and library tabs — type to filter, press Enter to commit immediately. 150ms debounce on bookmarks (cheap SQLite), 250ms on library (filesystem scan)
|
|
||||||
- **Search button removed** from bookmarks toolbar (live search + Enter)
|
|
||||||
- **Score field +/- buttons removed** from main search bar — type the value directly
|
|
||||||
- **Embedded preview video controls** moved out of the overlay style and into the panel layout, sitting under the media instead of floating on top of it. Popout still uses the floating overlay
|
|
||||||
- **Next-mode loop wraps** to the start of the bookmarks/library list after the last item instead of stopping
|
|
||||||
- **Splitter handle margins** — 4px breathing margin on either side so toolbar buttons don't sit flush against the splitter line
|
|
||||||
|
|
||||||
### Performance
|
|
||||||
|
|
||||||
- **Page-load thumbnails** pre-fetch bookmarks + cache state into set lookups instead of N synchronous SQLite queries per page
|
|
||||||
- **Animated PNG/WebP conversion** off-loaded to a worker thread via `asyncio.to_thread` so it doesn't block the asyncio event loop during downloads
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- **Open in Browser/Default App** on the bookmarks tab now opens the bookmark's actual source post (was opening unrelated cached files)
|
|
||||||
- **Cache settings spinboxes** can no longer be vertically clipped at the dialog's minimum size; spinboxes use Python-side `setMinimumHeight()` to propagate floors up the layout chain
|
|
||||||
- **Settings dialog** uses side-by-side `+`/`-` buttons instead of QSpinBox's default vertical arrows for clearer interaction
|
|
||||||
- **Bookmarks tab BL Tag** refreshes correctly when navigating bookmarked posts (was caching stale tags from the first selection)
|
|
||||||
- **Popout F11 → windowed** restores its previous windowed position and dimensions
|
|
||||||
- **Popout flicker on F11** transitions eliminated via `no_anim` setprop + deferred fit + dedupe of mpv `video-params` events
|
|
||||||
- **Bookmark + saved indicator dots** in the thumbnail grid: bookmark star on left, saved dot on right, both vertically aligned in a fixed-size box
|
|
||||||
- **Selection border** on thumbnail cells redrawn pen-aware: square geometry (no rounded corner artifacts), even line width on all sides, no off-by-one anti-aliasing seams
|
|
||||||
- **Toolbar buttons in narrow slots** no longer clip text (Bookmark/Unbookmark, Save/Unsave, BL Tag, BL Post, Popout, + Folder, Refresh) — all bumped to fit "Unbookmark" comfortably under the bundled themes' button padding
|
|
||||||
- **Toolbar rows** on bookmarks/library/preview panels now sit at a uniform 23px height matching the inputs/combos in the same row
|
|
||||||
- **Score and Page spinbox heights** forced to 23px via `setFixedHeight` to work around QSpinBox reserving vertical space for arrow buttons even when `setButtonSymbols(NoButtons)` is set
|
|
||||||
- **Library Open in Default App** uses the actual file path instead of routing through `cached_path_for` (which would return a hash path that doesn't exist for library files)
|
|
||||||
|
|
||||||
### Cleanup
|
|
||||||
|
|
||||||
- Deleted unused `booru_viewer/gui/theme.py` (222 lines of legacy stylesheet template that was never imported)
|
|
||||||
- Deleted `GREEN`/`DARK_GREEN`/`DIM_GREEN`/`BG`/`BG_LIGHT` etc constants from `booru_viewer/core/config.py` (only `theme.py` used them)
|
|
||||||
- Removed dead missing-indicator code (`set_missing`, `_missing_color`, `missingColor` Qt Property, the unreachable `if not filepath.exists()` branch in `library.refresh`)
|
|
||||||
- Removed dead score `+`/`-` buttons code path
|
|
||||||
|
|
||||||
## v0.2.0
|
|
||||||
|
|
||||||
### New: mpv video backend
|
|
||||||
|
|
||||||
- Replaced Qt Multimedia (QMediaPlayer/QVideoWidget) with embedded mpv via `python-mpv`
|
|
||||||
- OpenGL render API (`MpvRenderContext`) for Wayland-native compositing — no XWayland needed
|
|
||||||
- Proper hardware-accelerated decoding (`hwdec=auto`)
|
|
||||||
- Reliable aspect ratio handling — portrait videos scale correctly
|
|
||||||
- Proper end-of-file detection via `eof-reached` property observer instead of fragile position-jump heuristic
|
|
||||||
- Frame-accurate seeking with `absolute+exact` and `relative+exact`
|
|
||||||
- `keep-open=yes` holds last frame on video end instead of flashing black
|
|
||||||
- Windows: bundle `mpv-2.dll` in PyInstaller build
|
|
||||||
|
|
||||||
### New: popout viewer (renamed from slideshow)
|
|
||||||
|
|
||||||
- Renamed "Slideshow" to "Popout" throughout UI
|
|
||||||
- Toolbar and video controls float over media with translucent background (`rgba(0,0,0,160)`)
|
|
||||||
- Auto-hide after 2 seconds of inactivity, reappear on mouse move
|
|
||||||
- Ctrl+H manual toggle
|
|
||||||
- Media fills entire window — no layout shift when UI appears/disappears
|
|
||||||
- Video controls only show for video posts, hidden for images/GIFs
|
|
||||||
- Smart F11 exit: window sizes to 60% of monitor, maintaining content aspect ratio
|
|
||||||
- Window auto-resizes to content aspect ratio on navigation (height adjusts, position stays)
|
|
||||||
- Window geometry and fullscreen state persisted to DB across sessions
|
|
||||||
- Hyprland-specific: uses `hyprctl resizewindowpixel` + `setprop keep_aspect_ratio` to lock window to content aspect ratio (works both floating and tiled)
|
|
||||||
- Default site setting in Settings > General
|
|
||||||
|
|
||||||
### New: preview toolbar
|
|
||||||
|
|
||||||
- Action bar above the preview panel: Bookmark, Save, BL Tag, BL Post, Popout
|
|
||||||
- Appears when a post is active, hidden when preview is cleared
|
|
||||||
- Save button opens folder picker menu (Unsorted / existing folders / + New Folder)
|
|
||||||
- Save/Unsave state shown on button text
|
|
||||||
- Bookmark/Unbookmark state shown on button text
|
|
||||||
- Per-tab button visibility: Library tab only shows Save + Popout
|
|
||||||
- All actions work from any tab (Browse, Bookmarks, Library)
|
|
||||||
- Blacklist tag and blacklist post show confirmation dialogs
|
|
||||||
- "Unsave from Library" only appears in context menu when post is saved
|
|
||||||
|
|
||||||
### New: media type filter
|
|
||||||
|
|
||||||
- Replaced "Animated" checkbox with dropdown: All / Animated / Video / GIF / Audio
|
|
||||||
- Each option appends the corresponding booru tag to the search query
|
|
||||||
|
|
||||||
### New: thumbnail cache limits
|
|
||||||
|
|
||||||
- Added "Max thumbnail cache" setting (default 500 MB)
|
|
||||||
- Auto-evicts oldest thumbnails when limit is reached
|
|
||||||
|
|
||||||
### Improved: state synchronization
|
|
||||||
|
|
||||||
- Saving/unsaving updates grid thumbnail dots instantly (browse, bookmarks, library)
|
|
||||||
- Unbookmarking refreshes the bookmarks tab immediately
|
|
||||||
- Saving from browse/bookmarks refreshes the library tab when async save completes
|
|
||||||
- Library items set `_current_post` on click so toolbar actions work correctly
|
|
||||||
- Preview toolbar tracks bookmark and save state across all tabs
|
|
||||||
- Tab switching clears grid selections to prevent cross-tab action conflicts
|
|
||||||
- Bookmark state updates after async bookmark completes (not before)
|
|
||||||
|
|
||||||
### Improved: infinite scroll
|
|
||||||
|
|
||||||
- Fixed missing posts when media type filters reduce results per page
|
|
||||||
- Local dedup set (`seen`) prevents cross-page duplicates within backfill without polluting `shown_post_ids`
|
|
||||||
- Page counter only advances when results are returned, not when filtering empties them
|
|
||||||
- Backfill loop increased to 10 max pages with 300ms delay between API calls (first call instant)
|
|
||||||
|
|
||||||
### Improved: pagination
|
|
||||||
|
|
||||||
- Status bar shows "(end)" when search returns fewer results than page size
|
|
||||||
- Prev/Next buttons hide when at page boundaries instead of just disabling
|
|
||||||
- Source URLs clickable in info panel, truncated at 60 chars for display
|
|
||||||
|
|
||||||
### Improved: video controls
|
|
||||||
|
|
||||||
- Seek step changed from 5s to ~3s for `,` and `.` keys
|
|
||||||
- `,` and `.` seek keys now work in the main preview panel, not just popout
|
|
||||||
- Translucent overlay style on video controls in both preview and popout
|
|
||||||
- Volume slider fixed at 60px to not compete with seek slider at small sizes
|
|
||||||
|
|
||||||
### New: API retry logic
|
|
||||||
|
|
||||||
- Single retry with backoff on HTTP 429 (rate limit) and 503 (service unavailable)
|
|
||||||
- Retries on request timeout
|
|
||||||
- Respects `Retry-After` header (capped at 5s)
|
|
||||||
- Applied to all API requests (search, get_post, autocomplete) across all four clients
|
|
||||||
- Downloads are not retried (large payloads, separate client)
|
|
||||||
|
|
||||||
### Refactor: SearchState dataclass
|
|
||||||
|
|
||||||
- Consolidated 8 scattered search state attributes into a single `SearchState` dataclass
|
|
||||||
- Eliminated all defensive `getattr`/`hasattr` patterns (8 instances)
|
|
||||||
- State resets cleanly on new search — no stale infinite scroll data
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- Added `python-mpv>=1.0`
|
|
||||||
- Removed dependency on `PySide6.QtMultimedia` and `PySide6.QtMultimediaWidgets`
|
|
||||||
|
|
||||||
## v0.1.9
|
|
||||||
|
|
||||||
### New Features
|
|
||||||
|
|
||||||
- **Animated filter** — checkbox to only show animated/video posts (server-side `animated` tag)
|
|
||||||
- **Start from page** — page number field in top bar, jump to any page on search
|
|
||||||
- **Post date** — creation date shown in the info line
|
|
||||||
- **Prefetch modes** — Off / Nearby (4 cardinals) / Aggressive (3 row radius)
|
|
||||||
- **Animated PNG/WebP** — auto-converted to GIF for Qt playback
|
|
||||||
|
|
||||||
### Improvements
|
|
||||||
|
|
||||||
- Thumbnail selection/hover box hugs the actual image content
|
|
||||||
- Video controls locked to bottom of preview panel
|
|
||||||
- Score filter uses +/- buttons instead of spinbox arrows
|
|
||||||
- Cache eviction triggers after infinite scroll page drain
|
|
||||||
- Combobox dropdown styling fixed on Windows dark mode
|
|
||||||
- Saved thumbnail size applied on startup
|
|
||||||
|
|
||||||
### Fixes
|
|
||||||
|
|
||||||
- Infinite scroll no longer stops early from false exhaustion
|
|
||||||
- Infinite scroll triggers when viewport isn't full (initial load, splitter resize, window resize)
|
|
||||||
- Shared HTTP clients reset on startup (prevents stale event loop errors)
|
|
||||||
- Non-JSON API responses handled gracefully instead of crashing
|
|
||||||
|
|
||||||
## v0.1.8
|
|
||||||
|
|
||||||
### Windows Installer
|
|
||||||
|
|
||||||
- **Inno Setup installer** — proper Windows installer with Start Menu shortcut, optional desktop icon, and uninstaller
|
|
||||||
- **`--onedir` build** — instant startup, no temp extraction (was `--onefile`)
|
|
||||||
- **`optimize=2`** — stripped docstrings/asserts for smaller, faster bytecode
|
|
||||||
- **No UPX** — trades disk space for faster launch (no decompression overhead)
|
|
||||||
- **`noarchive`** — loose .pyc files, no zip decompression at startup
|
|
||||||
|
|
||||||
### Performance
|
|
||||||
|
|
||||||
- **Shared HTTP client for API calls** — single TLS handshake for all Danbooru/Gelbooru/Moebooru requests
|
|
||||||
- **E621 shared client** — separate pooled client (custom User-Agent required)
|
|
||||||
- **Site detection reuses shared client** — no extra TLS for auto-detect
|
|
||||||
- **Priority downloads** — clicking a post pauses prefetch, downloads at full speed, resumes after
|
|
||||||
- **Referer header per-request** — fixes Gelbooru CDN returning HTML captcha pages
|
|
||||||
|
|
||||||
### Infinite Scroll
|
|
||||||
|
|
||||||
- **Auto-fill viewport** — if first page doesn't fill the screen, auto-loads more
|
|
||||||
- **Auto-load after drain** — checks if still at bottom after staggered append finishes
|
|
||||||
- **Content-aware trigger** — fires when scrollbar max is 0 (no scroll needed)
|
|
||||||
|
|
||||||
### Library
|
|
||||||
|
|
||||||
- **Tag categories stored** — saved as JSON in both library_meta and bookmarks DB
|
|
||||||
- **Categorized tags in info panel** — Library and Bookmarks show Artist/Character/Copyright etc.
|
|
||||||
- **Tag search in Library** — search box filters by stored tags
|
|
||||||
- **Browse thumbnail copied on save** — Library tab shows thumbnails instantly
|
|
||||||
- **Unsave from Library** in bookmarks right-click menu
|
|
||||||
|
|
||||||
### Bugfixes
|
|
||||||
|
|
||||||
- **Clear preview on new search**
|
|
||||||
- **Fixed diagonal grid navigation** — viewport width used for column count
|
|
||||||
- **Fixed Gelbooru CDN** — Referer header passed per-request with shared client
|
|
||||||
- **Crash guards** — pop(0) on empty queue, bounds checks in API clients
|
|
||||||
- **Page cache capped** — 10 pages max in pagination mode
|
|
||||||
- **Missing DB migrations** — tag_categories column added to existing tables
|
|
||||||
- **Tag click switches to Browse** — clears preview and searches clicked tag
|
|
||||||
|
|
||||||
## v0.1.7
|
|
||||||
|
|
||||||
### Infinite Scroll
|
|
||||||
|
|
||||||
- **New mode** — toggle in Settings > General, applies live
|
|
||||||
- Auto-loads more posts when scrolling to bottom
|
|
||||||
- **Staggered loading** — posts appear one at a time as thumbnails arrive
|
|
||||||
- **Stops at end** — gracefully handles API exhaustion
|
|
||||||
- Arrow keys at bottom don't break the grid
|
|
||||||
- Loading locked during drain to prevent multi-page burst
|
|
||||||
- Triggered one row from bottom for seamless experience
|
|
||||||
|
|
||||||
### Page Cache & Deduplication
|
|
||||||
|
|
||||||
- Page results cached in memory — prev/next loads instantly
|
|
||||||
- Backfilled posts don't repeat on subsequent pages
|
|
||||||
- Page label updates on cached loads
|
|
||||||
|
|
||||||
### Prefetch
|
|
||||||
|
|
||||||
- **Ring expansion** — prefetches in all 8 directions (including diagonals)
|
|
||||||
- **Auto-start on search** — begins from top of page immediately
|
|
||||||
- **Re-centers on click** — restarts spiral from clicked post
|
|
||||||
- **Triggers on infinite scroll** — new appended posts prefetch automatically
|
|
||||||
|
|
||||||
### Clipboard
|
|
||||||
|
|
||||||
- **Copy File to Clipboard** — works in grid, preview, bookmarks, and library
|
|
||||||
- **Ctrl+C shortcut** — global shortcut via QShortcut
|
|
||||||
- **QMimeData** — uses same mechanism as drag-and-drop for universal compatibility
|
|
||||||
- Sets both file URL (for file managers) and image data (for Discord/image apps)
|
|
||||||
- Videos copy as file URIs
|
|
||||||
|
|
||||||
### Slideshow
|
|
||||||
|
|
||||||
- **Blacklist Tag button** — opens categorized tag menu
|
|
||||||
- **Blacklist Post button** — blacklists current post
|
|
||||||
|
|
||||||
### Blacklist
|
|
||||||
|
|
||||||
- **In-place removal** — blacklisting removes matching posts from grid without re-searching
|
|
||||||
- Preserves infinite scroll state
|
|
||||||
- Only clears preview when the blacklisted post is the one being viewed
|
|
||||||
|
|
||||||
### UI Polish
|
|
||||||
|
|
||||||
- **QProxyStyle dark arrows** — spinbox/combobox arrows visible on all dark QSS themes
|
|
||||||
- **Diagonal nav fix** — column count reads viewport width correctly
|
|
||||||
- **Status bar** — shows result count with action confirmations
|
|
||||||
- **Live settings** — infinite scroll, library dir, thumbnail size apply without restart
|
|
||||||
|
|
||||||
### Stability
|
|
||||||
|
|
||||||
- All silent exceptions logged
|
|
||||||
- Missing defaults added for fresh installs
|
|
||||||
- Git history cleaned
|
|
||||||
|
|
||||||
## v0.1.6
|
|
||||||
|
|
||||||
### Infinite Scroll
|
|
||||||
|
|
||||||
- **New mode** — toggle in Settings > General: "Infinite scroll (replaces page buttons)"
|
|
||||||
- Hides prev/next buttons, auto-loads more posts when scrolling to bottom
|
|
||||||
- Posts appended to grid, deduped, blacklist filtered
|
|
||||||
- Stops gracefully when API runs out of results (shows "end")
|
|
||||||
- Arrow keys at bottom don't nuke the grid — page turn disabled in infinite scroll
|
|
||||||
- Applies live — no restart needed
|
|
||||||
|
|
||||||
### Page Cache & Deduplication
|
|
||||||
|
|
||||||
- **Page results cached** — prev/next loads instantly from memory within a search session
|
|
||||||
- **Post deduplication** — backfilled posts don't repeat on subsequent pages
|
|
||||||
- **Page label updates** on cached page loads
|
|
||||||
|
|
||||||
### Prefetch
|
|
||||||
|
|
||||||
- **Ring expansion** — prefetches in all 8 directions (up, down, left, right, diagonals)
|
|
||||||
- **Auto-start on search** — begins prefetching from top of page immediately
|
|
||||||
- **Re-centers on click** — clicking a post restarts the spiral from that position
|
|
||||||
- **Triggers on infinite scroll** — new appended posts start prefetching automatically
|
|
||||||
|
|
||||||
### Slideshow
|
|
||||||
|
|
||||||
- **Blacklist Tag button** — opens categorized tag menu in slideshow toolbar
|
|
||||||
- **Blacklist Post button** — blacklists current post from slideshow toolbar
|
|
||||||
- **Blacklisting clears slideshow** — both preview and slideshow cleared when previewed post is blacklisted
|
|
||||||
|
|
||||||
### Copy to Clipboard
|
|
||||||
|
|
||||||
- **Ctrl+C** — copies preview image to clipboard (falls back to cached file)
|
|
||||||
- **Right-click grid** — "Copy Image to Clipboard" option
|
|
||||||
- **Right-click preview** — "Copy Image to Clipboard" always available
|
|
||||||
|
|
||||||
### Live Settings
|
|
||||||
|
|
||||||
- **Most settings apply instantly** — infinite scroll, library directory, thumbnail size, rating, score
|
|
||||||
- Removed "restart required" labels
|
|
||||||
|
|
||||||
### Bugfixes
|
|
||||||
|
|
||||||
- **Blacklisting doesn't clear unrelated preview** — only clears when the previewed post matches
|
|
||||||
- **Backfill confirmed working** — debug logging added
|
|
||||||
- **Status bar keeps result count** — shows "N results — Loaded" instead of just "Loaded"
|
|
||||||
- **Fixed README code block formatting** and added ffmpeg back to Linux deps
|
|
||||||
|
|||||||
109
HYPRLAND.md
109
HYPRLAND.md
@ -1,109 +0,0 @@
|
|||||||
# Hyprland integration
|
|
||||||
|
|
||||||
I daily-drive booru-viewer on Hyprland and I've baked in my own opinions
|
|
||||||
on how the app should behave there. By default, a handful of `hyprctl`
|
|
||||||
dispatches run at runtime to:
|
|
||||||
|
|
||||||
- Restore the main window's last floating mode + dimensions on launch
|
|
||||||
- Restore the popout's position and keep it anchored to its configured
|
|
||||||
anchor point (center or any corner) as its content resizes during
|
|
||||||
navigation, and suppress F11 / fullscreen-transition flicker
|
|
||||||
- "Prime" Hyprland's per-window floating cache at startup so a mid-session
|
|
||||||
toggle to floating uses your saved dimensions
|
|
||||||
- Lock the popout's aspect ratio to its content so you can't accidentally
|
|
||||||
stretch mpv playback by dragging the popout corner
|
|
||||||
|
|
||||||
## Opting out
|
|
||||||
|
|
||||||
If you're a ricer with your own `windowrule`s targeting
|
|
||||||
`class:^(booru-viewer)$` and you'd rather the app keep its hands off your
|
|
||||||
setup, there are two independent opt-out env vars:
|
|
||||||
|
|
||||||
- **`BOORU_VIEWER_NO_HYPR_RULES=1`** — disables every in-code hyprctl
|
|
||||||
dispatch *except* the popout's `keep_aspect_ratio` lock. Use this if
|
|
||||||
you want app-side window management out of the way but you still want
|
|
||||||
the popout to size itself to its content.
|
|
||||||
- **`BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1`** — independently disables
|
|
||||||
the popout's aspect ratio enforcement. Useful if you want to drag the
|
|
||||||
popout to whatever shape you like (square, panoramic, monitor-aspect,
|
|
||||||
whatever) and accept that mpv playback will letterbox or stretch to
|
|
||||||
match.
|
|
||||||
|
|
||||||
For the full hands-off experience, set both:
|
|
||||||
|
|
||||||
```ini
|
|
||||||
[Desktop Entry]
|
|
||||||
Name=booru-viewer
|
|
||||||
Exec=env BOORU_VIEWER_NO_HYPR_RULES=1 BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1 /path/to/booru-viewer/.venv/bin/booru-viewer
|
|
||||||
Icon=/path/to/booru-viewer/icon.png
|
|
||||||
Type=Application
|
|
||||||
Categories=Graphics;
|
|
||||||
```
|
|
||||||
|
|
||||||
Or for one-off launches from a shell:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
BOORU_VIEWER_NO_HYPR_RULES=1 booru-viewer
|
|
||||||
```
|
|
||||||
|
|
||||||
## Writing your own rules
|
|
||||||
|
|
||||||
If you're running with `BOORU_VIEWER_NO_HYPR_RULES=1` (or layering rules
|
|
||||||
on top of the defaults), here's the reference.
|
|
||||||
|
|
||||||
### Window identity
|
|
||||||
|
|
||||||
- Main window — class `booru-viewer`
|
|
||||||
- Popout — class `booru-viewer`, title `booru-viewer — Popout`
|
|
||||||
|
|
||||||
> ⚠ The popout title uses an em dash (`—`, U+2014), not a hyphen. A rule
|
|
||||||
> like `match:title = ^booru-viewer - Popout$` will silently match
|
|
||||||
> nothing. Either paste the em dash verbatim or match the tail:
|
|
||||||
> `match:title = Popout$`.
|
|
||||||
|
|
||||||
### Example rules
|
|
||||||
|
|
||||||
```ini
|
|
||||||
# Float the popout with aspect-locked resize and no animation flicker
|
|
||||||
windowrule {
|
|
||||||
match:class = ^(booru-viewer)$
|
|
||||||
match:title = Popout$
|
|
||||||
float = yes
|
|
||||||
keep_aspect_ratio = on
|
|
||||||
no_anim = on
|
|
||||||
}
|
|
||||||
|
|
||||||
# Per-window scroll factor if your global is too aggressive
|
|
||||||
windowrule {
|
|
||||||
match:class = ^(booru-viewer)$
|
|
||||||
match:title = Popout$
|
|
||||||
scroll_mouse = 0.65
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### What the env vars actually disable
|
|
||||||
|
|
||||||
`BOORU_VIEWER_NO_HYPR_RULES=1` suppresses the in-code calls to:
|
|
||||||
|
|
||||||
- `dispatch resizeactive` / `moveactive` batches that restore saved
|
|
||||||
popout geometry
|
|
||||||
- `dispatch togglefloating` on the main window at launch
|
|
||||||
- `dispatch setprop address:<addr> no_anim 1` applied during popout
|
|
||||||
transitions (skipped on the first fit after open so Hyprland's
|
|
||||||
`windowsIn` / `popin` animation can play — subsequent navigation
|
|
||||||
fits still suppress anim to avoid resize flicker)
|
|
||||||
- The startup "prime" sequence that warms Hyprland's per-window
|
|
||||||
floating cache
|
|
||||||
|
|
||||||
`BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1` suppresses only
|
|
||||||
`dispatch setprop address:<addr> keep_aspect_ratio 1` on the popout.
|
|
||||||
Everything else still runs.
|
|
||||||
|
|
||||||
Read-only queries (`hyprctl clients -j`, `hyprctl monitors -j`) always
|
|
||||||
run regardless — the app needs them to know where it is.
|
|
||||||
|
|
||||||
### Hyprland requirements
|
|
||||||
|
|
||||||
The `keep_aspect_ratio` windowrule and `dispatch setprop
|
|
||||||
keep_aspect_ratio` both require a recent Hyprland. On older builds the
|
|
||||||
aspect lock is silently a no-op.
|
|
||||||
88
README.md
88
README.md
@ -1,7 +1,16 @@
|
|||||||
# booru-viewer
|
# booru-viewer
|
||||||
A Qt6 booru client for people who keep what they save and rice what they run. Browse, search, and archive Danbooru, e621, Gelbooru, and Moebooru on Linux and Windows. Fully themeable.
|
|
||||||
|
|
||||||
<img src="screenshots/linux.png" alt="Linux — System Qt6 theme" width="700">
|
[](https://github.com/pxlwh/booru-viewer/actions/workflows/tests.yml)
|
||||||
|
|
||||||
|
A booru client for people who keep what they save and rice what they run.
|
||||||
|
|
||||||
|
Qt6 desktop app for Linux and Windows. Browse, search, and archive Danbooru, e621, Gelbooru, and Moebooru. Fully themeable.
|
||||||
|
|
||||||
|
## Screenshot
|
||||||
|
|
||||||
|
**Linux — Styled via system Qt6 theme**
|
||||||
|
|
||||||
|
<picture><img src="screenshots/linux.png" alt="Linux — System Qt6 theme" width="700"></picture>
|
||||||
|
|
||||||
Supports custom styling via `custom.qss` — see [Theming](#theming).
|
Supports custom styling via `custom.qss` — see [Theming](#theming).
|
||||||
|
|
||||||
@ -35,26 +44,21 @@ Windows 10 dark mode is automatically detected and applied.
|
|||||||
|
|
||||||
### Linux
|
### Linux
|
||||||
|
|
||||||
**Arch / CachyOS / Manjaro** — install from the AUR:
|
Requires Python 3.11+ and pip. Most distros ship Python but you may need to install pip and the Qt6 system libraries.
|
||||||
|
|
||||||
|
**Arch / CachyOS:**
|
||||||
```sh
|
```sh
|
||||||
yay -S booru-viewer-git
|
sudo pacman -S python python-pip qt6-base mpv ffmpeg
|
||||||
# or: paru -S booru-viewer-git
|
|
||||||
```
|
```
|
||||||
|
|
||||||
The AUR package tracks the gitea `main` branch, so `yay -Syu` pulls the latest commit. Desktop entry and icon are installed automatically.
|
**Ubuntu / Debian (24.04+):**
|
||||||
|
|
||||||
AUR: [/packages/booru-viewer-git](https://aur.archlinux.org/packages/booru-viewer-git)
|
|
||||||
|
|
||||||
**Other distros** — build from source. Requires Python 3.11+ and Qt6 system libraries.
|
|
||||||
|
|
||||||
Ubuntu / Debian (24.04+):
|
|
||||||
```sh
|
```sh
|
||||||
sudo apt install python3 python3-pip python3-venv mpv libmpv-dev
|
sudo apt install python3 python3-pip python3-venv mpv libmpv-dev ffmpeg
|
||||||
```
|
```
|
||||||
|
|
||||||
Fedora:
|
**Fedora:**
|
||||||
```sh
|
```sh
|
||||||
sudo dnf install python3 python3-pip qt6-qtbase mpv mpv-libs-devel
|
sudo dnf install python3 python3-pip qt6-qtbase mpv mpv-libs-devel ffmpeg
|
||||||
```
|
```
|
||||||
|
|
||||||
Then clone and install:
|
Then clone and install:
|
||||||
@ -64,10 +68,16 @@ cd booru-viewer
|
|||||||
python3 -m venv .venv
|
python3 -m venv .venv
|
||||||
source .venv/bin/activate
|
source .venv/bin/activate
|
||||||
pip install -e .
|
pip install -e .
|
||||||
|
```
|
||||||
|
|
||||||
|
Run it:
|
||||||
|
```sh
|
||||||
booru-viewer
|
booru-viewer
|
||||||
```
|
```
|
||||||
|
|
||||||
To add a launcher entry, create `~/.local/share/applications/booru-viewer.desktop`:
|
Or without installing: `python3 -m booru_viewer.main_gui`
|
||||||
|
|
||||||
|
**Desktop entry:** To add booru-viewer to your app launcher, create `~/.local/share/applications/booru-viewer.desktop`:
|
||||||
```ini
|
```ini
|
||||||
[Desktop Entry]
|
[Desktop Entry]
|
||||||
Name=booru-viewer
|
Name=booru-viewer
|
||||||
@ -79,11 +89,47 @@ Categories=Graphics;
|
|||||||
|
|
||||||
### Hyprland integration
|
### Hyprland integration
|
||||||
|
|
||||||
booru-viewer ships with built-in Hyprland window management (popout
|
I daily-drive booru-viewer on Hyprland and I've baked in my own opinions on
|
||||||
geometry restore, aspect ratio lock, animation suppression, etc.) that
|
how the app should behave there. By default, a handful of `hyprctl` dispatches
|
||||||
can be fully or partially opted out of via env vars. See
|
run at runtime to:
|
||||||
[HYPRLAND.md](HYPRLAND.md) for the full details, opt-out flags, and
|
|
||||||
example `windowrule` reference.
|
- Restore the main window's last floating mode + dimensions on launch
|
||||||
|
- Restore the popout's position, center-pin it around its content during
|
||||||
|
navigation, and suppress F11 / fullscreen-transition flicker
|
||||||
|
- "Prime" Hyprland's per-window floating cache at startup so a mid-session
|
||||||
|
toggle to floating uses your saved dimensions
|
||||||
|
- Lock the popout's aspect ratio to its content so you can't accidentally
|
||||||
|
stretch mpv playback by dragging the popout corner
|
||||||
|
|
||||||
|
If you're a ricer with your own `windowrule`s targeting `class:^(booru-viewer)$`
|
||||||
|
and you'd rather the app keep its hands off your setup, there are two
|
||||||
|
independent opt-out env vars:
|
||||||
|
|
||||||
|
- **`BOORU_VIEWER_NO_HYPR_RULES=1`** — disables every in-code hyprctl dispatch
|
||||||
|
*except* the popout's `keep_aspect_ratio` lock. Use this if you want app-side
|
||||||
|
window management out of the way but you still want the popout to size itself
|
||||||
|
to its content.
|
||||||
|
- **`BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1`** — independently disables the popout's
|
||||||
|
aspect ratio enforcement. Useful if you want to drag the popout to whatever
|
||||||
|
shape you like (square, panoramic, monitor-aspect, whatever) and accept that
|
||||||
|
mpv playback will letterbox or stretch to match.
|
||||||
|
|
||||||
|
For the full hands-off experience, set both:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[Desktop Entry]
|
||||||
|
Name=booru-viewer
|
||||||
|
Exec=env BOORU_VIEWER_NO_HYPR_RULES=1 BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1 /path/to/booru-viewer/.venv/bin/booru-viewer
|
||||||
|
Icon=/path/to/booru-viewer/icon.png
|
||||||
|
Type=Application
|
||||||
|
Categories=Graphics;
|
||||||
|
```
|
||||||
|
|
||||||
|
Or for one-off launches from a shell:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
BOORU_VIEWER_NO_HYPR_RULES=1 booru-viewer
|
||||||
|
```
|
||||||
|
|
||||||
### Dependencies
|
### Dependencies
|
||||||
|
|
||||||
|
|||||||
@ -1,18 +0,0 @@
|
|||||||
"""booru_viewer.core package — pure-Python data + I/O layer (no Qt).
|
|
||||||
|
|
||||||
Side effect on import: install the project-wide PIL decompression-bomb
|
|
||||||
cap. PIL's default warns silently above ~89M pixels; we want a hard
|
|
||||||
fail above 256M pixels so DecompressionBombError can be caught and
|
|
||||||
treated as a download failure.
|
|
||||||
|
|
||||||
Setting it here (rather than as a side effect of importing
|
|
||||||
``core.cache``) means any code path that touches PIL via any
|
|
||||||
``booru_viewer.core.*`` submodule gets the cap installed first,
|
|
||||||
regardless of submodule import order. Audit finding #8.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from PIL import Image as _PILImage
|
|
||||||
|
|
||||||
_PILImage.MAX_IMAGE_PIXELS = 256 * 1024 * 1024
|
|
||||||
|
|
||||||
del _PILImage
|
|
||||||
@ -1,150 +0,0 @@
|
|||||||
"""Network-safety helpers for httpx clients.
|
|
||||||
|
|
||||||
Keeps SSRF guards and secret redaction in one place so every httpx
|
|
||||||
client in the project can share a single implementation. All helpers
|
|
||||||
here are pure stdlib + httpx; no Qt, no project-side imports.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import ipaddress
|
|
||||||
import socket
|
|
||||||
from typing import Any, Mapping
|
|
||||||
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# SSRF guard — finding #1
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
_BLOCKED_V4 = [
|
|
||||||
ipaddress.ip_network("0.0.0.0/8"), # this-network
|
|
||||||
ipaddress.ip_network("10.0.0.0/8"), # RFC1918
|
|
||||||
ipaddress.ip_network("100.64.0.0/10"), # CGNAT
|
|
||||||
ipaddress.ip_network("127.0.0.0/8"), # loopback
|
|
||||||
ipaddress.ip_network("169.254.0.0/16"), # link-local (incl. 169.254.169.254 metadata)
|
|
||||||
ipaddress.ip_network("172.16.0.0/12"), # RFC1918
|
|
||||||
ipaddress.ip_network("192.0.0.0/24"), # IETF protocol assignments
|
|
||||||
ipaddress.ip_network("192.168.0.0/16"), # RFC1918
|
|
||||||
ipaddress.ip_network("198.18.0.0/15"), # benchmark
|
|
||||||
ipaddress.ip_network("224.0.0.0/4"), # multicast
|
|
||||||
ipaddress.ip_network("240.0.0.0/4"), # reserved
|
|
||||||
]
|
|
||||||
|
|
||||||
_BLOCKED_V6 = [
|
|
||||||
ipaddress.ip_network("::1/128"), # loopback
|
|
||||||
ipaddress.ip_network("::/128"), # unspecified
|
|
||||||
ipaddress.ip_network("::ffff:0:0/96"), # IPv4-mapped (covers v4 via v6)
|
|
||||||
ipaddress.ip_network("64:ff9b::/96"), # well-known NAT64
|
|
||||||
ipaddress.ip_network("fc00::/7"), # unique local
|
|
||||||
ipaddress.ip_network("fe80::/10"), # link-local
|
|
||||||
ipaddress.ip_network("ff00::/8"), # multicast
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def _is_blocked_ip(ip: ipaddress._BaseAddress) -> bool:
|
|
||||||
nets = _BLOCKED_V4 if isinstance(ip, ipaddress.IPv4Address) else _BLOCKED_V6
|
|
||||||
return any(ip in net for net in nets)
|
|
||||||
|
|
||||||
|
|
||||||
def check_public_host(host: str) -> None:
|
|
||||||
"""Raise httpx.RequestError if ``host`` is (or resolves to) a non-public IP.
|
|
||||||
|
|
||||||
Blocks loopback, RFC1918, link-local (including the 169.254.169.254
|
|
||||||
cloud-metadata endpoint), unique-local v6, and similar. Used by both
|
|
||||||
the initial request and every redirect hop — see
|
|
||||||
``validate_public_request`` for the async wrapper.
|
|
||||||
"""
|
|
||||||
if not host:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
ip = ipaddress.ip_address(host)
|
|
||||||
except ValueError:
|
|
||||||
ip = None
|
|
||||||
if ip is not None:
|
|
||||||
if _is_blocked_ip(ip):
|
|
||||||
raise httpx.RequestError(f"blocked address: {host}")
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
infos = socket.getaddrinfo(host, None)
|
|
||||||
except socket.gaierror as e:
|
|
||||||
raise httpx.RequestError(f"DNS resolution failed for {host}: {e}")
|
|
||||||
seen: set[str] = set()
|
|
||||||
for info in infos:
|
|
||||||
addr = info[4][0]
|
|
||||||
if addr in seen:
|
|
||||||
continue
|
|
||||||
seen.add(addr)
|
|
||||||
try:
|
|
||||||
resolved = ipaddress.ip_address(addr.split("%", 1)[0])
|
|
||||||
except ValueError:
|
|
||||||
continue
|
|
||||||
if _is_blocked_ip(resolved):
|
|
||||||
raise httpx.RequestError(
|
|
||||||
f"blocked request target {host} -> {addr}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def validate_public_request(request: httpx.Request) -> None:
|
|
||||||
"""httpx request event hook — rejects private/metadata targets.
|
|
||||||
|
|
||||||
Fires on every hop including redirects. The initial request to a
|
|
||||||
user-configured booru base_url is also validated; this intentionally
|
|
||||||
blocks users from pointing the app at ``http://localhost/`` or an
|
|
||||||
RFC1918 address (behavior change from v0.2.5).
|
|
||||||
|
|
||||||
Limitation: TOCTOU / DNS rebinding. We resolve the host here, but
|
|
||||||
the kernel will re-resolve when the TCP connection actually opens,
|
|
||||||
and a rebinder that returns a public IP on first query and a
|
|
||||||
private IP on the second can bypass this hook. The project's threat
|
|
||||||
model is a *malicious booru returning a 3xx to a private address* —
|
|
||||||
not an active rebinder controlling the DNS recursor — so this check
|
|
||||||
is the intended defense line. If the threat model ever widens, the
|
|
||||||
follow-up is a custom httpx transport that validates post-connect.
|
|
||||||
"""
|
|
||||||
host = request.url.host
|
|
||||||
if not host:
|
|
||||||
return
|
|
||||||
await asyncio.to_thread(check_public_host, host)
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Credential redaction — finding #3
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
# Case-sensitive; matches the literal param names every booru client
|
|
||||||
# uses today (verified via grep across danbooru/e621/gelbooru/moebooru).
|
|
||||||
SECRET_KEYS: frozenset[str] = frozenset({
|
|
||||||
"login",
|
|
||||||
"api_key",
|
|
||||||
"user_id",
|
|
||||||
"password_hash",
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
def redact_url(url: str) -> str:
|
|
||||||
"""Replace secret query params with ``***`` in a URL string.
|
|
||||||
|
|
||||||
Preserves ordering and non-secret params. Empty-query URLs pass
|
|
||||||
through unchanged.
|
|
||||||
"""
|
|
||||||
parts = urlsplit(url)
|
|
||||||
if not parts.query:
|
|
||||||
return url
|
|
||||||
pairs = parse_qsl(parts.query, keep_blank_values=True)
|
|
||||||
redacted = [(k, "***" if k in SECRET_KEYS else v) for k, v in pairs]
|
|
||||||
return urlunsplit((
|
|
||||||
parts.scheme,
|
|
||||||
parts.netloc,
|
|
||||||
parts.path,
|
|
||||||
urlencode(redacted),
|
|
||||||
parts.fragment,
|
|
||||||
))
|
|
||||||
|
|
||||||
|
|
||||||
def redact_params(params: Mapping[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Return a copy of ``params`` with secret keys replaced by ``***``."""
|
|
||||||
return {k: ("***" if k in SECRET_KEYS else v) for k, v in params.items()}
|
|
||||||
@ -10,9 +10,8 @@ from dataclasses import dataclass, field
|
|||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
|
|
||||||
from ..config import DEFAULT_PAGE_SIZE
|
from ..config import USER_AGENT, DEFAULT_PAGE_SIZE
|
||||||
from ..cache import log_connection
|
from ..cache import log_connection
|
||||||
from ._safety import redact_url
|
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
|
|
||||||
@ -100,11 +99,16 @@ class BooruClient(ABC):
|
|||||||
return c
|
return c
|
||||||
# Slow path: build it. Lock so two coroutines on the same loop don't
|
# Slow path: build it. Lock so two coroutines on the same loop don't
|
||||||
# both construct + leak.
|
# both construct + leak.
|
||||||
from ..http import make_client
|
|
||||||
with BooruClient._shared_client_lock:
|
with BooruClient._shared_client_lock:
|
||||||
c = BooruClient._shared_client
|
c = BooruClient._shared_client
|
||||||
if c is None or c.is_closed:
|
if c is None or c.is_closed:
|
||||||
c = make_client(extra_request_hooks=[self._log_request])
|
c = httpx.AsyncClient(
|
||||||
|
headers={"User-Agent": USER_AGENT},
|
||||||
|
follow_redirects=True,
|
||||||
|
timeout=20.0,
|
||||||
|
event_hooks={"request": [self._log_request]},
|
||||||
|
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||||
|
)
|
||||||
BooruClient._shared_client = c
|
BooruClient._shared_client = c
|
||||||
return c
|
return c
|
||||||
|
|
||||||
@ -123,11 +127,7 @@ class BooruClient(ABC):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def _log_request(request: httpx.Request) -> None:
|
async def _log_request(request: httpx.Request) -> None:
|
||||||
# Redact api_key / login / user_id / password_hash from the
|
log_connection(str(request.url))
|
||||||
# URL before it ever crosses the function boundary — the
|
|
||||||
# rendered URL would otherwise land in tracebacks, debug logs,
|
|
||||||
# or in-app connection-log views as plaintext.
|
|
||||||
log_connection(redact_url(str(request.url)))
|
|
||||||
|
|
||||||
_RETRYABLE_STATUS = frozenset({429, 503})
|
_RETRYABLE_STATUS = frozenset({429, 503})
|
||||||
|
|
||||||
@ -152,18 +152,9 @@ class BooruClient(ABC):
|
|||||||
wait = 2.0
|
wait = 2.0
|
||||||
log.info(f"Retrying {url} after {resp.status_code} (wait {wait}s)")
|
log.info(f"Retrying {url} after {resp.status_code} (wait {wait}s)")
|
||||||
await asyncio.sleep(wait)
|
await asyncio.sleep(wait)
|
||||||
except (
|
except (httpx.TimeoutException, httpx.ConnectError, httpx.NetworkError) as e:
|
||||||
httpx.TimeoutException,
|
# Retry on transient DNS/TCP/timeout failures. Without this,
|
||||||
httpx.ConnectError,
|
# a single DNS hiccup or RST blows up the whole search.
|
||||||
httpx.NetworkError,
|
|
||||||
httpx.RemoteProtocolError,
|
|
||||||
httpx.ReadError,
|
|
||||||
) as e:
|
|
||||||
# Retry on transient DNS/TCP/timeout failures plus
|
|
||||||
# mid-response drops — RemoteProtocolError and ReadError
|
|
||||||
# are common when an overloaded booru closes the TCP
|
|
||||||
# connection between headers and body. Without them a
|
|
||||||
# single dropped response blows up the whole search.
|
|
||||||
if attempt == 1:
|
if attempt == 1:
|
||||||
raise
|
raise
|
||||||
log.info(f"Retrying {url} after {type(e).__name__}: {e}")
|
log.info(f"Retrying {url} after {type(e).__name__}: {e}")
|
||||||
|
|||||||
@ -76,13 +76,6 @@ _LABEL_MAP: dict[str, str] = {
|
|||||||
"style": "Style",
|
"style": "Style",
|
||||||
}
|
}
|
||||||
|
|
||||||
# Sentinel cap on the HTML body the regex walks over. A real
|
|
||||||
# Gelbooru/Moebooru post page is ~30-150KB; capping at 2MB gives
|
|
||||||
# any legit page comfortable headroom while preventing a hostile
|
|
||||||
# server from feeding the regex hundreds of MB and pegging CPU.
|
|
||||||
# Audit finding #14.
|
|
||||||
_FETCH_POST_HTML_CAP = 2 * 1024 * 1024
|
|
||||||
|
|
||||||
# Gelbooru tag DAPI integer code -> Capitalized label (for fetch_via_tag_api)
|
# Gelbooru tag DAPI integer code -> Capitalized label (for fetch_via_tag_api)
|
||||||
_GELBOORU_TYPE_MAP: dict[int, str] = {
|
_GELBOORU_TYPE_MAP: dict[int, str] = {
|
||||||
0: "General",
|
0: "General",
|
||||||
@ -213,31 +206,6 @@ class CategoryFetcher:
|
|||||||
and bool(self._client.api_user)
|
and bool(self._client.api_user)
|
||||||
)
|
)
|
||||||
|
|
||||||
def _build_tag_api_params(self, chunk: list[str]) -> dict:
|
|
||||||
"""Params dict for a tag-DAPI batch request.
|
|
||||||
|
|
||||||
The ``lstrip("&")`` and ``startswith("api_key=")`` guards
|
|
||||||
accommodate users who paste their credentials with a leading
|
|
||||||
``&`` or as ``api_key=VALUE`` — either form gets normalised
|
|
||||||
to a clean name→value mapping.
|
|
||||||
"""
|
|
||||||
params: dict = {
|
|
||||||
"page": "dapi",
|
|
||||||
"s": "tag",
|
|
||||||
"q": "index",
|
|
||||||
"json": "1",
|
|
||||||
"names": " ".join(chunk),
|
|
||||||
"limit": len(chunk),
|
|
||||||
}
|
|
||||||
if self._client.api_key and self._client.api_user:
|
|
||||||
key = self._client.api_key.strip().lstrip("&")
|
|
||||||
user = self._client.api_user.strip().lstrip("&")
|
|
||||||
if key and not key.startswith("api_key="):
|
|
||||||
params["api_key"] = key
|
|
||||||
if user and not user.startswith("user_id="):
|
|
||||||
params["user_id"] = user
|
|
||||||
return params
|
|
||||||
|
|
||||||
async def fetch_via_tag_api(self, posts: list["Post"]) -> int:
|
async def fetch_via_tag_api(self, posts: list["Post"]) -> int:
|
||||||
"""Batch-fetch tag types via the booru's tag DAPI.
|
"""Batch-fetch tag types via the booru's tag DAPI.
|
||||||
|
|
||||||
@ -269,7 +237,21 @@ class CategoryFetcher:
|
|||||||
BATCH = 500
|
BATCH = 500
|
||||||
for i in range(0, len(missing), BATCH):
|
for i in range(0, len(missing), BATCH):
|
||||||
chunk = missing[i:i + BATCH]
|
chunk = missing[i:i + BATCH]
|
||||||
params = self._build_tag_api_params(chunk)
|
params: dict = {
|
||||||
|
"page": "dapi",
|
||||||
|
"s": "tag",
|
||||||
|
"q": "index",
|
||||||
|
"json": "1",
|
||||||
|
"names": " ".join(chunk),
|
||||||
|
"limit": len(chunk),
|
||||||
|
}
|
||||||
|
if self._client.api_key and self._client.api_user:
|
||||||
|
key = self._client.api_key.strip().lstrip("&")
|
||||||
|
user = self._client.api_user.strip().lstrip("&")
|
||||||
|
if key and not key.startswith("api_key="):
|
||||||
|
params["api_key"] = key
|
||||||
|
if user and not user.startswith("user_id="):
|
||||||
|
params["user_id"] = user
|
||||||
try:
|
try:
|
||||||
resp = await self._client._request("GET", tag_api_url, params=params)
|
resp = await self._client._request("GET", tag_api_url, params=params)
|
||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
@ -308,12 +290,7 @@ class CategoryFetcher:
|
|||||||
log.warning("Category HTML fetch for #%d failed: %s: %s",
|
log.warning("Category HTML fetch for #%d failed: %s: %s",
|
||||||
post.id, type(e).__name__, e)
|
post.id, type(e).__name__, e)
|
||||||
return False
|
return False
|
||||||
# Cap the HTML the regex walks over (audit #14). Truncation
|
cats, labels = _parse_post_html(resp.text)
|
||||||
# vs. full read: the body is already buffered by httpx, so
|
|
||||||
# this doesn't prevent a memory hit — but it does cap the
|
|
||||||
# CPU spent in _TAG_ELEMENT_RE.finditer for a hostile server
|
|
||||||
# returning hundreds of MB of HTML.
|
|
||||||
cats, labels = _parse_post_html(resp.text[:_FETCH_POST_HTML_CAP])
|
|
||||||
if not cats:
|
if not cats:
|
||||||
return False
|
return False
|
||||||
post.tag_categories = _canonical_order(cats)
|
post.tag_categories = _canonical_order(cats)
|
||||||
@ -357,41 +334,29 @@ class CategoryFetcher:
|
|||||||
async def _do_ensure(self, post: "Post") -> None:
|
async def _do_ensure(self, post: "Post") -> None:
|
||||||
"""Inner dispatch for ensure_categories.
|
"""Inner dispatch for ensure_categories.
|
||||||
|
|
||||||
Dispatch:
|
Tries the batch API when it's known to work (True) OR not yet
|
||||||
- ``_batch_api_works is True``: call ``fetch_via_tag_api``
|
probed (None). The result doubles as an inline probe: if the
|
||||||
directly. If it populates categories we're done; a
|
batch produced categories, it works (save True); if it
|
||||||
transient failure leaves them empty and we fall through
|
returned nothing useful, it's broken (save False). Falls
|
||||||
to the HTML scrape.
|
through to HTML scrape as the universal fallback.
|
||||||
- ``_batch_api_works is None``: route through
|
|
||||||
``_probe_batch_api``, which only flips the flag to
|
|
||||||
True/False on a clean HTTP response. Transient errors
|
|
||||||
leave it ``None`` so the next call retries the probe.
|
|
||||||
Previously this path called ``fetch_via_tag_api`` and
|
|
||||||
inferred the result from empty ``tag_categories`` — but
|
|
||||||
``fetch_via_tag_api`` swallows per-chunk failures with
|
|
||||||
``continue``, so a mid-call network drop poisoned
|
|
||||||
``_batch_api_works = False`` for the site permanently.
|
|
||||||
- ``_batch_api_works is False`` or unavailable: straight
|
|
||||||
to HTML scrape.
|
|
||||||
"""
|
"""
|
||||||
if self._batch_api_works is True and self._batch_api_available():
|
if self._batch_api_works is not False and self._batch_api_available():
|
||||||
try:
|
try:
|
||||||
await self.fetch_via_tag_api([post])
|
await self.fetch_via_tag_api([post])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.debug("Batch API ensure failed (transient): %s", e)
|
log.debug("Batch API ensure failed (transient): %s", e)
|
||||||
|
# Leave _batch_api_works at None → retry next call
|
||||||
|
else:
|
||||||
if post.tag_categories:
|
if post.tag_categories:
|
||||||
|
if self._batch_api_works is None:
|
||||||
|
self._batch_api_works = True
|
||||||
|
self._save_probe_result(True)
|
||||||
return
|
return
|
||||||
elif self._batch_api_works is None and self._batch_api_available():
|
# Batch returned nothing → broken API (Rule34) or
|
||||||
try:
|
# the specific post has only unknown tags (very rare).
|
||||||
result = await self._probe_batch_api([post])
|
if self._batch_api_works is None:
|
||||||
except Exception as e:
|
self._batch_api_works = False
|
||||||
log.info("Batch API probe error (will retry next call): %s: %s",
|
self._save_probe_result(False)
|
||||||
type(e).__name__, e)
|
|
||||||
result = None
|
|
||||||
if result is True:
|
|
||||||
# Probe succeeded — results cached and post composed.
|
|
||||||
return
|
|
||||||
# result is False (broken API) or None (transient) — fall through
|
|
||||||
# HTML scrape fallback (works on Rule34/Safebooru.org/Moebooru,
|
# HTML scrape fallback (works on Rule34/Safebooru.org/Moebooru,
|
||||||
# returns empty on Gelbooru proper which is fine because the
|
# returns empty on Gelbooru proper which is fine because the
|
||||||
# batch path above covers Gelbooru)
|
# batch path above covers Gelbooru)
|
||||||
@ -503,7 +468,21 @@ class CategoryFetcher:
|
|||||||
|
|
||||||
# Send one batch request
|
# Send one batch request
|
||||||
chunk = missing[:500]
|
chunk = missing[:500]
|
||||||
params = self._build_tag_api_params(chunk)
|
params: dict = {
|
||||||
|
"page": "dapi",
|
||||||
|
"s": "tag",
|
||||||
|
"q": "index",
|
||||||
|
"json": "1",
|
||||||
|
"names": " ".join(chunk),
|
||||||
|
"limit": len(chunk),
|
||||||
|
}
|
||||||
|
if self._client.api_key and self._client.api_user:
|
||||||
|
key = self._client.api_key.strip().lstrip("&")
|
||||||
|
user = self._client.api_user.strip().lstrip("&")
|
||||||
|
if key and not key.startswith("api_key="):
|
||||||
|
params["api_key"] = key
|
||||||
|
if user and not user.startswith("user_id="):
|
||||||
|
params["user_id"] = user
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resp = await self._client._request("GET", tag_api_url, params=params)
|
resp = await self._client._request("GET", tag_api_url, params=params)
|
||||||
@ -602,9 +581,6 @@ def _parse_tag_response(resp) -> list[tuple[str, int]]:
|
|||||||
return []
|
return []
|
||||||
out: list[tuple[str, int]] = []
|
out: list[tuple[str, int]] = []
|
||||||
if body.startswith("<"):
|
if body.startswith("<"):
|
||||||
if "<!DOCTYPE" in body or "<!ENTITY" in body:
|
|
||||||
log.warning("XML response contains DOCTYPE/ENTITY, skipping")
|
|
||||||
return []
|
|
||||||
try:
|
try:
|
||||||
root = ET.fromstring(body)
|
root = ET.fromstring(body)
|
||||||
except ET.ParseError as e:
|
except ET.ParseError as e:
|
||||||
|
|||||||
@ -5,7 +5,6 @@ from __future__ import annotations
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
from ..config import DEFAULT_PAGE_SIZE
|
from ..config import DEFAULT_PAGE_SIZE
|
||||||
from ._safety import redact_params
|
|
||||||
from .base import BooruClient, Post, _parse_date
|
from .base import BooruClient, Post, _parse_date
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
@ -24,7 +23,7 @@ class DanbooruClient(BooruClient):
|
|||||||
|
|
||||||
url = f"{self.base_url}/posts.json"
|
url = f"{self.base_url}/posts.json"
|
||||||
log.info(f"GET {url}")
|
log.info(f"GET {url}")
|
||||||
log.debug(f" params: {redact_params(params)}")
|
log.debug(f" params: {params}")
|
||||||
resp = await self._request("GET", url, params=params)
|
resp = await self._request("GET", url, params=params)
|
||||||
log.info(f" -> {resp.status_code}")
|
log.info(f" -> {resp.status_code}")
|
||||||
if resp.status_code != 200:
|
if resp.status_code != 200:
|
||||||
|
|||||||
@ -4,7 +4,9 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from ..http import make_client
|
import httpx
|
||||||
|
|
||||||
|
from ..config import USER_AGENT
|
||||||
from .danbooru import DanbooruClient
|
from .danbooru import DanbooruClient
|
||||||
from .gelbooru import GelbooruClient
|
from .gelbooru import GelbooruClient
|
||||||
from .moebooru import MoebooruClient
|
from .moebooru import MoebooruClient
|
||||||
@ -26,12 +28,16 @@ async def detect_site_type(
|
|||||||
url = url.rstrip("/")
|
url = url.rstrip("/")
|
||||||
|
|
||||||
from .base import BooruClient as _BC
|
from .base import BooruClient as _BC
|
||||||
# Reuse shared client for site detection. Event hooks mirror
|
# Reuse shared client for site detection
|
||||||
# BooruClient.client so detection requests get the same SSRF
|
|
||||||
# validation and connection logging as regular API calls.
|
|
||||||
if _BC._shared_client is None or _BC._shared_client.is_closed:
|
if _BC._shared_client is None or _BC._shared_client.is_closed:
|
||||||
_BC._shared_client = make_client(extra_request_hooks=[_BC._log_request])
|
_BC._shared_client = httpx.AsyncClient(
|
||||||
|
headers={"User-Agent": USER_AGENT},
|
||||||
|
follow_redirects=True,
|
||||||
|
timeout=20.0,
|
||||||
|
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||||
|
)
|
||||||
client = _BC._shared_client
|
client = _BC._shared_client
|
||||||
|
if True: # keep indent level
|
||||||
# Try Danbooru / e621 first — /posts.json is a definitive endpoint
|
# Try Danbooru / e621 first — /posts.json is a definitive endpoint
|
||||||
try:
|
try:
|
||||||
params: dict = {"limit": 1}
|
params: dict = {"limit": 1}
|
||||||
|
|||||||
@ -8,7 +8,6 @@ import threading
|
|||||||
import httpx
|
import httpx
|
||||||
|
|
||||||
from ..config import DEFAULT_PAGE_SIZE, USER_AGENT
|
from ..config import DEFAULT_PAGE_SIZE, USER_AGENT
|
||||||
from ._safety import redact_params, validate_public_request
|
|
||||||
from .base import BooruClient, Post, _parse_date
|
from .base import BooruClient, Post, _parse_date
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
@ -48,12 +47,6 @@ class E621Client(BooruClient):
|
|||||||
headers={"User-Agent": ua},
|
headers={"User-Agent": ua},
|
||||||
follow_redirects=True,
|
follow_redirects=True,
|
||||||
timeout=20.0,
|
timeout=20.0,
|
||||||
event_hooks={
|
|
||||||
"request": [
|
|
||||||
validate_public_request,
|
|
||||||
BooruClient._log_request,
|
|
||||||
],
|
|
||||||
},
|
|
||||||
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||||
)
|
)
|
||||||
E621Client._e621_client = c
|
E621Client._e621_client = c
|
||||||
@ -84,7 +77,7 @@ class E621Client(BooruClient):
|
|||||||
|
|
||||||
url = f"{self.base_url}/posts.json"
|
url = f"{self.base_url}/posts.json"
|
||||||
log.info(f"GET {url}")
|
log.info(f"GET {url}")
|
||||||
log.debug(f" params: {redact_params(params)}")
|
log.debug(f" params: {params}")
|
||||||
resp = await self._request("GET", url, params=params)
|
resp = await self._request("GET", url, params=params)
|
||||||
log.info(f" -> {resp.status_code}")
|
log.info(f" -> {resp.status_code}")
|
||||||
if resp.status_code != 200:
|
if resp.status_code != 200:
|
||||||
@ -92,7 +85,7 @@ class E621Client(BooruClient):
|
|||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
try:
|
try:
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
except ValueError as e:
|
except Exception as e:
|
||||||
log.warning("e621 search JSON parse failed: %s: %s — body: %s",
|
log.warning("e621 search JSON parse failed: %s: %s — body: %s",
|
||||||
type(e).__name__, e, resp.text[:200])
|
type(e).__name__, e, resp.text[:200])
|
||||||
return []
|
return []
|
||||||
|
|||||||
@ -5,7 +5,6 @@ from __future__ import annotations
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
from ..config import DEFAULT_PAGE_SIZE
|
from ..config import DEFAULT_PAGE_SIZE
|
||||||
from ._safety import redact_params
|
|
||||||
from .base import BooruClient, Post, _parse_date
|
from .base import BooruClient, Post, _parse_date
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
@ -44,7 +43,7 @@ class GelbooruClient(BooruClient):
|
|||||||
|
|
||||||
url = f"{self.base_url}/index.php"
|
url = f"{self.base_url}/index.php"
|
||||||
log.info(f"GET {url}")
|
log.info(f"GET {url}")
|
||||||
log.debug(f" params: {redact_params(params)}")
|
log.debug(f" params: {params}")
|
||||||
resp = await self._request("GET", url, params=params)
|
resp = await self._request("GET", url, params=params)
|
||||||
log.info(f" -> {resp.status_code}")
|
log.info(f" -> {resp.status_code}")
|
||||||
if resp.status_code != 200:
|
if resp.status_code != 200:
|
||||||
|
|||||||
@ -28,7 +28,7 @@ class MoebooruClient(BooruClient):
|
|||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
try:
|
try:
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
except ValueError as e:
|
except Exception as e:
|
||||||
log.warning("Moebooru search JSON parse failed: %s: %s — body: %s",
|
log.warning("Moebooru search JSON parse failed: %s: %s — body: %s",
|
||||||
type(e).__name__, e, resp.text[:200])
|
type(e).__name__, e, resp.text[:200])
|
||||||
return []
|
return []
|
||||||
|
|||||||
@ -9,7 +9,7 @@ import os
|
|||||||
import tempfile
|
import tempfile
|
||||||
import threading
|
import threading
|
||||||
import zipfile
|
import zipfile
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict, defaultdict
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
@ -17,7 +17,7 @@ from urllib.parse import urlparse
|
|||||||
import httpx
|
import httpx
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
|
|
||||||
from .config import cache_dir, thumbnails_dir
|
from .config import cache_dir, thumbnails_dir, USER_AGENT
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
|
|
||||||
@ -33,8 +33,10 @@ MAX_DOWNLOAD_BYTES = 500 * 1024 * 1024 # 500 MB
|
|||||||
# regression risk of the streaming rewrite is zero.
|
# regression risk of the streaming rewrite is zero.
|
||||||
STREAM_TO_DISK_THRESHOLD = 50 * 1024 * 1024 # 50 MB
|
STREAM_TO_DISK_THRESHOLD = 50 * 1024 * 1024 # 50 MB
|
||||||
|
|
||||||
# PIL's MAX_IMAGE_PIXELS cap is set in core/__init__.py so any
|
# Cap PIL's auto-DOS guard at 256M pixels (~1 GB raw). Default warns
|
||||||
# `booru_viewer.core.*` import installs it first — see audit #8.
|
# silently above ~89M; we want a hard fail so DecompressionBombError
|
||||||
|
# can be caught and treated as a download failure.
|
||||||
|
Image.MAX_IMAGE_PIXELS = 256 * 1024 * 1024
|
||||||
|
|
||||||
# Defends `_convert_ugoira_to_gif` against zip bombs. A real ugoira is
|
# Defends `_convert_ugoira_to_gif` against zip bombs. A real ugoira is
|
||||||
# typically <500 frames at 1080p; these caps comfortably allow legit
|
# typically <500 frames at 1080p; these caps comfortably allow legit
|
||||||
@ -77,14 +79,18 @@ def _get_shared_client(referer: str = "") -> httpx.AsyncClient:
|
|||||||
c = _shared_client
|
c = _shared_client
|
||||||
if c is not None and not c.is_closed:
|
if c is not None and not c.is_closed:
|
||||||
return c
|
return c
|
||||||
# Lazy import: core.http imports from core.api._safety, which
|
|
||||||
# lives inside the api package that imports this module, so a
|
|
||||||
# top-level import would circular through cache.py's load.
|
|
||||||
from .http import make_client
|
|
||||||
with _shared_client_lock:
|
with _shared_client_lock:
|
||||||
c = _shared_client
|
c = _shared_client
|
||||||
if c is None or c.is_closed:
|
if c is None or c.is_closed:
|
||||||
c = make_client(timeout=60.0, accept="image/*,video/*,*/*")
|
c = httpx.AsyncClient(
|
||||||
|
headers={
|
||||||
|
"User-Agent": USER_AGENT,
|
||||||
|
"Accept": "image/*,video/*,*/*",
|
||||||
|
},
|
||||||
|
follow_redirects=True,
|
||||||
|
timeout=60.0,
|
||||||
|
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||||
|
)
|
||||||
_shared_client = c
|
_shared_client = c
|
||||||
return c
|
return c
|
||||||
|
|
||||||
@ -113,33 +119,6 @@ _IMAGE_MAGIC = {
|
|||||||
b'PK\x03\x04': True, # ZIP (ugoira)
|
b'PK\x03\x04': True, # ZIP (ugoira)
|
||||||
}
|
}
|
||||||
|
|
||||||
# Header size used by both _looks_like_media (in-memory bytes) and the
|
|
||||||
# in-stream early validator in _do_download. 16 bytes covers JPEG (3),
|
|
||||||
# PNG (8), GIF (6), WebP (12), MP4/MOV (8), WebM/MKV (4), and ZIP (4)
|
|
||||||
# magics with comfortable margin.
|
|
||||||
_MEDIA_HEADER_MIN = 16
|
|
||||||
|
|
||||||
|
|
||||||
def _looks_like_media(header: bytes) -> bool:
|
|
||||||
"""Return True if the leading bytes match a known media magic.
|
|
||||||
|
|
||||||
Conservative on the empty case: an empty header is "unknown",
|
|
||||||
not "valid", because the streaming validator (audit #10) calls us
|
|
||||||
before any bytes have arrived means the server returned nothing
|
|
||||||
useful. The on-disk validator wraps this with an OSError fallback
|
|
||||||
that returns True instead — see _is_valid_media.
|
|
||||||
"""
|
|
||||||
if not header:
|
|
||||||
return False
|
|
||||||
if header.startswith(b'<') or header.startswith(b'<!'):
|
|
||||||
return False
|
|
||||||
for magic in _IMAGE_MAGIC:
|
|
||||||
if header.startswith(magic):
|
|
||||||
return True
|
|
||||||
# Not a known magic and not HTML: treat as ok (some boorus serve
|
|
||||||
# exotic-but-legal containers we don't enumerate above).
|
|
||||||
return b'<html' not in header.lower() and b'<!doctype' not in header.lower()
|
|
||||||
|
|
||||||
|
|
||||||
def _is_valid_media(path: Path) -> bool:
|
def _is_valid_media(path: Path) -> bool:
|
||||||
"""Check if a file looks like actual media, not an HTML error page.
|
"""Check if a file looks like actual media, not an HTML error page.
|
||||||
@ -151,11 +130,18 @@ def _is_valid_media(path: Path) -> bool:
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
with open(path, "rb") as f:
|
with open(path, "rb") as f:
|
||||||
header = f.read(_MEDIA_HEADER_MIN)
|
header = f.read(16)
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
log.warning("Cannot read %s for validation (%s); treating as valid", path, e)
|
log.warning("Cannot read %s for validation (%s); treating as valid", path, e)
|
||||||
return True
|
return True
|
||||||
return _looks_like_media(header)
|
if not header or header.startswith(b'<') or header.startswith(b'<!'):
|
||||||
|
return False
|
||||||
|
# Check for known magic bytes
|
||||||
|
for magic in _IMAGE_MAGIC:
|
||||||
|
if header.startswith(magic):
|
||||||
|
return True
|
||||||
|
# If not a known type but not HTML, assume it's ok
|
||||||
|
return b'<html' not in header.lower() and b'<!doctype' not in header.lower()
|
||||||
|
|
||||||
|
|
||||||
def _ext_from_url(url: str) -> str:
|
def _ext_from_url(url: str) -> str:
|
||||||
@ -285,59 +271,7 @@ def _referer_for(parsed) -> str:
|
|||||||
# does the actual download; the other waits and reads the cached file.
|
# does the actual download; the other waits and reads the cached file.
|
||||||
# Loop-bound, but the existing module is already loop-bound, so this
|
# Loop-bound, but the existing module is already loop-bound, so this
|
||||||
# doesn't make anything worse and is fixed cleanly in PR2.
|
# doesn't make anything worse and is fixed cleanly in PR2.
|
||||||
#
|
_url_locks: dict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
|
||||||
# Capped at _URL_LOCKS_MAX entries (audit finding #5). The previous
|
|
||||||
# defaultdict grew unbounded over a long browsing session, and an
|
|
||||||
# adversarial booru returning cache-buster query strings could turn
|
|
||||||
# the leak into an OOM DoS.
|
|
||||||
_URL_LOCKS_MAX = 4096
|
|
||||||
_url_locks: "OrderedDict[str, asyncio.Lock]" = OrderedDict()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_url_lock(h: str) -> asyncio.Lock:
|
|
||||||
"""Return the asyncio.Lock for URL hash *h*, creating it if needed.
|
|
||||||
|
|
||||||
Touches LRU order on every call so frequently-accessed hashes
|
|
||||||
survive eviction. The first call for a new hash inserts it and
|
|
||||||
triggers _evict_url_locks() to trim back toward the cap.
|
|
||||||
"""
|
|
||||||
lock = _url_locks.get(h)
|
|
||||||
if lock is None:
|
|
||||||
lock = asyncio.Lock()
|
|
||||||
_url_locks[h] = lock
|
|
||||||
_evict_url_locks(skip=h)
|
|
||||||
else:
|
|
||||||
_url_locks.move_to_end(h)
|
|
||||||
return lock
|
|
||||||
|
|
||||||
|
|
||||||
def _evict_url_locks(skip: str) -> None:
|
|
||||||
"""Trim _url_locks back toward _URL_LOCKS_MAX, oldest first.
|
|
||||||
|
|
||||||
Each pass skips:
|
|
||||||
- the hash *skip* we just inserted (it's the youngest — evicting
|
|
||||||
it immediately would be self-defeating), and
|
|
||||||
- any entry whose lock is currently held (we can't drop a lock
|
|
||||||
that a coroutine is mid-`async with` on without that coroutine
|
|
||||||
blowing up on exit).
|
|
||||||
|
|
||||||
Stops as soon as one pass finds no evictable entries — that
|
|
||||||
handles the edge case where every remaining entry is either
|
|
||||||
*skip* or currently held. In that state the cap is temporarily
|
|
||||||
exceeded; the next insertion will retry eviction.
|
|
||||||
"""
|
|
||||||
while len(_url_locks) > _URL_LOCKS_MAX:
|
|
||||||
evicted = False
|
|
||||||
for old_h in list(_url_locks.keys()):
|
|
||||||
if old_h == skip:
|
|
||||||
continue
|
|
||||||
if _url_locks[old_h].locked():
|
|
||||||
continue
|
|
||||||
_url_locks.pop(old_h, None)
|
|
||||||
evicted = True
|
|
||||||
break
|
|
||||||
if not evicted:
|
|
||||||
return
|
|
||||||
|
|
||||||
|
|
||||||
async def download_image(
|
async def download_image(
|
||||||
@ -354,7 +288,7 @@ async def download_image(
|
|||||||
filename = _url_hash(url) + _ext_from_url(url)
|
filename = _url_hash(url) + _ext_from_url(url)
|
||||||
local = dest_dir / filename
|
local = dest_dir / filename
|
||||||
|
|
||||||
async with _get_url_lock(_url_hash(url)):
|
async with _url_locks[_url_hash(url)]:
|
||||||
# Check if a ugoira zip was already converted to gif
|
# Check if a ugoira zip was already converted to gif
|
||||||
if local.suffix.lower() == ".zip":
|
if local.suffix.lower() == ".zip":
|
||||||
gif_path = local.with_suffix(".gif")
|
gif_path = local.with_suffix(".gif")
|
||||||
@ -440,30 +374,7 @@ async def _do_download(
|
|||||||
f"Download too large: {total} bytes (cap {MAX_DOWNLOAD_BYTES})"
|
f"Download too large: {total} bytes (cap {MAX_DOWNLOAD_BYTES})"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Audit #10: accumulate the leading bytes (≥16) before
|
if total >= STREAM_TO_DISK_THRESHOLD:
|
||||||
# committing to writing the rest. A hostile server that omits
|
|
||||||
# Content-Type and ignores the HTML check could otherwise
|
|
||||||
# stream up to MAX_DOWNLOAD_BYTES of garbage to disk before
|
|
||||||
# the post-download _is_valid_media check rejects and deletes
|
|
||||||
# it. We accumulate across chunks because slow servers (or
|
|
||||||
# chunked encoding with tiny chunks) can deliver fewer than
|
|
||||||
# 16 bytes in the first chunk and validation would false-fail.
|
|
||||||
use_large = total >= STREAM_TO_DISK_THRESHOLD
|
|
||||||
chunk_iter = resp.aiter_bytes(64 * 1024 if use_large else 8192)
|
|
||||||
|
|
||||||
header_buf = bytearray()
|
|
||||||
async for chunk in chunk_iter:
|
|
||||||
header_buf.extend(chunk)
|
|
||||||
if len(header_buf) >= _MEDIA_HEADER_MIN:
|
|
||||||
break
|
|
||||||
if len(header_buf) > MAX_DOWNLOAD_BYTES:
|
|
||||||
raise ValueError(
|
|
||||||
f"Download exceeded cap mid-stream: {len(header_buf)} bytes"
|
|
||||||
)
|
|
||||||
if not _looks_like_media(bytes(header_buf)):
|
|
||||||
raise ValueError("Downloaded data is not valid media")
|
|
||||||
|
|
||||||
if use_large:
|
|
||||||
# Large download: stream to tempfile in the same dir, atomic replace.
|
# Large download: stream to tempfile in the same dir, atomic replace.
|
||||||
local.parent.mkdir(parents=True, exist_ok=True)
|
local.parent.mkdir(parents=True, exist_ok=True)
|
||||||
fd, tmp_name = tempfile.mkstemp(
|
fd, tmp_name = tempfile.mkstemp(
|
||||||
@ -471,12 +382,9 @@ async def _do_download(
|
|||||||
)
|
)
|
||||||
tmp_path = Path(tmp_name)
|
tmp_path = Path(tmp_name)
|
||||||
try:
|
try:
|
||||||
downloaded = len(header_buf)
|
downloaded = 0
|
||||||
with os.fdopen(fd, "wb") as out:
|
with os.fdopen(fd, "wb") as out:
|
||||||
out.write(header_buf)
|
async for chunk in resp.aiter_bytes(64 * 1024):
|
||||||
if progress_callback:
|
|
||||||
progress_callback(downloaded, total)
|
|
||||||
async for chunk in chunk_iter:
|
|
||||||
out.write(chunk)
|
out.write(chunk)
|
||||||
downloaded += len(chunk)
|
downloaded += len(chunk)
|
||||||
if downloaded > MAX_DOWNLOAD_BYTES:
|
if downloaded > MAX_DOWNLOAD_BYTES:
|
||||||
@ -487,8 +395,6 @@ async def _do_download(
|
|||||||
progress_callback(downloaded, total)
|
progress_callback(downloaded, total)
|
||||||
os.replace(tmp_path, local)
|
os.replace(tmp_path, local)
|
||||||
except BaseException:
|
except BaseException:
|
||||||
# BaseException on purpose: also clean up the .part file on
|
|
||||||
# Ctrl-C / task cancellation, not just on Exception.
|
|
||||||
try:
|
try:
|
||||||
tmp_path.unlink(missing_ok=True)
|
tmp_path.unlink(missing_ok=True)
|
||||||
except OSError:
|
except OSError:
|
||||||
@ -496,11 +402,9 @@ async def _do_download(
|
|||||||
raise
|
raise
|
||||||
else:
|
else:
|
||||||
# Small/unknown size: buffer in memory, write whole.
|
# Small/unknown size: buffer in memory, write whole.
|
||||||
chunks: list[bytes] = [bytes(header_buf)]
|
chunks: list[bytes] = []
|
||||||
downloaded = len(header_buf)
|
downloaded = 0
|
||||||
if progress_callback:
|
async for chunk in resp.aiter_bytes(8192):
|
||||||
progress_callback(downloaded, total)
|
|
||||||
async for chunk in chunk_iter:
|
|
||||||
chunks.append(chunk)
|
chunks.append(chunk)
|
||||||
downloaded += len(chunk)
|
downloaded += len(chunk)
|
||||||
if downloaded > MAX_DOWNLOAD_BYTES:
|
if downloaded > MAX_DOWNLOAD_BYTES:
|
||||||
@ -592,36 +496,23 @@ def cache_file_count(include_thumbnails: bool = True) -> tuple[int, int]:
|
|||||||
return images, thumbs
|
return images, thumbs
|
||||||
|
|
||||||
|
|
||||||
def evict_oldest(max_bytes: int, protected_paths: set[str] | None = None,
|
def evict_oldest(max_bytes: int, protected_paths: set[str] | None = None) -> int:
|
||||||
current_bytes: int | None = None) -> int:
|
"""Delete oldest non-protected cached images until under max_bytes. Returns count deleted."""
|
||||||
"""Delete oldest non-protected cached images until under max_bytes. Returns count deleted.
|
|
||||||
|
|
||||||
*current_bytes* avoids a redundant directory scan when the caller
|
|
||||||
already measured the cache size.
|
|
||||||
"""
|
|
||||||
protected = protected_paths or set()
|
protected = protected_paths or set()
|
||||||
# Single directory walk: collect (path, stat) pairs, sort by mtime,
|
files = sorted(cache_dir().iterdir(), key=lambda f: f.stat().st_mtime)
|
||||||
# and sum sizes — avoids the previous pattern of iterdir() for the
|
|
||||||
# sort + a second full iterdir()+stat() inside cache_size_bytes().
|
|
||||||
entries = []
|
|
||||||
total = 0
|
|
||||||
for f in cache_dir().iterdir():
|
|
||||||
if not f.is_file():
|
|
||||||
continue
|
|
||||||
st = f.stat()
|
|
||||||
entries.append((f, st))
|
|
||||||
total += st.st_size
|
|
||||||
current = current_bytes if current_bytes is not None else total
|
|
||||||
entries.sort(key=lambda e: e[1].st_mtime)
|
|
||||||
deleted = 0
|
deleted = 0
|
||||||
for f, st in entries:
|
current = cache_size_bytes(include_thumbnails=False)
|
||||||
|
|
||||||
|
for f in files:
|
||||||
if current <= max_bytes:
|
if current <= max_bytes:
|
||||||
break
|
break
|
||||||
if str(f) in protected or f.suffix == ".part":
|
if not f.is_file() or str(f) in protected or f.suffix == ".part":
|
||||||
continue
|
continue
|
||||||
|
size = f.stat().st_size
|
||||||
f.unlink()
|
f.unlink()
|
||||||
current -= st.st_size
|
current -= size
|
||||||
deleted += 1
|
deleted += 1
|
||||||
|
|
||||||
return deleted
|
return deleted
|
||||||
|
|
||||||
|
|
||||||
@ -630,23 +521,17 @@ def evict_oldest_thumbnails(max_bytes: int) -> int:
|
|||||||
td = thumbnails_dir()
|
td = thumbnails_dir()
|
||||||
if not td.exists():
|
if not td.exists():
|
||||||
return 0
|
return 0
|
||||||
entries = []
|
files = sorted(td.iterdir(), key=lambda f: f.stat().st_mtime)
|
||||||
current = 0
|
|
||||||
for f in td.iterdir():
|
|
||||||
if not f.is_file():
|
|
||||||
continue
|
|
||||||
st = f.stat()
|
|
||||||
entries.append((f, st))
|
|
||||||
current += st.st_size
|
|
||||||
if current <= max_bytes:
|
|
||||||
return 0
|
|
||||||
entries.sort(key=lambda e: e[1].st_mtime)
|
|
||||||
deleted = 0
|
deleted = 0
|
||||||
for f, st in entries:
|
current = sum(f.stat().st_size for f in td.iterdir() if f.is_file())
|
||||||
|
for f in files:
|
||||||
if current <= max_bytes:
|
if current <= max_bytes:
|
||||||
break
|
break
|
||||||
|
if not f.is_file():
|
||||||
|
continue
|
||||||
|
size = f.stat().st_size
|
||||||
f.unlink()
|
f.unlink()
|
||||||
current -= st.st_size
|
current -= size
|
||||||
deleted += 1
|
deleted += 1
|
||||||
return deleted
|
return deleted
|
||||||
|
|
||||||
|
|||||||
@ -15,18 +15,6 @@ if TYPE_CHECKING:
|
|||||||
APPNAME = "booru-viewer"
|
APPNAME = "booru-viewer"
|
||||||
IS_WINDOWS = sys.platform == "win32"
|
IS_WINDOWS = sys.platform == "win32"
|
||||||
|
|
||||||
# Windows reserved device names (audit finding #7). Filenames whose stem
|
|
||||||
# (before the first dot) lower-cases to one of these are illegal on
|
|
||||||
# Windows because the OS routes opens of `con.jpg` to the CON device.
|
|
||||||
# Checked by render_filename_template() unconditionally so a library
|
|
||||||
# saved on Linux can still be copied to a Windows machine without
|
|
||||||
# breaking on these stems.
|
|
||||||
_WINDOWS_RESERVED_NAMES = frozenset({
|
|
||||||
"con", "prn", "aux", "nul",
|
|
||||||
*{f"com{i}" for i in range(1, 10)},
|
|
||||||
*{f"lpt{i}" for i in range(1, 10)},
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
def hypr_rules_enabled() -> bool:
|
def hypr_rules_enabled() -> bool:
|
||||||
"""Whether the in-code hyprctl dispatches that change window state
|
"""Whether the in-code hyprctl dispatches that change window state
|
||||||
@ -56,15 +44,7 @@ def popout_aspect_lock_enabled() -> bool:
|
|||||||
|
|
||||||
|
|
||||||
def data_dir() -> Path:
|
def data_dir() -> Path:
|
||||||
"""Return the platform-appropriate data/cache directory.
|
"""Return the platform-appropriate data/cache directory."""
|
||||||
|
|
||||||
On POSIX, the directory is chmod'd to 0o700 after creation so the
|
|
||||||
SQLite DB inside (and the api_key/api_user columns it stores) are
|
|
||||||
not exposed to other local users on shared workstations or
|
|
||||||
networked home dirs with permissive umasks. On Windows the chmod
|
|
||||||
is a no-op — NTFS ACLs handle access control separately and the
|
|
||||||
OS already restricts AppData\\Roaming\\<app> to the owning user.
|
|
||||||
"""
|
|
||||||
if IS_WINDOWS:
|
if IS_WINDOWS:
|
||||||
base = Path.home() / "AppData" / "Roaming"
|
base = Path.home() / "AppData" / "Roaming"
|
||||||
else:
|
else:
|
||||||
@ -75,13 +55,6 @@ def data_dir() -> Path:
|
|||||||
)
|
)
|
||||||
path = base / APPNAME
|
path = base / APPNAME
|
||||||
path.mkdir(parents=True, exist_ok=True)
|
path.mkdir(parents=True, exist_ok=True)
|
||||||
if not IS_WINDOWS:
|
|
||||||
try:
|
|
||||||
os.chmod(path, 0o700)
|
|
||||||
except OSError:
|
|
||||||
# Filesystem may not support chmod (e.g. some FUSE mounts).
|
|
||||||
# Better to keep working than refuse to start.
|
|
||||||
pass
|
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
@ -303,16 +276,6 @@ def render_filename_template(template: str, post: "Post", ext: str) -> str:
|
|||||||
if len(rendered) > 200:
|
if len(rendered) > 200:
|
||||||
rendered = rendered[:200].rstrip("._ ")
|
rendered = rendered[:200].rstrip("._ ")
|
||||||
|
|
||||||
# Reject Windows reserved device names (audit finding #7). On Windows,
|
|
||||||
# opening `con.jpg` or `prn.png` for writing redirects to the device,
|
|
||||||
# so a tag value of `con` from a hostile booru would silently break
|
|
||||||
# save. Prefix with `_` to break the device-name match while keeping
|
|
||||||
# the user's intended name visible.
|
|
||||||
if rendered:
|
|
||||||
stem_lower = rendered.split(".", 1)[0].lower()
|
|
||||||
if stem_lower in _WINDOWS_RESERVED_NAMES:
|
|
||||||
rendered = "_" + rendered
|
|
||||||
|
|
||||||
if not rendered:
|
if not rendered:
|
||||||
return f"{post.id}{ext}"
|
return f"{post.id}{ext}"
|
||||||
|
|
||||||
|
|||||||
@ -11,7 +11,7 @@ from dataclasses import dataclass, field
|
|||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from .config import IS_WINDOWS, db_path
|
from .config import db_path
|
||||||
|
|
||||||
|
|
||||||
def _validate_folder_name(name: str) -> str:
|
def _validate_folder_name(name: str) -> str:
|
||||||
@ -185,6 +185,10 @@ class Bookmark:
|
|||||||
tag_categories: dict = field(default_factory=dict)
|
tag_categories: dict = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
# Back-compat alias — will be removed in a future version.
|
||||||
|
Favorite = Bookmark
|
||||||
|
|
||||||
|
|
||||||
class Database:
|
class Database:
|
||||||
def __init__(self, path: Path | None = None) -> None:
|
def __init__(self, path: Path | None = None) -> None:
|
||||||
self._path = path or db_path()
|
self._path = path or db_path()
|
||||||
@ -206,30 +210,8 @@ class Database:
|
|||||||
self._conn.execute("PRAGMA foreign_keys=ON")
|
self._conn.execute("PRAGMA foreign_keys=ON")
|
||||||
self._conn.executescript(_SCHEMA)
|
self._conn.executescript(_SCHEMA)
|
||||||
self._migrate()
|
self._migrate()
|
||||||
self._restrict_perms()
|
|
||||||
return self._conn
|
return self._conn
|
||||||
|
|
||||||
def _restrict_perms(self) -> None:
|
|
||||||
"""Tighten the DB file (and WAL/SHM sidecars) to 0o600 on POSIX.
|
|
||||||
|
|
||||||
The sites table stores api_key + api_user in plaintext, so the
|
|
||||||
file must not be readable by other local users. Sidecars only
|
|
||||||
exist after the first WAL checkpoint, so we tolerate
|
|
||||||
FileNotFoundError. Windows: NTFS ACLs handle this; chmod is a
|
|
||||||
no-op there. Filesystem-level chmod failures are swallowed —
|
|
||||||
better to keep working than refuse to start.
|
|
||||||
"""
|
|
||||||
if IS_WINDOWS:
|
|
||||||
return
|
|
||||||
for suffix in ("", "-wal", "-shm"):
|
|
||||||
target = Path(str(self._path) + suffix) if suffix else self._path
|
|
||||||
try:
|
|
||||||
os.chmod(target, 0o600)
|
|
||||||
except FileNotFoundError:
|
|
||||||
pass
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def _write(self):
|
def _write(self):
|
||||||
"""Context manager for write methods.
|
"""Context manager for write methods.
|
||||||
@ -763,14 +745,9 @@ class Database:
|
|||||||
|
|
||||||
def search_library_meta(self, query: str) -> set[int]:
|
def search_library_meta(self, query: str) -> set[int]:
|
||||||
"""Search library metadata by tags. Returns matching post IDs."""
|
"""Search library metadata by tags. Returns matching post IDs."""
|
||||||
escaped = (
|
|
||||||
query.replace("\\", "\\\\")
|
|
||||||
.replace("%", "\\%")
|
|
||||||
.replace("_", "\\_")
|
|
||||||
)
|
|
||||||
rows = self.conn.execute(
|
rows = self.conn.execute(
|
||||||
"SELECT post_id FROM library_meta WHERE tags LIKE ? ESCAPE '\\'",
|
"SELECT post_id FROM library_meta WHERE tags LIKE ?",
|
||||||
(f"%{escaped}%",),
|
(f"%{query}%",),
|
||||||
).fetchall()
|
).fetchall()
|
||||||
return {r["post_id"] for r in rows}
|
return {r["post_id"] for r in rows}
|
||||||
|
|
||||||
|
|||||||
@ -1,73 +0,0 @@
|
|||||||
"""Shared httpx.AsyncClient constructor.
|
|
||||||
|
|
||||||
Three call sites build near-identical clients: the cache module's
|
|
||||||
download pool, ``BooruClient``'s shared API pool, and
|
|
||||||
``detect.detect_site_type``'s reach into that same pool. Centralising
|
|
||||||
the construction in one place means a future change (new SSRF hook,
|
|
||||||
new connection limit, different default UA) doesn't have to be made
|
|
||||||
three times and kept in sync.
|
|
||||||
|
|
||||||
The module does NOT manage the singletons themselves — each call site
|
|
||||||
keeps its own ``_shared_client`` and its own lock, so the cache
|
|
||||||
pool's long-lived large transfers don't compete with short JSON
|
|
||||||
requests from the API layer. ``make_client`` is a pure constructor.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Callable, Iterable
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
from .config import USER_AGENT
|
|
||||||
from .api._safety import validate_public_request
|
|
||||||
|
|
||||||
|
|
||||||
# Connection pool limits are identical across all three call sites.
|
|
||||||
# Keeping the default here centralises any future tuning.
|
|
||||||
_DEFAULT_LIMITS = httpx.Limits(max_connections=10, max_keepalive_connections=5)
|
|
||||||
|
|
||||||
|
|
||||||
def make_client(
|
|
||||||
*,
|
|
||||||
timeout: float = 20.0,
|
|
||||||
accept: str | None = None,
|
|
||||||
extra_request_hooks: Iterable[Callable] | None = None,
|
|
||||||
) -> httpx.AsyncClient:
|
|
||||||
"""Return a fresh ``httpx.AsyncClient`` with the project's defaults.
|
|
||||||
|
|
||||||
Defaults applied unconditionally:
|
|
||||||
- ``User-Agent`` header from ``core.config.USER_AGENT``
|
|
||||||
- ``follow_redirects=True``
|
|
||||||
- ``validate_public_request`` SSRF hook (always first on the
|
|
||||||
request-hook chain; extras run after it)
|
|
||||||
- Connection limits: 10 max, 5 keepalive
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
timeout: per-request timeout in seconds. Cache downloads pass
|
|
||||||
60s for large videos; the API pool uses 20s.
|
|
||||||
accept: optional ``Accept`` header value. The cache pool sets
|
|
||||||
``image/*,video/*,*/*``; the API pool leaves it unset so
|
|
||||||
httpx's ``*/*`` default takes effect.
|
|
||||||
extra_request_hooks: optional extra callables to run after
|
|
||||||
``validate_public_request``. The API clients pass their
|
|
||||||
connection-logging hook here; detect passes the same.
|
|
||||||
|
|
||||||
Call sites are responsible for their own singleton caching —
|
|
||||||
``make_client`` always returns a fresh instance.
|
|
||||||
"""
|
|
||||||
headers: dict[str, str] = {"User-Agent": USER_AGENT}
|
|
||||||
if accept is not None:
|
|
||||||
headers["Accept"] = accept
|
|
||||||
|
|
||||||
hooks: list[Callable] = [validate_public_request]
|
|
||||||
if extra_request_hooks:
|
|
||||||
hooks.extend(extra_request_hooks)
|
|
||||||
|
|
||||||
return httpx.AsyncClient(
|
|
||||||
headers=headers,
|
|
||||||
follow_redirects=True,
|
|
||||||
timeout=timeout,
|
|
||||||
event_hooks={"request": hooks},
|
|
||||||
limits=_DEFAULT_LIMITS,
|
|
||||||
)
|
|
||||||
31
booru_viewer/core/images.py
Normal file
31
booru_viewer/core/images.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
"""Image thumbnailing and format helpers."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
from .config import DEFAULT_THUMBNAIL_SIZE, thumbnails_dir
|
||||||
|
|
||||||
|
|
||||||
|
def make_thumbnail(
|
||||||
|
source: Path,
|
||||||
|
size: tuple[int, int] = DEFAULT_THUMBNAIL_SIZE,
|
||||||
|
dest: Path | None = None,
|
||||||
|
) -> Path:
|
||||||
|
"""Create a thumbnail, returning its path. Returns existing if already made."""
|
||||||
|
dest = dest or thumbnails_dir() / f"thumb_{source.stem}_{size[0]}x{size[1]}.jpg"
|
||||||
|
if dest.exists():
|
||||||
|
return dest
|
||||||
|
with Image.open(source) as img:
|
||||||
|
img.thumbnail(size, Image.Resampling.LANCZOS)
|
||||||
|
if img.mode in ("RGBA", "P"):
|
||||||
|
img = img.convert("RGB")
|
||||||
|
img.save(dest, "JPEG", quality=85)
|
||||||
|
return dest
|
||||||
|
|
||||||
|
|
||||||
|
def image_dimensions(path: Path) -> tuple[int, int]:
|
||||||
|
with Image.open(path) as img:
|
||||||
|
return img.size
|
||||||
@ -24,7 +24,6 @@ from .db import Database
|
|||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .api.base import Post
|
from .api.base import Post
|
||||||
from .api.category_fetcher import CategoryFetcher
|
|
||||||
|
|
||||||
|
|
||||||
_CATEGORY_TOKENS = {"%artist%", "%character%", "%copyright%", "%general%", "%meta%", "%species%"}
|
_CATEGORY_TOKENS = {"%artist%", "%character%", "%copyright%", "%general%", "%meta%", "%species%"}
|
||||||
@ -37,8 +36,7 @@ async def save_post_file(
|
|||||||
db: Database,
|
db: Database,
|
||||||
in_flight: set[str] | None = None,
|
in_flight: set[str] | None = None,
|
||||||
explicit_name: str | None = None,
|
explicit_name: str | None = None,
|
||||||
*,
|
category_fetcher=None,
|
||||||
category_fetcher: "CategoryFetcher | None",
|
|
||||||
) -> Path:
|
) -> Path:
|
||||||
"""Copy a Post's already-cached media file into `dest_dir`.
|
"""Copy a Post's already-cached media file into `dest_dir`.
|
||||||
|
|
||||||
@ -91,13 +89,6 @@ async def save_post_file(
|
|||||||
explicit_name: optional override. When set, the template is
|
explicit_name: optional override. When set, the template is
|
||||||
bypassed and this basename (already including extension)
|
bypassed and this basename (already including extension)
|
||||||
is used as the starting point for collision resolution.
|
is used as the starting point for collision resolution.
|
||||||
category_fetcher: keyword-only, required. The CategoryFetcher
|
|
||||||
for the post's site, or None when the site categorises tags
|
|
||||||
inline (Danbooru, e621) so ``post.tag_categories`` is always
|
|
||||||
pre-populated. Pass ``None`` explicitly rather than omitting
|
|
||||||
the argument — the ``=None`` default was removed so saves
|
|
||||||
can't silently render templates with empty category tokens
|
|
||||||
just because a caller forgot to plumb the fetcher through.
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
The actual `Path` the file landed at after collision
|
The actual `Path` the file landed at after collision
|
||||||
|
|||||||
@ -1,34 +0,0 @@
|
|||||||
"""Pure helper for the info-panel Source line.
|
|
||||||
|
|
||||||
Lives in its own module so the helper can be unit-tested from CI
|
|
||||||
without pulling in PySide6. ``info_panel.py`` imports it.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from html import escape
|
|
||||||
|
|
||||||
|
|
||||||
def build_source_html(source: str | None) -> str:
|
|
||||||
"""Build the rich-text fragment for the Source line in the info panel.
|
|
||||||
|
|
||||||
The fragment is inserted into a QLabel set to RichText format with
|
|
||||||
setOpenExternalLinks(True) — that means QTextBrowser parses any HTML
|
|
||||||
in *source* as markup. Without escaping, a hostile booru can break
|
|
||||||
out of the href attribute, inject ``<img>`` tracking pixels, or make
|
|
||||||
the visible text disagree with the click target.
|
|
||||||
|
|
||||||
The href is only emitted for an http(s) URL; everything else is
|
|
||||||
rendered as escaped plain text. Both the href value and the visible
|
|
||||||
display text are HTML-escaped (audit finding #6).
|
|
||||||
"""
|
|
||||||
if not source:
|
|
||||||
return "none"
|
|
||||||
# Truncate display text but keep the full URL for the link target.
|
|
||||||
display = source if len(source) <= 60 else source[:57] + "..."
|
|
||||||
if source.startswith(("http://", "https://")):
|
|
||||||
return (
|
|
||||||
f'<a href="{escape(source, quote=True)}" '
|
|
||||||
f'style="color: #4fc3f7;">{escape(display)}</a>'
|
|
||||||
)
|
|
||||||
return escape(display)
|
|
||||||
@ -148,15 +148,6 @@ QWidget#_slideshow_controls QLabel {
|
|||||||
background: transparent;
|
background: transparent;
|
||||||
color: white;
|
color: white;
|
||||||
}
|
}
|
||||||
/* Hide the standard icon column on every QMessageBox (question mark,
|
|
||||||
* warning triangle, info circle) so confirm dialogs are text-only. */
|
|
||||||
QMessageBox QLabel#qt_msgboxex_icon_label {
|
|
||||||
image: none;
|
|
||||||
max-width: 0px;
|
|
||||||
max-height: 0px;
|
|
||||||
margin: 0px;
|
|
||||||
padding: 0px;
|
|
||||||
}
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
@ -306,37 +297,9 @@ def run() -> None:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.warning(f"Operation failed: {e}")
|
log.warning(f"Operation failed: {e}")
|
||||||
else:
|
else:
|
||||||
# No custom.qss — force Fusion widgets so distro pyside6 builds linked
|
# No custom.qss — still install the popout overlay defaults so the
|
||||||
# against system Qt don't pick up Breeze (or whatever the platform
|
# floating toolbar/controls have a sane background instead of bare
|
||||||
# theme plugin supplies) and diverge from the bundled-Qt look that
|
# letterbox color.
|
||||||
# source-from-pip users get.
|
|
||||||
app.setStyle("Fusion")
|
|
||||||
# If no system theme is detected, apply a dark Fusion palette so
|
|
||||||
# fresh installs don't land on blinding white. KDE/GNOME users
|
|
||||||
# keep their palette (dark or light) — we only intervene when
|
|
||||||
# Qt is running on its built-in defaults with no Trolltech.conf.
|
|
||||||
from PySide6.QtGui import QPalette, QColor
|
|
||||||
pal = app.palette()
|
|
||||||
_has_system_theme = Path("~/.config/Trolltech.conf").expanduser().exists()
|
|
||||||
if not _has_system_theme and pal.color(QPalette.ColorRole.Window).lightness() > 128:
|
|
||||||
dark = QPalette()
|
|
||||||
dark.setColor(QPalette.ColorRole.Window, QColor("#2b2b2b"))
|
|
||||||
dark.setColor(QPalette.ColorRole.WindowText, QColor("#d4d4d4"))
|
|
||||||
dark.setColor(QPalette.ColorRole.Base, QColor("#232323"))
|
|
||||||
dark.setColor(QPalette.ColorRole.AlternateBase, QColor("#2b2b2b"))
|
|
||||||
dark.setColor(QPalette.ColorRole.Text, QColor("#d4d4d4"))
|
|
||||||
dark.setColor(QPalette.ColorRole.Button, QColor("#353535"))
|
|
||||||
dark.setColor(QPalette.ColorRole.ButtonText, QColor("#d4d4d4"))
|
|
||||||
dark.setColor(QPalette.ColorRole.BrightText, QColor("#ff4444"))
|
|
||||||
dark.setColor(QPalette.ColorRole.Highlight, QColor("#3daee9"))
|
|
||||||
dark.setColor(QPalette.ColorRole.HighlightedText, QColor("#1e1e1e"))
|
|
||||||
dark.setColor(QPalette.ColorRole.ToolTipBase, QColor("#353535"))
|
|
||||||
dark.setColor(QPalette.ColorRole.ToolTipText, QColor("#d4d4d4"))
|
|
||||||
dark.setColor(QPalette.ColorRole.PlaceholderText, QColor("#7a7a7a"))
|
|
||||||
dark.setColor(QPalette.ColorRole.Link, QColor("#3daee9"))
|
|
||||||
app.setPalette(dark)
|
|
||||||
# Install the popout overlay defaults so the floating toolbar/controls
|
|
||||||
# have a sane background instead of bare letterbox color.
|
|
||||||
app.setStyleSheet(_BASE_POPOUT_OVERLAY_QSS)
|
app.setStyleSheet(_BASE_POPOUT_OVERLAY_QSS)
|
||||||
|
|
||||||
# Set app icon (works in taskbar on all platforms)
|
# Set app icon (works in taskbar on all platforms)
|
||||||
|
|||||||
@ -4,7 +4,6 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Callable, TYPE_CHECKING
|
|
||||||
|
|
||||||
from PySide6.QtCore import Qt, Signal, QObject, QTimer
|
from PySide6.QtCore import Qt, Signal, QObject, QTimer
|
||||||
from PySide6.QtGui import QPixmap
|
from PySide6.QtGui import QPixmap
|
||||||
@ -28,15 +27,11 @@ from ..core.cache import download_thumbnail
|
|||||||
from ..core.concurrency import run_on_app_loop
|
from ..core.concurrency import run_on_app_loop
|
||||||
from .grid import ThumbnailGrid
|
from .grid import ThumbnailGrid
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from ..core.api.category_fetcher import CategoryFetcher
|
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
|
|
||||||
|
|
||||||
class BookmarkThumbSignals(QObject):
|
class BookmarkThumbSignals(QObject):
|
||||||
thumb_ready = Signal(int, str)
|
thumb_ready = Signal(int, str)
|
||||||
save_done = Signal(int) # post_id
|
|
||||||
|
|
||||||
|
|
||||||
class BookmarksView(QWidget):
|
class BookmarksView(QWidget):
|
||||||
@ -47,23 +42,12 @@ class BookmarksView(QWidget):
|
|||||||
bookmarks_changed = Signal() # emitted after bookmark add/remove/unsave
|
bookmarks_changed = Signal() # emitted after bookmark add/remove/unsave
|
||||||
open_in_browser_requested = Signal(int, int) # (site_id, post_id)
|
open_in_browser_requested = Signal(int, int) # (site_id, post_id)
|
||||||
|
|
||||||
def __init__(
|
def __init__(self, db: Database, parent: QWidget | None = None) -> None:
|
||||||
self,
|
|
||||||
db: Database,
|
|
||||||
category_fetcher_factory: Callable[[], "CategoryFetcher | None"],
|
|
||||||
parent: QWidget | None = None,
|
|
||||||
) -> None:
|
|
||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
self._db = db
|
self._db = db
|
||||||
# Factory returns the fetcher for the currently-active site, or
|
|
||||||
# None when the site categorises tags inline (Danbooru, e621).
|
|
||||||
# Called at save time so a site switch between BookmarksView
|
|
||||||
# construction and a save picks up the new site's fetcher.
|
|
||||||
self._category_fetcher_factory = category_fetcher_factory
|
|
||||||
self._bookmarks: list[Bookmark] = []
|
self._bookmarks: list[Bookmark] = []
|
||||||
self._signals = BookmarkThumbSignals()
|
self._signals = BookmarkThumbSignals()
|
||||||
self._signals.thumb_ready.connect(self._on_thumb_ready, Qt.ConnectionType.QueuedConnection)
|
self._signals.thumb_ready.connect(self._on_thumb_ready, Qt.ConnectionType.QueuedConnection)
|
||||||
self._signals.save_done.connect(self._on_save_done, Qt.ConnectionType.QueuedConnection)
|
|
||||||
|
|
||||||
layout = QVBoxLayout(self)
|
layout = QVBoxLayout(self)
|
||||||
layout.setContentsMargins(0, 0, 0, 0)
|
layout.setContentsMargins(0, 0, 0, 0)
|
||||||
@ -229,7 +213,7 @@ class BookmarksView(QWidget):
|
|||||||
elif fav.cached_path and Path(fav.cached_path).exists():
|
elif fav.cached_path and Path(fav.cached_path).exists():
|
||||||
pix = QPixmap(fav.cached_path)
|
pix = QPixmap(fav.cached_path)
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
thumb.set_pixmap(pix, fav.cached_path)
|
thumb.set_pixmap(pix)
|
||||||
|
|
||||||
def _load_thumb_async(self, index: int, url: str) -> None:
|
def _load_thumb_async(self, index: int, url: str) -> None:
|
||||||
# Schedule the download on the persistent event loop instead of
|
# Schedule the download on the persistent event loop instead of
|
||||||
@ -250,14 +234,7 @@ class BookmarksView(QWidget):
|
|||||||
if 0 <= index < len(thumbs):
|
if 0 <= index < len(thumbs):
|
||||||
pix = QPixmap(path)
|
pix = QPixmap(path)
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
thumbs[index].set_pixmap(pix, path)
|
thumbs[index].set_pixmap(pix)
|
||||||
|
|
||||||
def _on_save_done(self, post_id: int) -> None:
|
|
||||||
"""Light the saved-locally dot on the thumbnail for post_id."""
|
|
||||||
for i, fav in enumerate(self._bookmarks):
|
|
||||||
if fav.post_id == post_id and i < len(self._grid._thumbs):
|
|
||||||
self._grid._thumbs[i].set_saved_locally(True)
|
|
||||||
break
|
|
||||||
|
|
||||||
def _do_search(self) -> None:
|
def _do_search(self) -> None:
|
||||||
text = self._search_input.text().strip()
|
text = self._search_input.text().strip()
|
||||||
@ -310,15 +287,9 @@ class BookmarksView(QWidget):
|
|||||||
src = Path(fav.cached_path)
|
src = Path(fav.cached_path)
|
||||||
post = self._bookmark_to_post(fav)
|
post = self._bookmark_to_post(fav)
|
||||||
|
|
||||||
fetcher = self._category_fetcher_factory()
|
|
||||||
|
|
||||||
async def _do():
|
async def _do():
|
||||||
try:
|
try:
|
||||||
await save_post_file(
|
await save_post_file(src, post, dest_dir, self._db)
|
||||||
src, post, dest_dir, self._db,
|
|
||||||
category_fetcher=fetcher,
|
|
||||||
)
|
|
||||||
self._signals.save_done.emit(fav.post_id)
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.warning(f"Bookmark→library save #{fav.post_id} failed: {e}")
|
log.warning(f"Bookmark→library save #{fav.post_id} failed: {e}")
|
||||||
|
|
||||||
@ -358,25 +329,25 @@ class BookmarksView(QWidget):
|
|||||||
menu.addSeparator()
|
menu.addSeparator()
|
||||||
save_as = menu.addAction("Save As...")
|
save_as = menu.addAction("Save As...")
|
||||||
|
|
||||||
# Save to Library / Unsave — mutually exclusive based on
|
# Save to Library submenu — folders come from the library
|
||||||
# whether the post is already in the library.
|
# filesystem, not the bookmark folder DB.
|
||||||
from ..core.config import library_folders
|
from ..core.config import library_folders
|
||||||
save_lib_menu = None
|
|
||||||
save_lib_unsorted = None
|
|
||||||
save_lib_new = None
|
|
||||||
save_lib_folders = {}
|
|
||||||
unsave_lib = None
|
|
||||||
if self._db.is_post_in_library(fav.post_id):
|
|
||||||
unsave_lib = menu.addAction("Unsave from Library")
|
|
||||||
else:
|
|
||||||
save_lib_menu = menu.addMenu("Save to Library")
|
save_lib_menu = menu.addMenu("Save to Library")
|
||||||
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
||||||
save_lib_menu.addSeparator()
|
save_lib_menu.addSeparator()
|
||||||
|
save_lib_folders = {}
|
||||||
for folder in library_folders():
|
for folder in library_folders():
|
||||||
a = save_lib_menu.addAction(folder)
|
a = save_lib_menu.addAction(folder)
|
||||||
save_lib_folders[id(a)] = folder
|
save_lib_folders[id(a)] = folder
|
||||||
save_lib_menu.addSeparator()
|
save_lib_menu.addSeparator()
|
||||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||||
|
|
||||||
|
unsave_lib = None
|
||||||
|
# Only show unsave if the post is actually saved. is_post_in_library
|
||||||
|
# is the format-agnostic DB check — works for digit-stem and
|
||||||
|
# templated filenames alike.
|
||||||
|
if self._db.is_post_in_library(fav.post_id):
|
||||||
|
unsave_lib = menu.addAction("Unsave from Library")
|
||||||
copy_file = menu.addAction("Copy File to Clipboard")
|
copy_file = menu.addAction("Copy File to Clipboard")
|
||||||
copy_url = menu.addAction("Copy Image URL")
|
copy_url = menu.addAction("Copy Image URL")
|
||||||
copy_tags = menu.addAction("Copy Tags")
|
copy_tags = menu.addAction("Copy Tags")
|
||||||
@ -402,9 +373,13 @@ class BookmarksView(QWidget):
|
|||||||
|
|
||||||
if action == save_lib_unsorted:
|
if action == save_lib_unsorted:
|
||||||
self._copy_to_library_unsorted(fav)
|
self._copy_to_library_unsorted(fav)
|
||||||
|
self.refresh()
|
||||||
elif action == save_lib_new:
|
elif action == save_lib_new:
|
||||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||||
if ok and name.strip():
|
if ok and name.strip():
|
||||||
|
# Validate the name via saved_folder_dir() which mkdir's
|
||||||
|
# the library subdir and runs the path-traversal check.
|
||||||
|
# No DB folder write — bookmark folders are independent.
|
||||||
try:
|
try:
|
||||||
from ..core.config import saved_folder_dir
|
from ..core.config import saved_folder_dir
|
||||||
saved_folder_dir(name.strip())
|
saved_folder_dir(name.strip())
|
||||||
@ -412,9 +387,11 @@ class BookmarksView(QWidget):
|
|||||||
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
||||||
return
|
return
|
||||||
self._copy_to_library(fav, name.strip())
|
self._copy_to_library(fav, name.strip())
|
||||||
|
self.refresh()
|
||||||
elif id(action) in save_lib_folders:
|
elif id(action) in save_lib_folders:
|
||||||
folder_name = save_lib_folders[id(action)]
|
folder_name = save_lib_folders[id(action)]
|
||||||
self._copy_to_library(fav, folder_name)
|
self._copy_to_library(fav, folder_name)
|
||||||
|
self.refresh()
|
||||||
elif action == open_browser:
|
elif action == open_browser:
|
||||||
self.open_in_browser_requested.emit(fav.site_id, fav.post_id)
|
self.open_in_browser_requested.emit(fav.site_id, fav.post_id)
|
||||||
elif action == open_default:
|
elif action == open_default:
|
||||||
@ -431,14 +408,12 @@ class BookmarksView(QWidget):
|
|||||||
dest = save_file(self, "Save Image", default_name, f"Images (*{src.suffix})")
|
dest = save_file(self, "Save Image", default_name, f"Images (*{src.suffix})")
|
||||||
if dest:
|
if dest:
|
||||||
dest_path = Path(dest)
|
dest_path = Path(dest)
|
||||||
fetcher = self._category_fetcher_factory()
|
|
||||||
|
|
||||||
async def _do_save_as():
|
async def _do_save_as():
|
||||||
try:
|
try:
|
||||||
await save_post_file(
|
await save_post_file(
|
||||||
src, post, dest_path.parent, self._db,
|
src, post, dest_path.parent, self._db,
|
||||||
explicit_name=dest_path.name,
|
explicit_name=dest_path.name,
|
||||||
category_fetcher=fetcher,
|
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.warning(f"Bookmark Save As #{fav.post_id} failed: {e}")
|
log.warning(f"Bookmark Save As #{fav.post_id} failed: {e}")
|
||||||
@ -446,11 +421,12 @@ class BookmarksView(QWidget):
|
|||||||
run_on_app_loop(_do_save_as())
|
run_on_app_loop(_do_save_as())
|
||||||
elif action == unsave_lib:
|
elif action == unsave_lib:
|
||||||
from ..core.cache import delete_from_library
|
from ..core.cache import delete_from_library
|
||||||
|
# Pass db so templated filenames are matched and the meta
|
||||||
|
# row gets cleaned up. Refresh on success OR on a meta-only
|
||||||
|
# cleanup (orphan row, no on-disk file) — either way the
|
||||||
|
# saved-dot indicator state has changed.
|
||||||
delete_from_library(fav.post_id, db=self._db)
|
delete_from_library(fav.post_id, db=self._db)
|
||||||
for i, f in enumerate(self._bookmarks):
|
self.refresh()
|
||||||
if f.post_id == fav.post_id and i < len(self._grid._thumbs):
|
|
||||||
self._grid._thumbs[i].set_saved_locally(False)
|
|
||||||
break
|
|
||||||
self.bookmarks_changed.emit()
|
self.bookmarks_changed.emit()
|
||||||
elif action == copy_file:
|
elif action == copy_file:
|
||||||
path = fav.cached_path
|
path = fav.cached_path
|
||||||
@ -501,24 +477,20 @@ class BookmarksView(QWidget):
|
|||||||
|
|
||||||
menu = QMenu(self)
|
menu = QMenu(self)
|
||||||
|
|
||||||
any_unsaved = any(not self._db.is_post_in_library(f.post_id) for f in favs)
|
# Save All to Library submenu — folders are filesystem-truth.
|
||||||
any_saved = any(self._db.is_post_in_library(f.post_id) for f in favs)
|
# Conversion from a flat action to a submenu so the user can
|
||||||
|
# pick a destination instead of having "save all" silently use
|
||||||
save_lib_menu = None
|
# each bookmark's fav.folder (which was the cross-bleed bug).
|
||||||
save_lib_unsorted = None
|
|
||||||
save_lib_new = None
|
|
||||||
save_lib_folder_actions: dict[int, str] = {}
|
|
||||||
unsave_all = None
|
|
||||||
if any_unsaved:
|
|
||||||
save_lib_menu = menu.addMenu(f"Save All ({len(favs)}) to Library")
|
save_lib_menu = menu.addMenu(f"Save All ({len(favs)}) to Library")
|
||||||
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
||||||
save_lib_menu.addSeparator()
|
save_lib_menu.addSeparator()
|
||||||
|
save_lib_folder_actions: dict[int, str] = {}
|
||||||
for folder in library_folders():
|
for folder in library_folders():
|
||||||
a = save_lib_menu.addAction(folder)
|
a = save_lib_menu.addAction(folder)
|
||||||
save_lib_folder_actions[id(a)] = folder
|
save_lib_folder_actions[id(a)] = folder
|
||||||
save_lib_menu.addSeparator()
|
save_lib_menu.addSeparator()
|
||||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||||
if any_saved:
|
|
||||||
unsave_all = menu.addAction(f"Unsave All ({len(favs)}) from Library")
|
unsave_all = menu.addAction(f"Unsave All ({len(favs)}) from Library")
|
||||||
menu.addSeparator()
|
menu.addSeparator()
|
||||||
|
|
||||||
@ -544,6 +516,7 @@ class BookmarksView(QWidget):
|
|||||||
self._copy_to_library(fav, folder_name)
|
self._copy_to_library(fav, folder_name)
|
||||||
else:
|
else:
|
||||||
self._copy_to_library_unsorted(fav)
|
self._copy_to_library_unsorted(fav)
|
||||||
|
self.refresh()
|
||||||
|
|
||||||
if action == save_lib_unsorted:
|
if action == save_lib_unsorted:
|
||||||
_save_all_into(None)
|
_save_all_into(None)
|
||||||
@ -561,13 +534,9 @@ class BookmarksView(QWidget):
|
|||||||
_save_all_into(save_lib_folder_actions[id(action)])
|
_save_all_into(save_lib_folder_actions[id(action)])
|
||||||
elif action == unsave_all:
|
elif action == unsave_all:
|
||||||
from ..core.cache import delete_from_library
|
from ..core.cache import delete_from_library
|
||||||
unsaved_ids = set()
|
|
||||||
for fav in favs:
|
for fav in favs:
|
||||||
delete_from_library(fav.post_id, db=self._db)
|
delete_from_library(fav.post_id, db=self._db)
|
||||||
unsaved_ids.add(fav.post_id)
|
self.refresh()
|
||||||
for i, fav in enumerate(self._bookmarks):
|
|
||||||
if fav.post_id in unsaved_ids and i < len(self._grid._thumbs):
|
|
||||||
self._grid._thumbs[i].set_saved_locally(False)
|
|
||||||
self.bookmarks_changed.emit()
|
self.bookmarks_changed.emit()
|
||||||
elif action == move_none:
|
elif action == move_none:
|
||||||
for fav in favs:
|
for fav in favs:
|
||||||
|
|||||||
@ -37,22 +37,19 @@ class ContextMenuHandler:
|
|||||||
save_as = menu.addAction("Save As...")
|
save_as = menu.addAction("Save As...")
|
||||||
|
|
||||||
from ..core.config import library_folders
|
from ..core.config import library_folders
|
||||||
save_lib_menu = None
|
|
||||||
save_lib_unsorted = None
|
|
||||||
save_lib_new = None
|
|
||||||
save_lib_folders = {}
|
|
||||||
unsave_lib = None
|
|
||||||
if self._app._post_actions.is_post_saved(post.id):
|
|
||||||
unsave_lib = menu.addAction("Unsave from Library")
|
|
||||||
else:
|
|
||||||
save_lib_menu = menu.addMenu("Save to Library")
|
save_lib_menu = menu.addMenu("Save to Library")
|
||||||
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
||||||
save_lib_menu.addSeparator()
|
save_lib_menu.addSeparator()
|
||||||
|
save_lib_folders = {}
|
||||||
for folder in library_folders():
|
for folder in library_folders():
|
||||||
a = save_lib_menu.addAction(folder)
|
a = save_lib_menu.addAction(folder)
|
||||||
save_lib_folders[id(a)] = folder
|
save_lib_folders[id(a)] = folder
|
||||||
save_lib_menu.addSeparator()
|
save_lib_menu.addSeparator()
|
||||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||||
|
|
||||||
|
unsave_lib = None
|
||||||
|
if self._app._post_actions.is_post_saved(post.id):
|
||||||
|
unsave_lib = menu.addAction("Unsave from Library")
|
||||||
copy_clipboard = menu.addAction("Copy File to Clipboard")
|
copy_clipboard = menu.addAction("Copy File to Clipboard")
|
||||||
copy_url = menu.addAction("Copy Image URL")
|
copy_url = menu.addAction("Copy Image URL")
|
||||||
copy_tags = menu.addAction("Copy Tags")
|
copy_tags = menu.addAction("Copy Tags")
|
||||||
@ -111,6 +108,7 @@ class ContextMenuHandler:
|
|||||||
elif id(action) in save_lib_folders:
|
elif id(action) in save_lib_folders:
|
||||||
self._app._post_actions.save_to_library(post, save_lib_folders[id(action)])
|
self._app._post_actions.save_to_library(post, save_lib_folders[id(action)])
|
||||||
elif action == unsave_lib:
|
elif action == unsave_lib:
|
||||||
|
self._app._preview._current_post = post
|
||||||
self._app._post_actions.unsave_from_preview()
|
self._app._post_actions.unsave_from_preview()
|
||||||
elif action == copy_clipboard:
|
elif action == copy_clipboard:
|
||||||
self._app._copy_file_to_clipboard()
|
self._app._copy_file_to_clipboard()
|
||||||
|
|||||||
@ -3,35 +3,25 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from PySide6.QtWidgets import QFileDialog, QWidget
|
from PySide6.QtWidgets import QFileDialog, QWidget
|
||||||
|
|
||||||
from ..core.config import IS_WINDOWS
|
from ..core.config import IS_WINDOWS
|
||||||
|
|
||||||
|
|
||||||
_gtk_cached: bool | None = None
|
|
||||||
|
|
||||||
def _use_gtk() -> bool:
|
def _use_gtk() -> bool:
|
||||||
global _gtk_cached
|
|
||||||
if IS_WINDOWS:
|
if IS_WINDOWS:
|
||||||
return False
|
return False
|
||||||
if _gtk_cached is not None:
|
|
||||||
return _gtk_cached
|
|
||||||
try:
|
try:
|
||||||
from ..core.db import Database
|
from ..core.db import Database
|
||||||
db = Database()
|
db = Database()
|
||||||
val = db.get_setting("file_dialog_platform")
|
val = db.get_setting("file_dialog_platform")
|
||||||
db.close()
|
db.close()
|
||||||
_gtk_cached = val == "gtk"
|
return val == "gtk"
|
||||||
except Exception:
|
except Exception:
|
||||||
_gtk_cached = False
|
return False
|
||||||
return _gtk_cached
|
|
||||||
|
|
||||||
|
|
||||||
def reset_gtk_cache() -> None:
|
|
||||||
"""Called after settings change so the next dialog picks up the new value."""
|
|
||||||
global _gtk_cached
|
|
||||||
_gtk_cached = None
|
|
||||||
|
|
||||||
|
|
||||||
def save_file(
|
def save_file(
|
||||||
|
|||||||
@ -3,17 +3,22 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
|
|
||||||
from PySide6.QtCore import Qt, Signal, QSize, QRect, QRectF, QMimeData, QUrl, QPoint, Property, QPropertyAnimation, QEasingCurve
|
from PySide6.QtCore import Qt, Signal, QSize, QRect, QRectF, QMimeData, QUrl, QPoint, Property, QPropertyAnimation, QEasingCurve
|
||||||
from PySide6.QtGui import QPixmap, QPainter, QColor, QPen, QKeyEvent, QWheelEvent, QDrag, QMouseEvent
|
from PySide6.QtGui import QPixmap, QPainter, QPainterPath, QColor, QPen, QKeyEvent, QWheelEvent, QDrag, QMouseEvent
|
||||||
from PySide6.QtWidgets import (
|
from PySide6.QtWidgets import (
|
||||||
QWidget,
|
QWidget,
|
||||||
QScrollArea,
|
QScrollArea,
|
||||||
|
QMenu,
|
||||||
|
QApplication,
|
||||||
QRubberBand,
|
QRubberBand,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from ..core.api.base import Post
|
||||||
|
|
||||||
THUMB_SIZE = 180
|
THUMB_SIZE = 180
|
||||||
THUMB_SPACING = 2
|
THUMB_SPACING = 2
|
||||||
BORDER_WIDTH = 2
|
BORDER_WIDTH = 2
|
||||||
@ -74,7 +79,6 @@ class ThumbnailWidget(QWidget):
|
|||||||
super().__init__(parent)
|
super().__init__(parent)
|
||||||
self.index = index
|
self.index = index
|
||||||
self._pixmap: QPixmap | None = None
|
self._pixmap: QPixmap | None = None
|
||||||
self._source_path: str | None = None # on-disk path, for re-scaling on size change
|
|
||||||
self._selected = False
|
self._selected = False
|
||||||
self._multi_selected = False
|
self._multi_selected = False
|
||||||
self._bookmarked = False
|
self._bookmarked = False
|
||||||
@ -97,29 +101,19 @@ class ThumbnailWidget(QWidget):
|
|||||||
self.setFixedSize(THUMB_SIZE, THUMB_SIZE)
|
self.setFixedSize(THUMB_SIZE, THUMB_SIZE)
|
||||||
self.setMouseTracking(True)
|
self.setMouseTracking(True)
|
||||||
|
|
||||||
def set_pixmap(self, pixmap: QPixmap, path: str | None = None) -> None:
|
def set_pixmap(self, pixmap: QPixmap) -> None:
|
||||||
if path is not None:
|
|
||||||
self._source_path = path
|
|
||||||
self._pixmap = pixmap.scaled(
|
self._pixmap = pixmap.scaled(
|
||||||
THUMB_SIZE - 4, THUMB_SIZE - 4,
|
THUMB_SIZE - 4, THUMB_SIZE - 4,
|
||||||
Qt.AspectRatioMode.KeepAspectRatio,
|
Qt.AspectRatioMode.KeepAspectRatio,
|
||||||
Qt.TransformationMode.SmoothTransformation,
|
Qt.TransformationMode.SmoothTransformation,
|
||||||
)
|
)
|
||||||
self._thumb_opacity = 0.0
|
self._thumb_opacity = 0.0
|
||||||
anim = QPropertyAnimation(self, b"thumbOpacity")
|
self._fade_anim = QPropertyAnimation(self, b"thumbOpacity")
|
||||||
anim.setDuration(80)
|
self._fade_anim.setDuration(200)
|
||||||
anim.setStartValue(0.0)
|
self._fade_anim.setStartValue(0.0)
|
||||||
anim.setEndValue(1.0)
|
self._fade_anim.setEndValue(1.0)
|
||||||
anim.setEasingCurve(QEasingCurve.Type.OutCubic)
|
self._fade_anim.setEasingCurve(QEasingCurve.Type.OutCubic)
|
||||||
anim.finished.connect(lambda: self._on_fade_done(anim))
|
self._fade_anim.start()
|
||||||
self._fade_anim = anim
|
|
||||||
anim.start()
|
|
||||||
|
|
||||||
def _on_fade_done(self, anim: QPropertyAnimation) -> None:
|
|
||||||
"""Clear the reference then schedule deletion."""
|
|
||||||
if self._fade_anim is anim:
|
|
||||||
self._fade_anim = None
|
|
||||||
anim.deleteLater()
|
|
||||||
|
|
||||||
def set_selected(self, selected: bool) -> None:
|
def set_selected(self, selected: bool) -> None:
|
||||||
self._selected = selected
|
self._selected = selected
|
||||||
@ -152,6 +146,7 @@ class ThumbnailWidget(QWidget):
|
|||||||
# Defaults were seeded from the palette in __init__.
|
# Defaults were seeded from the palette in __init__.
|
||||||
highlight = self._selection_color
|
highlight = self._selection_color
|
||||||
base = pal.color(pal.ColorRole.Base)
|
base = pal.color(pal.ColorRole.Base)
|
||||||
|
mid = self._idle_color
|
||||||
window = pal.color(pal.ColorRole.Window)
|
window = pal.color(pal.ColorRole.Window)
|
||||||
|
|
||||||
# Fill entire cell with window color
|
# Fill entire cell with window color
|
||||||
@ -302,7 +297,7 @@ class ThumbnailWidget(QWidget):
|
|||||||
self.setCursor(Qt.CursorShape.PointingHandCursor if over else Qt.CursorShape.ArrowCursor)
|
self.setCursor(Qt.CursorShape.PointingHandCursor if over else Qt.CursorShape.ArrowCursor)
|
||||||
self.update()
|
self.update()
|
||||||
if (self._drag_start and self._cached_path
|
if (self._drag_start and self._cached_path
|
||||||
and (event.position().toPoint() - self._drag_start).manhattanLength() > 30):
|
and (event.position().toPoint() - self._drag_start).manhattanLength() > 10):
|
||||||
drag = QDrag(self)
|
drag = QDrag(self)
|
||||||
mime = QMimeData()
|
mime = QMimeData()
|
||||||
mime.setUrls([QUrl.fromLocalFile(self._cached_path)])
|
mime.setUrls([QUrl.fromLocalFile(self._cached_path)])
|
||||||
@ -340,11 +335,6 @@ class ThumbnailWidget(QWidget):
|
|||||||
grid.on_padding_click(self, pos)
|
grid.on_padding_click(self, pos)
|
||||||
event.accept()
|
event.accept()
|
||||||
return
|
return
|
||||||
# Pixmap click — clear any stale rubber band state from a
|
|
||||||
# previous interrupted drag before starting a new interaction.
|
|
||||||
grid = self._grid()
|
|
||||||
if grid:
|
|
||||||
grid._clear_stale_rubber_band()
|
|
||||||
self._drag_start = pos
|
self._drag_start = pos
|
||||||
self.clicked.emit(self.index, event)
|
self.clicked.emit(self.index, event)
|
||||||
elif event.button() == Qt.MouseButton.RightButton:
|
elif event.button() == Qt.MouseButton.RightButton:
|
||||||
@ -387,8 +377,6 @@ class FlowLayout(QWidget):
|
|||||||
|
|
||||||
def clear(self) -> None:
|
def clear(self) -> None:
|
||||||
for w in self._items:
|
for w in self._items:
|
||||||
if hasattr(w, '_fade_anim') and w._fade_anim is not None:
|
|
||||||
w._fade_anim.stop()
|
|
||||||
w.setParent(None) # type: ignore
|
w.setParent(None) # type: ignore
|
||||||
w.deleteLater()
|
w.deleteLater()
|
||||||
self._items.clear()
|
self._items.clear()
|
||||||
@ -556,21 +544,6 @@ class ThumbnailGrid(QScrollArea):
|
|||||||
self._thumbs[self._selected_index].set_selected(False)
|
self._thumbs[self._selected_index].set_selected(False)
|
||||||
self._selected_index = -1
|
self._selected_index = -1
|
||||||
|
|
||||||
def _clear_stale_rubber_band(self) -> None:
|
|
||||||
"""Reset any leftover rubber band state before starting a new interaction.
|
|
||||||
|
|
||||||
Rubber band state can get stuck if a drag is interrupted without
|
|
||||||
a matching release event — Wayland focus steal, drag outside the
|
|
||||||
window, tab switch mid-drag, etc. Every new mouse press calls this
|
|
||||||
so the next interaction starts from a clean slate instead of
|
|
||||||
reusing a stale origin (which would make the rubber band "not
|
|
||||||
work" until the app is restarted).
|
|
||||||
"""
|
|
||||||
if self._rubber_band is not None:
|
|
||||||
self._rubber_band.hide()
|
|
||||||
self._rb_origin = None
|
|
||||||
self._rb_pending_origin = None
|
|
||||||
|
|
||||||
def _select(self, index: int) -> None:
|
def _select(self, index: int) -> None:
|
||||||
if index < 0 or index >= len(self._thumbs):
|
if index < 0 or index >= len(self._thumbs):
|
||||||
return
|
return
|
||||||
@ -644,14 +617,12 @@ class ThumbnailGrid(QScrollArea):
|
|||||||
|
|
||||||
def on_padding_click(self, thumb, local_pos) -> None:
|
def on_padding_click(self, thumb, local_pos) -> None:
|
||||||
"""Called directly by ThumbnailWidget when a click misses the pixmap."""
|
"""Called directly by ThumbnailWidget when a click misses the pixmap."""
|
||||||
self._clear_stale_rubber_band()
|
|
||||||
vp_pos = thumb.mapTo(self.viewport(), local_pos)
|
vp_pos = thumb.mapTo(self.viewport(), local_pos)
|
||||||
self._rb_pending_origin = vp_pos
|
self._rb_pending_origin = vp_pos
|
||||||
|
|
||||||
def mousePressEvent(self, event: QMouseEvent) -> None:
|
def mousePressEvent(self, event: QMouseEvent) -> None:
|
||||||
# Clicks on viewport/flow (gaps, space below thumbs) start rubber band
|
# Clicks on viewport/flow (gaps, space below thumbs) start rubber band
|
||||||
if event.button() == Qt.MouseButton.LeftButton:
|
if event.button() == Qt.MouseButton.LeftButton:
|
||||||
self._clear_stale_rubber_band()
|
|
||||||
child = self.childAt(event.position().toPoint())
|
child = self.childAt(event.position().toPoint())
|
||||||
if child is self.widget() or child is self.viewport():
|
if child is self.widget() or child is self.viewport():
|
||||||
self._rb_pending_origin = event.position().toPoint()
|
self._rb_pending_origin = event.position().toPoint()
|
||||||
@ -664,15 +635,11 @@ class ThumbnailGrid(QScrollArea):
|
|||||||
return
|
return
|
||||||
rb_rect = QRect(self._rb_origin, vp_pos).normalized()
|
rb_rect = QRect(self._rb_origin, vp_pos).normalized()
|
||||||
self._rubber_band.setGeometry(rb_rect)
|
self._rubber_band.setGeometry(rb_rect)
|
||||||
# rb_rect is in viewport coords; thumb.geometry() is in widget (content)
|
|
||||||
# coords. Convert rb_rect to widget coords for the intersection test —
|
|
||||||
# widget.mapFrom(viewport, (0,0)) gives the widget-coord of viewport's
|
|
||||||
# origin, which is exactly the translation needed when scrolled.
|
|
||||||
vp_offset = self.widget().mapFrom(self.viewport(), QPoint(0, 0))
|
vp_offset = self.widget().mapFrom(self.viewport(), QPoint(0, 0))
|
||||||
rb_widget = rb_rect.translated(vp_offset)
|
|
||||||
self._clear_multi()
|
self._clear_multi()
|
||||||
for i, thumb in enumerate(self._thumbs):
|
for i, thumb in enumerate(self._thumbs):
|
||||||
if rb_widget.intersects(thumb.geometry()):
|
thumb_rect = thumb.geometry().translated(vp_offset)
|
||||||
|
if rb_rect.intersects(thumb_rect):
|
||||||
self._multi_selected.add(i)
|
self._multi_selected.add(i)
|
||||||
thumb.set_multi_selected(True)
|
thumb.set_multi_selected(True)
|
||||||
|
|
||||||
@ -791,58 +758,6 @@ class ThumbnailGrid(QScrollArea):
|
|||||||
self.reached_bottom.emit()
|
self.reached_bottom.emit()
|
||||||
if value <= 0 and sb.maximum() > 0:
|
if value <= 0 and sb.maximum() > 0:
|
||||||
self.reached_top.emit()
|
self.reached_top.emit()
|
||||||
self._recycle_offscreen()
|
|
||||||
|
|
||||||
def _recycle_offscreen(self) -> None:
|
|
||||||
"""Release decoded pixmaps for thumbnails far from the viewport.
|
|
||||||
|
|
||||||
Thumbnails within the visible area plus a buffer zone keep their
|
|
||||||
pixmaps. Thumbnails outside that zone have their pixmap set to
|
|
||||||
None to free decoded-image memory. When they scroll back into
|
|
||||||
view, the pixmap is re-decoded from the on-disk thumbnail cache
|
|
||||||
via ``_source_path``.
|
|
||||||
|
|
||||||
This caps decoded-thumbnail memory to roughly (visible + buffer)
|
|
||||||
widgets instead of every widget ever created during infinite scroll.
|
|
||||||
"""
|
|
||||||
if not self._thumbs:
|
|
||||||
return
|
|
||||||
step = THUMB_SIZE + THUMB_SPACING
|
|
||||||
if step == 0:
|
|
||||||
return
|
|
||||||
cols = self._flow.columns
|
|
||||||
vp_top = self.verticalScrollBar().value()
|
|
||||||
vp_height = self.viewport().height()
|
|
||||||
|
|
||||||
# Row range that's visible (0-based row indices)
|
|
||||||
first_visible_row = max(0, (vp_top - THUMB_SPACING) // step)
|
|
||||||
last_visible_row = (vp_top + vp_height) // step
|
|
||||||
|
|
||||||
# Buffer: keep ±5 rows of decoded pixmaps beyond the viewport
|
|
||||||
buffer_rows = 5
|
|
||||||
keep_first = max(0, first_visible_row - buffer_rows)
|
|
||||||
keep_last = last_visible_row + buffer_rows
|
|
||||||
|
|
||||||
keep_start = keep_first * cols
|
|
||||||
keep_end = min(len(self._thumbs), (keep_last + 1) * cols)
|
|
||||||
|
|
||||||
for i, thumb in enumerate(self._thumbs):
|
|
||||||
if keep_start <= i < keep_end:
|
|
||||||
# Inside keep zone — restore if missing
|
|
||||||
if thumb._pixmap is None and thumb._source_path:
|
|
||||||
pix = QPixmap(thumb._source_path)
|
|
||||||
if not pix.isNull():
|
|
||||||
thumb._pixmap = pix.scaled(
|
|
||||||
THUMB_SIZE - 4, THUMB_SIZE - 4,
|
|
||||||
Qt.AspectRatioMode.KeepAspectRatio,
|
|
||||||
Qt.TransformationMode.SmoothTransformation,
|
|
||||||
)
|
|
||||||
thumb._thumb_opacity = 1.0
|
|
||||||
thumb.update()
|
|
||||||
else:
|
|
||||||
# Outside keep zone — release
|
|
||||||
if thumb._pixmap is not None:
|
|
||||||
thumb._pixmap = None
|
|
||||||
|
|
||||||
def _nav_horizontal(self, direction: int) -> None:
|
def _nav_horizontal(self, direction: int) -> None:
|
||||||
"""Move selection one cell left (-1) or right (+1); emit edge signals at boundaries."""
|
"""Move selection one cell left (-1) or right (+1); emit edge signals at boundaries."""
|
||||||
@ -868,10 +783,3 @@ class ThumbnailGrid(QScrollArea):
|
|||||||
super().resizeEvent(event)
|
super().resizeEvent(event)
|
||||||
if self._flow:
|
if self._flow:
|
||||||
self._flow.resize(self.viewport().size().width(), self._flow.minimumHeight())
|
self._flow.resize(self.viewport().size().width(), self._flow.minimumHeight())
|
||||||
# Column count can change on resize (splitter drag, tile/float
|
|
||||||
# toggle). Thumbs that were outside the keep zone had their
|
|
||||||
# pixmap freed by _recycle_offscreen and will paint as empty
|
|
||||||
# cells if the row shift moves them into view without a scroll
|
|
||||||
# event to refresh them. Re-run the recycle pass against the
|
|
||||||
# new geometry so newly-visible thumbs get their pixmap back.
|
|
||||||
self._recycle_offscreen()
|
|
||||||
|
|||||||
@ -3,7 +3,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from html import escape
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from PySide6.QtCore import Qt, Property, Signal
|
from PySide6.QtCore import Qt, Property, Signal
|
||||||
@ -13,7 +12,6 @@ from PySide6.QtWidgets import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
from ..core.api.base import Post
|
from ..core.api.base import Post
|
||||||
from ._source_html import build_source_html
|
|
||||||
|
|
||||||
log = logging.getLogger("booru")
|
log = logging.getLogger("booru")
|
||||||
|
|
||||||
@ -117,12 +115,28 @@ class InfoPanel(QWidget):
|
|||||||
log.debug(f"InfoPanel: tag_categories={list(post.tag_categories.keys()) if post.tag_categories else 'empty'}")
|
log.debug(f"InfoPanel: tag_categories={list(post.tag_categories.keys()) if post.tag_categories else 'empty'}")
|
||||||
self._title.setText(f"Post #{post.id}")
|
self._title.setText(f"Post #{post.id}")
|
||||||
filetype = Path(post.file_url.split("?")[0]).suffix.lstrip(".").upper() if post.file_url else "unknown"
|
filetype = Path(post.file_url.split("?")[0]).suffix.lstrip(".").upper() if post.file_url else "unknown"
|
||||||
source_html = build_source_html(post.source)
|
source = post.source or "none"
|
||||||
|
# Truncate display text but keep full URL for the link
|
||||||
|
source_full = source
|
||||||
|
if len(source) > 60:
|
||||||
|
source_display = source[:57] + "..."
|
||||||
|
else:
|
||||||
|
source_display = source
|
||||||
|
if source_full.startswith(("http://", "https://")):
|
||||||
|
source_html = f'<a href="{source_full}" style="color: #4fc3f7;">{source_display}</a>'
|
||||||
|
else:
|
||||||
|
source_html = source_display
|
||||||
|
from html import escape
|
||||||
|
self._details.setText(
|
||||||
|
f"Score: {post.score}\n"
|
||||||
|
f"Rating: {post.rating or 'unknown'}\n"
|
||||||
|
f"Filetype: {filetype}"
|
||||||
|
)
|
||||||
self._details.setTextFormat(Qt.TextFormat.RichText)
|
self._details.setTextFormat(Qt.TextFormat.RichText)
|
||||||
self._details.setText(
|
self._details.setText(
|
||||||
f"Score: {post.score}<br>"
|
f"Score: {post.score}<br>"
|
||||||
f"Rating: {escape(post.rating or 'unknown')}<br>"
|
f"Rating: {escape(post.rating or 'unknown')}<br>"
|
||||||
f"Filetype: {escape(filetype)}<br>"
|
f"Filetype: {filetype}<br>"
|
||||||
f"Source: {source_html}"
|
f"Source: {source_html}"
|
||||||
)
|
)
|
||||||
self._details.setOpenExternalLinks(True)
|
self._details.setOpenExternalLinks(True)
|
||||||
@ -136,17 +150,15 @@ class InfoPanel(QWidget):
|
|||||||
# Display tags grouped by category. Colors come from the
|
# Display tags grouped by category. Colors come from the
|
||||||
# tag*Color Qt Properties so a custom.qss can override any of
|
# tag*Color Qt Properties so a custom.qss can override any of
|
||||||
# them via `InfoPanel { qproperty-tagCharacterColor: ...; }`.
|
# them via `InfoPanel { qproperty-tagCharacterColor: ...; }`.
|
||||||
rendered: set[str] = set()
|
|
||||||
for category, tags in post.tag_categories.items():
|
for category, tags in post.tag_categories.items():
|
||||||
color = self._category_color(category)
|
color = self._category_color(category)
|
||||||
header = QLabel(f"{category}:")
|
header = QLabel(f"{category}:")
|
||||||
header.setStyleSheet(
|
header.setStyleSheet(
|
||||||
"font-weight: bold; margin-top: 6px; margin-bottom: 2px;"
|
f"font-weight: bold; margin-top: 6px; margin-bottom: 2px;"
|
||||||
+ (f" color: {color};" if color else "")
|
+ (f" color: {color};" if color else "")
|
||||||
)
|
)
|
||||||
self._tags_flow.addWidget(header)
|
self._tags_flow.addWidget(header)
|
||||||
for tag in tags:
|
for tag in tags[:50]:
|
||||||
rendered.add(tag)
|
|
||||||
btn = QPushButton(tag)
|
btn = QPushButton(tag)
|
||||||
btn.setFlat(True)
|
btn.setFlat(True)
|
||||||
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
||||||
@ -157,33 +169,12 @@ class InfoPanel(QWidget):
|
|||||||
btn.setStyleSheet(style)
|
btn.setStyleSheet(style)
|
||||||
btn.clicked.connect(lambda checked, t=tag: self.tag_clicked.emit(t))
|
btn.clicked.connect(lambda checked, t=tag: self.tag_clicked.emit(t))
|
||||||
self._tags_flow.addWidget(btn)
|
self._tags_flow.addWidget(btn)
|
||||||
# Safety net: any tag in post.tag_list that didn't land in
|
|
||||||
# a cached category (batch tag API returned partial results,
|
|
||||||
# HTML scrape fell short, cache stale, etc.) is still shown
|
|
||||||
# under an "Other" bucket so tags can't silently disappear
|
|
||||||
# from the info panel.
|
|
||||||
leftover = [t for t in post.tag_list if t and t not in rendered]
|
|
||||||
if leftover:
|
|
||||||
header = QLabel("Other:")
|
|
||||||
header.setStyleSheet(
|
|
||||||
"font-weight: bold; margin-top: 6px; margin-bottom: 2px;"
|
|
||||||
)
|
|
||||||
self._tags_flow.addWidget(header)
|
|
||||||
for tag in leftover:
|
|
||||||
btn = QPushButton(tag)
|
|
||||||
btn.setFlat(True)
|
|
||||||
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
|
||||||
btn.setStyleSheet(
|
|
||||||
"QPushButton { text-align: left; padding: 1px 4px; border: none; }"
|
|
||||||
)
|
|
||||||
btn.clicked.connect(lambda checked, t=tag: self.tag_clicked.emit(t))
|
|
||||||
self._tags_flow.addWidget(btn)
|
|
||||||
elif not self._categories_pending:
|
elif not self._categories_pending:
|
||||||
# Flat tag fallback — only when no category fetch is
|
# Flat tag fallback — only when no category fetch is
|
||||||
# in-flight. When a fetch IS pending, leaving the tags
|
# in-flight. When a fetch IS pending, leaving the tags
|
||||||
# area empty avoids the flat→categorized re-layout hitch
|
# area empty avoids the flat→categorized re-layout hitch
|
||||||
# (categories arrive ~200ms later and render in one pass).
|
# (categories arrive ~200ms later and render in one pass).
|
||||||
for tag in post.tag_list:
|
for tag in post.tag_list[:100]:
|
||||||
btn = QPushButton(tag)
|
btn = QPushButton(tag)
|
||||||
btn.setFlat(True)
|
btn.setFlat(True)
|
||||||
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
||||||
|
|||||||
@ -201,10 +201,9 @@ class LibraryView(QWidget):
|
|||||||
thumb_name = filepath.stem
|
thumb_name = filepath.stem
|
||||||
cached_thumb = lib_thumb_dir / f"{thumb_name}.jpg"
|
cached_thumb = lib_thumb_dir / f"{thumb_name}.jpg"
|
||||||
if cached_thumb.exists():
|
if cached_thumb.exists():
|
||||||
thumb_path = str(cached_thumb)
|
pix = QPixmap(str(cached_thumb))
|
||||||
pix = QPixmap(thumb_path)
|
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
thumb.set_pixmap(pix, thumb_path)
|
thumb.set_pixmap(pix)
|
||||||
continue
|
continue
|
||||||
self._generate_thumb_async(i, filepath, cached_thumb)
|
self._generate_thumb_async(i, filepath, cached_thumb)
|
||||||
|
|
||||||
@ -275,18 +274,14 @@ class LibraryView(QWidget):
|
|||||||
def _sort_files(self) -> None:
|
def _sort_files(self) -> None:
|
||||||
mode = self._sort_combo.currentText()
|
mode = self._sort_combo.currentText()
|
||||||
if mode == "Post ID":
|
if mode == "Post ID":
|
||||||
# Numeric sort by post id. Resolves templated filenames
|
# Numeric sort by post id (filename stem). Library files are
|
||||||
# (e.g. artist_12345.jpg) via library_meta DB lookup, falls
|
# named {post_id}.{ext} in normal usage; anything with a
|
||||||
# back to digit-stem parsing for legacy files. Anything
|
# non-digit stem (someone manually dropped a file in) sorts
|
||||||
# without a resolvable post_id sorts to the end alphabetically.
|
# to the end alphabetically so the numeric ordering of real
|
||||||
|
# posts isn't disrupted by stray names.
|
||||||
def _key(p: Path) -> tuple:
|
def _key(p: Path) -> tuple:
|
||||||
if self._db:
|
stem = p.stem
|
||||||
pid = self._db.get_library_post_id_by_filename(p.name)
|
return (0, int(stem)) if stem.isdigit() else (1, stem.lower())
|
||||||
if pid is not None:
|
|
||||||
return (0, pid)
|
|
||||||
if p.stem.isdigit():
|
|
||||||
return (0, int(p.stem))
|
|
||||||
return (1, p.stem.lower())
|
|
||||||
self._files.sort(key=_key)
|
self._files.sort(key=_key)
|
||||||
elif mode == "Size":
|
elif mode == "Size":
|
||||||
self._files.sort(key=lambda p: p.stat().st_size, reverse=True)
|
self._files.sort(key=lambda p: p.stat().st_size, reverse=True)
|
||||||
@ -326,56 +321,21 @@ class LibraryView(QWidget):
|
|||||||
threading.Thread(target=_work, daemon=True).start()
|
threading.Thread(target=_work, daemon=True).start()
|
||||||
|
|
||||||
def _capture_video_thumb(self, index: int, source: str, dest: str) -> None:
|
def _capture_video_thumb(self, index: int, source: str, dest: str) -> None:
|
||||||
"""Grab first frame from video using mpv, falls back to placeholder."""
|
"""Grab first frame from video. Tries ffmpeg, falls back to placeholder."""
|
||||||
def _work():
|
def _work():
|
||||||
extracted = False
|
|
||||||
try:
|
try:
|
||||||
import threading as _threading
|
import subprocess
|
||||||
import mpv as mpvlib
|
result = subprocess.run(
|
||||||
|
["ffmpeg", "-y", "-i", source, "-vframes", "1",
|
||||||
frame_ready = _threading.Event()
|
"-vf", f"scale={LIBRARY_THUMB_SIZE}:{LIBRARY_THUMB_SIZE}:force_original_aspect_ratio=decrease",
|
||||||
m = mpvlib.MPV(
|
"-q:v", "5", dest],
|
||||||
vo='null', ao='null', aid='no',
|
capture_output=True, timeout=10,
|
||||||
pause=True, keep_open='yes',
|
|
||||||
terminal=False, config=False,
|
|
||||||
# Seek to 10% before first frame decode so a video that
|
|
||||||
# opens on a black frame (fade-in, title card, codec
|
|
||||||
# warmup) doesn't produce a black thumbnail. mpv clamps
|
|
||||||
# `start` to valid range so very short clips still land
|
|
||||||
# on a real frame.
|
|
||||||
start='10%',
|
|
||||||
hr_seek='yes',
|
|
||||||
)
|
)
|
||||||
try:
|
if Path(dest).exists():
|
||||||
@m.property_observer('video-params')
|
|
||||||
def _on_params(_name, value):
|
|
||||||
if isinstance(value, dict) and value.get('w'):
|
|
||||||
frame_ready.set()
|
|
||||||
|
|
||||||
m.loadfile(source)
|
|
||||||
if frame_ready.wait(timeout=10):
|
|
||||||
m.command('screenshot-to-file', dest, 'video')
|
|
||||||
finally:
|
|
||||||
m.terminate()
|
|
||||||
|
|
||||||
if Path(dest).exists() and Path(dest).stat().st_size > 0:
|
|
||||||
from PIL import Image
|
|
||||||
with Image.open(dest) as img:
|
|
||||||
img.thumbnail(
|
|
||||||
(LIBRARY_THUMB_SIZE, LIBRARY_THUMB_SIZE),
|
|
||||||
Image.LANCZOS,
|
|
||||||
)
|
|
||||||
if img.mode in ("RGBA", "P"):
|
|
||||||
img = img.convert("RGB")
|
|
||||||
img.save(dest, "JPEG", quality=85)
|
|
||||||
extracted = True
|
|
||||||
except Exception as e:
|
|
||||||
log.debug("mpv thumb extraction failed for %s: %s", source, e)
|
|
||||||
|
|
||||||
if extracted and Path(dest).exists():
|
|
||||||
self._signals.thumb_ready.emit(index, dest)
|
self._signals.thumb_ready.emit(index, dest)
|
||||||
return
|
return
|
||||||
|
except (FileNotFoundError, Exception):
|
||||||
|
pass
|
||||||
# Fallback: generate a placeholder
|
# Fallback: generate a placeholder
|
||||||
from PySide6.QtGui import QPainter, QColor, QFont
|
from PySide6.QtGui import QPainter, QColor, QFont
|
||||||
from PySide6.QtGui import QPolygon
|
from PySide6.QtGui import QPolygon
|
||||||
@ -403,7 +363,7 @@ class LibraryView(QWidget):
|
|||||||
if 0 <= index < len(thumbs):
|
if 0 <= index < len(thumbs):
|
||||||
pix = QPixmap(path)
|
pix = QPixmap(path)
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
thumbs[index].set_pixmap(pix, path)
|
thumbs[index].set_pixmap(pix)
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# Selection signals
|
# Selection signals
|
||||||
@ -560,8 +520,7 @@ class LibraryView(QWidget):
|
|||||||
if post_id is None and filepath.stem.isdigit():
|
if post_id is None and filepath.stem.isdigit():
|
||||||
post_id = int(filepath.stem)
|
post_id = int(filepath.stem)
|
||||||
filepath.unlink(missing_ok=True)
|
filepath.unlink(missing_ok=True)
|
||||||
thumb_key = str(post_id) if post_id is not None else filepath.stem
|
lib_thumb = thumbnails_dir() / "library" / f"{filepath.stem}.jpg"
|
||||||
lib_thumb = thumbnails_dir() / "library" / f"{thumb_key}.jpg"
|
|
||||||
lib_thumb.unlink(missing_ok=True)
|
lib_thumb.unlink(missing_ok=True)
|
||||||
if post_id is not None:
|
if post_id is not None:
|
||||||
self._db.remove_library_meta(post_id)
|
self._db.remove_library_meta(post_id)
|
||||||
@ -616,8 +575,7 @@ class LibraryView(QWidget):
|
|||||||
if post_id is None and f.stem.isdigit():
|
if post_id is None and f.stem.isdigit():
|
||||||
post_id = int(f.stem)
|
post_id = int(f.stem)
|
||||||
f.unlink(missing_ok=True)
|
f.unlink(missing_ok=True)
|
||||||
thumb_key = str(post_id) if post_id is not None else f.stem
|
lib_thumb = thumbnails_dir() / "library" / f"{f.stem}.jpg"
|
||||||
lib_thumb = thumbnails_dir() / "library" / f"{thumb_key}.jpg"
|
|
||||||
lib_thumb.unlink(missing_ok=True)
|
lib_thumb.unlink(missing_ok=True)
|
||||||
if post_id is not None:
|
if post_id is not None:
|
||||||
self._db.remove_library_meta(post_id)
|
self._db.remove_library_meta(post_id)
|
||||||
|
|||||||
@ -4,6 +4,8 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
import threading
|
import threading
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
@ -26,12 +28,14 @@ from PySide6.QtWidgets import (
|
|||||||
QProgressBar,
|
QProgressBar,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from dataclasses import field
|
||||||
|
|
||||||
from ..core.db import Database, Site
|
from ..core.db import Database, Site
|
||||||
from ..core.api.base import BooruClient, Post
|
from ..core.api.base import BooruClient, Post
|
||||||
from ..core.api.detect import client_for_type
|
from ..core.api.detect import client_for_type
|
||||||
from ..core.cache import download_image
|
from ..core.cache import download_image
|
||||||
|
|
||||||
from .grid import ThumbnailGrid, THUMB_SIZE, THUMB_SPACING
|
from .grid import ThumbnailGrid
|
||||||
from .preview_pane import ImagePreview
|
from .preview_pane import ImagePreview
|
||||||
from .search import SearchBar
|
from .search import SearchBar
|
||||||
from .sites import SiteManagerDialog
|
from .sites import SiteManagerDialog
|
||||||
@ -306,7 +310,6 @@ class BooruApp(QMainWindow):
|
|||||||
self._stack = QStackedWidget()
|
self._stack = QStackedWidget()
|
||||||
|
|
||||||
self._grid = ThumbnailGrid()
|
self._grid = ThumbnailGrid()
|
||||||
self._grid.setMinimumWidth(THUMB_SIZE + THUMB_SPACING * 2)
|
|
||||||
self._grid.post_selected.connect(self._on_post_selected)
|
self._grid.post_selected.connect(self._on_post_selected)
|
||||||
self._grid.post_activated.connect(self._media_ctrl.on_post_activated)
|
self._grid.post_activated.connect(self._media_ctrl.on_post_activated)
|
||||||
self._grid.context_requested.connect(self._context.show_single)
|
self._grid.context_requested.connect(self._context.show_single)
|
||||||
@ -315,9 +318,7 @@ class BooruApp(QMainWindow):
|
|||||||
self._grid.nav_before_start.connect(self._search_ctrl.on_nav_before_start)
|
self._grid.nav_before_start.connect(self._search_ctrl.on_nav_before_start)
|
||||||
self._stack.addWidget(self._grid)
|
self._stack.addWidget(self._grid)
|
||||||
|
|
||||||
self._bookmarks_view = BookmarksView(
|
self._bookmarks_view = BookmarksView(self._db)
|
||||||
self._db, self._get_category_fetcher,
|
|
||||||
)
|
|
||||||
self._bookmarks_view.bookmark_selected.connect(self._on_bookmark_selected)
|
self._bookmarks_view.bookmark_selected.connect(self._on_bookmark_selected)
|
||||||
self._bookmarks_view.bookmark_activated.connect(self._on_bookmark_activated)
|
self._bookmarks_view.bookmark_activated.connect(self._on_bookmark_activated)
|
||||||
self._bookmarks_view.bookmarks_changed.connect(self._post_actions.refresh_browse_saved_dots)
|
self._bookmarks_view.bookmarks_changed.connect(self._post_actions.refresh_browse_saved_dots)
|
||||||
@ -491,6 +492,7 @@ class BooruApp(QMainWindow):
|
|||||||
file_menu = menu.addMenu("&File")
|
file_menu = menu.addMenu("&File")
|
||||||
|
|
||||||
sites_action = QAction("&Manage Sites...", self)
|
sites_action = QAction("&Manage Sites...", self)
|
||||||
|
sites_action.setShortcut(QKeySequence("Ctrl+S"))
|
||||||
sites_action.triggered.connect(self._open_site_manager)
|
sites_action.triggered.connect(self._open_site_manager)
|
||||||
file_menu.addAction(sites_action)
|
file_menu.addAction(sites_action)
|
||||||
|
|
||||||
@ -502,6 +504,7 @@ class BooruApp(QMainWindow):
|
|||||||
file_menu.addSeparator()
|
file_menu.addSeparator()
|
||||||
|
|
||||||
self._batch_action = QAction("Batch &Download Page...", self)
|
self._batch_action = QAction("Batch &Download Page...", self)
|
||||||
|
self._batch_action.setShortcut(QKeySequence("Ctrl+D"))
|
||||||
self._batch_action.triggered.connect(self._post_actions.batch_download)
|
self._batch_action.triggered.connect(self._post_actions.batch_download)
|
||||||
file_menu.addAction(self._batch_action)
|
file_menu.addAction(self._batch_action)
|
||||||
|
|
||||||
@ -588,30 +591,23 @@ class BooruApp(QMainWindow):
|
|||||||
# them again is meaningless. Disabling the QAction also disables
|
# them again is meaningless. Disabling the QAction also disables
|
||||||
# its keyboard shortcut.
|
# its keyboard shortcut.
|
||||||
self._batch_action.setEnabled(index == 0)
|
self._batch_action.setEnabled(index == 0)
|
||||||
# Clear other tabs' selections to prevent cross-tab action
|
# Clear grid selections and current post to prevent cross-tab action conflicts
|
||||||
# conflicts (B/S keys acting on a stale selection from another
|
# Preview media stays visible but actions are disabled until a new post is selected
|
||||||
# tab). The target tab keeps its selection so the user doesn't
|
|
||||||
# lose their place when switching back and forth.
|
|
||||||
if index != 0:
|
|
||||||
self._grid.clear_selection()
|
self._grid.clear_selection()
|
||||||
if index != 1:
|
|
||||||
self._bookmarks_view._grid.clear_selection()
|
self._bookmarks_view._grid.clear_selection()
|
||||||
if index != 2:
|
|
||||||
self._library_view._grid.clear_selection()
|
self._library_view._grid.clear_selection()
|
||||||
|
self._preview._current_post = None
|
||||||
|
self._preview._current_site_id = None
|
||||||
is_library = index == 2
|
is_library = index == 2
|
||||||
# Resolve actual bookmark/save state for the current preview post
|
|
||||||
# so toolbar buttons reflect reality instead of a per-tab default.
|
|
||||||
post = self._preview._current_post
|
|
||||||
if post:
|
|
||||||
site_id = self._preview._current_site_id or self._site_combo.currentData()
|
|
||||||
self._preview.update_bookmark_state(
|
|
||||||
bool(site_id and self._db.is_bookmarked(site_id, post.id))
|
|
||||||
)
|
|
||||||
self._preview.update_save_state(
|
|
||||||
is_library or self._post_actions.is_post_saved(post.id)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self._preview.update_bookmark_state(False)
|
self._preview.update_bookmark_state(False)
|
||||||
|
# On the library tab the Save button is the only toolbar action
|
||||||
|
# left visible (Bookmark / BL Tag / BL Post are hidden a few lines
|
||||||
|
# down). Library files are saved by definition, so the button
|
||||||
|
# should read "Unsave" the entire time the user is in that tab —
|
||||||
|
# forcing the state to True here makes that true even before the
|
||||||
|
# user clicks anything (the toolbar might already be showing old
|
||||||
|
# media from the previous tab; this is fine because the same media
|
||||||
|
# is also in the library if it was just saved).
|
||||||
self._preview.update_save_state(is_library)
|
self._preview.update_save_state(is_library)
|
||||||
# Show/hide preview toolbar buttons per tab
|
# Show/hide preview toolbar buttons per tab
|
||||||
self._preview._bookmark_btn.setVisible(not is_library)
|
self._preview._bookmark_btn.setVisible(not is_library)
|
||||||
@ -776,17 +772,8 @@ class BooruApp(QMainWindow):
|
|||||||
self._preview.update_save_state(self._post_actions.is_post_saved(post.id))
|
self._preview.update_save_state(self._post_actions.is_post_saved(post.id))
|
||||||
info = f"Bookmark #{fav.post_id}"
|
info = f"Bookmark #{fav.post_id}"
|
||||||
|
|
||||||
def _set_dims_from_file(filepath: str) -> None:
|
|
||||||
"""Read image dimensions from a local file into the Post object
|
|
||||||
so the popout can set keep_aspect_ratio correctly."""
|
|
||||||
w, h = MediaController.image_dimensions(filepath)
|
|
||||||
if w and h:
|
|
||||||
post.width = w
|
|
||||||
post.height = h
|
|
||||||
|
|
||||||
# Try local cache first
|
# Try local cache first
|
||||||
if fav.cached_path and Path(fav.cached_path).exists():
|
if fav.cached_path and Path(fav.cached_path).exists():
|
||||||
_set_dims_from_file(fav.cached_path)
|
|
||||||
self._media_ctrl.set_preview_media(fav.cached_path, info)
|
self._media_ctrl.set_preview_media(fav.cached_path, info)
|
||||||
self._popout_ctrl.update_media(fav.cached_path, info)
|
self._popout_ctrl.update_media(fav.cached_path, info)
|
||||||
return
|
return
|
||||||
@ -797,7 +784,6 @@ class BooruApp(QMainWindow):
|
|||||||
# legacy digit-stem files would be found).
|
# legacy digit-stem files would be found).
|
||||||
from ..core.config import find_library_files
|
from ..core.config import find_library_files
|
||||||
for path in find_library_files(fav.post_id, db=self._db):
|
for path in find_library_files(fav.post_id, db=self._db):
|
||||||
_set_dims_from_file(str(path))
|
|
||||||
self._media_ctrl.set_preview_media(str(path), info)
|
self._media_ctrl.set_preview_media(str(path), info)
|
||||||
self._popout_ctrl.update_media(str(path), info)
|
self._popout_ctrl.update_media(str(path), info)
|
||||||
return
|
return
|
||||||
@ -996,7 +982,7 @@ class BooruApp(QMainWindow):
|
|||||||
self._open_post_id_in_browser(post.id)
|
self._open_post_id_in_browser(post.id)
|
||||||
|
|
||||||
def _open_in_default(self, post: Post) -> None:
|
def _open_in_default(self, post: Post) -> None:
|
||||||
from ..core.cache import cached_path_for
|
from ..core.cache import cached_path_for, is_cached
|
||||||
path = cached_path_for(post.file_url)
|
path = cached_path_for(post.file_url)
|
||||||
if path.exists():
|
if path.exists():
|
||||||
# Pause any playing video before opening externally
|
# Pause any playing video before opening externally
|
||||||
@ -1053,33 +1039,12 @@ class BooruApp(QMainWindow):
|
|||||||
if lib_dir:
|
if lib_dir:
|
||||||
from ..core.config import set_library_dir
|
from ..core.config import set_library_dir
|
||||||
set_library_dir(Path(lib_dir))
|
set_library_dir(Path(lib_dir))
|
||||||
# Apply thumbnail size live — update the module constant, resize
|
# Apply thumbnail size
|
||||||
# existing thumbnails, and reflow the grid.
|
|
||||||
from .grid import THUMB_SIZE
|
from .grid import THUMB_SIZE
|
||||||
new_size = self._db.get_setting_int("thumbnail_size")
|
new_size = self._db.get_setting_int("thumbnail_size")
|
||||||
if new_size and new_size != THUMB_SIZE:
|
if new_size and new_size != THUMB_SIZE:
|
||||||
import booru_viewer.gui.grid as grid_mod
|
import booru_viewer.gui.grid as grid_mod
|
||||||
grid_mod.THUMB_SIZE = new_size
|
grid_mod.THUMB_SIZE = new_size
|
||||||
for grid in (self._grid, self._bookmarks_view._grid, self._library_view._grid):
|
|
||||||
for thumb in grid._thumbs:
|
|
||||||
thumb.setFixedSize(new_size, new_size)
|
|
||||||
if thumb._source_path:
|
|
||||||
src = QPixmap(thumb._source_path)
|
|
||||||
if not src.isNull():
|
|
||||||
thumb._pixmap = src.scaled(
|
|
||||||
new_size - 4, new_size - 4,
|
|
||||||
Qt.AspectRatioMode.KeepAspectRatio,
|
|
||||||
Qt.TransformationMode.SmoothTransformation,
|
|
||||||
)
|
|
||||||
thumb.update()
|
|
||||||
grid._flow._do_layout()
|
|
||||||
# Apply flip layout live
|
|
||||||
flip = self._db.get_setting_bool("flip_layout")
|
|
||||||
current_first = self._splitter.widget(0)
|
|
||||||
want_right_first = flip
|
|
||||||
right_is_first = current_first is self._right_splitter
|
|
||||||
if want_right_first != right_is_first:
|
|
||||||
self._splitter.insertWidget(0, self._right_splitter if flip else self._stack)
|
|
||||||
self._status.showMessage("Settings applied")
|
self._status.showMessage("Settings applied")
|
||||||
|
|
||||||
# -- Fullscreen & Privacy --
|
# -- Fullscreen & Privacy --
|
||||||
@ -1123,9 +1088,7 @@ class BooruApp(QMainWindow):
|
|||||||
if 0 <= idx < len(self._posts):
|
if 0 <= idx < len(self._posts):
|
||||||
self._post_actions.toggle_bookmark(idx)
|
self._post_actions.toggle_bookmark(idx)
|
||||||
return
|
return
|
||||||
if key == Qt.Key.Key_S and self._posts:
|
if key == Qt.Key.Key_S and self._preview._current_post:
|
||||||
idx = self._grid.selected_index
|
|
||||||
if 0 <= idx < len(self._posts):
|
|
||||||
self._post_actions.toggle_save_from_preview()
|
self._post_actions.toggle_save_from_preview()
|
||||||
return
|
return
|
||||||
elif key == Qt.Key.Key_I:
|
elif key == Qt.Key.Key_I:
|
||||||
|
|||||||
@ -1,89 +0,0 @@
|
|||||||
"""Pure helpers that build the kwargs dict passed to ``mpv.MPV`` and
|
|
||||||
the post-construction options dict applied via the property API.
|
|
||||||
|
|
||||||
Kept free of any Qt or mpv imports so the options can be audited from
|
|
||||||
a CI test that only installs the stdlib.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
# FFmpeg ``protocol_whitelist`` value applied via mpv's
|
|
||||||
# ``demuxer-lavf-o`` option (audit finding #2). ``file`` must stay so
|
|
||||||
# cached local clips and ``.part`` files keep playing; ``http``/
|
|
||||||
# ``https``/``tls``/``tcp`` are needed for fresh network video.
|
|
||||||
# ``crypto`` is intentionally omitted — it's an FFmpeg pseudo-protocol
|
|
||||||
# for AES-decrypted streams that boorus do not legitimately serve.
|
|
||||||
LAVF_PROTOCOL_WHITELIST = "file,http,https,tls,tcp"
|
|
||||||
|
|
||||||
|
|
||||||
def lavf_options() -> dict[str, str]:
|
|
||||||
"""Return the FFmpeg lavf demuxer options to apply post-construction.
|
|
||||||
|
|
||||||
These cannot be set via ``mpv.MPV(**kwargs)`` because python-mpv's
|
|
||||||
init path uses ``mpv_set_option_string``, which routes through
|
|
||||||
mpv's keyvalue list parser. That parser splits on ``,`` to find
|
|
||||||
entries, so the comma-laden ``protocol_whitelist`` value gets
|
|
||||||
shredded into orphan tokens and mpv rejects the option with
|
|
||||||
-7 OPT_FORMAT. mpv's documented backslash escape (``\\,``) is
|
|
||||||
not unescaped on this code path either.
|
|
||||||
|
|
||||||
The post-construction property API DOES accept dict values for
|
|
||||||
keyvalue-list options via the node API, so we set them after
|
|
||||||
``mpv.MPV()`` returns. Caller pattern:
|
|
||||||
|
|
||||||
m = mpv.MPV(**build_mpv_kwargs(is_windows=...))
|
|
||||||
for k, v in lavf_options().items():
|
|
||||||
m["demuxer-lavf-o"] = {k: v}
|
|
||||||
"""
|
|
||||||
return {"protocol_whitelist": LAVF_PROTOCOL_WHITELIST}
|
|
||||||
|
|
||||||
|
|
||||||
def build_mpv_kwargs(is_windows: bool) -> dict[str, object]:
|
|
||||||
"""Return the kwargs dict for constructing ``mpv.MPV``.
|
|
||||||
|
|
||||||
The playback, audio, and network options are unchanged from
|
|
||||||
pre-audit v0.2.5. The security hardening added by SECURITY_AUDIT.md
|
|
||||||
finding #2 is:
|
|
||||||
|
|
||||||
- ``ytdl="no"``: refuse to delegate URL handling to yt-dlp. mpv's
|
|
||||||
default enables a yt-dlp hook script that matches ~1500 hosts
|
|
||||||
and shells out to ``yt-dlp`` on any URL it recognizes. A
|
|
||||||
compromised booru returning ``file_url: "https://youtube.com/..."``
|
|
||||||
would pull the user through whatever extractor CVE is current.
|
|
||||||
|
|
||||||
- ``load_scripts="no"``: do not auto-load Lua scripts from
|
|
||||||
``~/.config/mpv/scripts``. These scripts run in mpv's context
|
|
||||||
every time the widget is created.
|
|
||||||
|
|
||||||
- ``input_conf="/dev/null"`` (POSIX only): skip loading
|
|
||||||
``~/.config/mpv/input.conf``. The existing
|
|
||||||
``input_default_bindings=False`` + ``input_vo_keyboard=False``
|
|
||||||
are the primary lockdown; this is defense-in-depth. Windows
|
|
||||||
uses a different null-device path and the load behavior varies
|
|
||||||
by mpv build, so it is skipped there.
|
|
||||||
|
|
||||||
The ffmpeg protocol whitelist (also part of finding #2) is NOT
|
|
||||||
in this dict — see ``lavf_options`` for the explanation.
|
|
||||||
"""
|
|
||||||
kwargs: dict[str, object] = {
|
|
||||||
"vo": "libmpv",
|
|
||||||
"hwdec": "auto",
|
|
||||||
"keep_open": "yes",
|
|
||||||
"ao": "pulse,wasapi,",
|
|
||||||
"audio_client_name": "booru-viewer",
|
|
||||||
"input_default_bindings": False,
|
|
||||||
"input_vo_keyboard": False,
|
|
||||||
"osc": False,
|
|
||||||
"vd_lavc_fast": "yes",
|
|
||||||
"vd_lavc_skiploopfilter": "nonkey",
|
|
||||||
"cache": "yes",
|
|
||||||
"cache_pause": "no",
|
|
||||||
"demuxer_max_bytes": "50MiB",
|
|
||||||
"demuxer_readahead_secs": "20",
|
|
||||||
"network_timeout": "10",
|
|
||||||
"ytdl": "no",
|
|
||||||
"load_scripts": "no",
|
|
||||||
}
|
|
||||||
if not is_windows:
|
|
||||||
kwargs["input_conf"] = "/dev/null"
|
|
||||||
return kwargs
|
|
||||||
@ -22,7 +22,6 @@ class ImageViewer(QWidget):
|
|||||||
self._offset = QPointF(0, 0)
|
self._offset = QPointF(0, 0)
|
||||||
self._drag_start: QPointF | None = None
|
self._drag_start: QPointF | None = None
|
||||||
self._drag_offset = QPointF(0, 0)
|
self._drag_offset = QPointF(0, 0)
|
||||||
self._zoom_scroll_accum = 0
|
|
||||||
self.setMouseTracking(True)
|
self.setMouseTracking(True)
|
||||||
self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)
|
self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)
|
||||||
self._info_text = ""
|
self._info_text = ""
|
||||||
@ -107,14 +106,9 @@ class ImageViewer(QWidget):
|
|||||||
# Pure horizontal tilt — let parent handle (navigation)
|
# Pure horizontal tilt — let parent handle (navigation)
|
||||||
event.ignore()
|
event.ignore()
|
||||||
return
|
return
|
||||||
self._zoom_scroll_accum += delta
|
|
||||||
steps = self._zoom_scroll_accum // 120
|
|
||||||
if not steps:
|
|
||||||
return
|
|
||||||
self._zoom_scroll_accum -= steps * 120
|
|
||||||
mouse_pos = event.position()
|
mouse_pos = event.position()
|
||||||
old_zoom = self._zoom
|
old_zoom = self._zoom
|
||||||
factor = 1.15 ** steps
|
factor = 1.15 if delta > 0 else 1 / 1.15
|
||||||
self._zoom = max(0.1, min(self._zoom * factor, 20.0))
|
self._zoom = max(0.1, min(self._zoom * factor, 20.0))
|
||||||
ratio = self._zoom / old_zoom
|
ratio = self._zoom / old_zoom
|
||||||
self._offset = mouse_pos - ratio * (mouse_pos - self._offset)
|
self._offset = mouse_pos - ratio * (mouse_pos - self._offset)
|
||||||
|
|||||||
@ -3,7 +3,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
|
||||||
|
|
||||||
from PySide6.QtCore import Signal
|
from PySide6.QtCore import Signal
|
||||||
from PySide6.QtOpenGLWidgets import QOpenGLWidget as _QOpenGLWidget
|
from PySide6.QtOpenGLWidgets import QOpenGLWidget as _QOpenGLWidget
|
||||||
@ -11,8 +10,6 @@ from PySide6.QtWidgets import QWidget, QVBoxLayout
|
|||||||
|
|
||||||
import mpv as mpvlib
|
import mpv as mpvlib
|
||||||
|
|
||||||
from ._mpv_options import build_mpv_kwargs, lavf_options
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@ -38,22 +35,58 @@ class _MpvGLWidget(QWidget):
|
|||||||
self._frame_ready.connect(self._gl.update)
|
self._frame_ready.connect(self._gl.update)
|
||||||
# Create mpv eagerly on the main thread.
|
# Create mpv eagerly on the main thread.
|
||||||
#
|
#
|
||||||
# Options come from `build_mpv_kwargs` (see `_mpv_options.py`
|
# `ao=pulse` is critical for Linux Discord screen-share audio
|
||||||
# for the full rationale). Summary: Discord screen-share audio
|
# capture. Discord on Linux only enumerates audio clients via
|
||||||
# fix via `ao=pulse`, fast-load vd-lavc options, network cache
|
# the libpulse API; it does not see clients that talk to
|
||||||
# tuning for the uncached-video fast path, and the SECURITY
|
# PipeWire natively (which is mpv's default `ao=pipewire`).
|
||||||
# hardening from audit #2 (ytdl=no, load_scripts=no, POSIX
|
# Forcing the pulseaudio output here makes mpv go through
|
||||||
# input_conf null).
|
# PipeWire's pulseaudio compatibility layer, which Discord
|
||||||
|
# picks up the same way it picks up Firefox. Without this,
|
||||||
|
# videos play locally but the audio is silently dropped from
|
||||||
|
# any Discord screen share. See:
|
||||||
|
# https://github.com/mpv-player/mpv/issues/11100
|
||||||
|
# https://github.com/edisionnano/Screenshare-with-audio-on-Discord-with-Linux
|
||||||
|
# On Windows mpv ignores `ao=pulse` and falls through to the
|
||||||
|
# next entry, so listing `wasapi` second keeps Windows playback
|
||||||
|
# working without a platform branch here.
|
||||||
|
#
|
||||||
|
# `audio_client_name` is the name mpv registers with the audio
|
||||||
|
# backend. Sets `application.name` and friends so capture tools
|
||||||
|
# group mpv's audio under the booru-viewer app identity instead
|
||||||
|
# of the default "mpv Media Player".
|
||||||
self._mpv = mpvlib.MPV(
|
self._mpv = mpvlib.MPV(
|
||||||
**build_mpv_kwargs(is_windows=sys.platform == "win32"),
|
vo="libmpv",
|
||||||
|
hwdec="auto",
|
||||||
|
keep_open="yes",
|
||||||
|
ao="pulse,wasapi,",
|
||||||
|
audio_client_name="booru-viewer",
|
||||||
|
input_default_bindings=False,
|
||||||
|
input_vo_keyboard=False,
|
||||||
|
osc=False,
|
||||||
|
# Fast-load options: shave ~50-100ms off first-frame decode
|
||||||
|
# for h264/hevc by skipping a few bitstream-correctness checks
|
||||||
|
# (`vd-lavc-fast`) and the in-loop filter on non-keyframes
|
||||||
|
# (`vd-lavc-skiploopfilter=nonkey`). The artifacts are only
|
||||||
|
# visible on the first few frames before the decoder steady-
|
||||||
|
# state catches up, and only on degraded sources. mpv
|
||||||
|
# documents these as safe for "fast load" use cases like
|
||||||
|
# ours where we want the first frame on screen ASAP and
|
||||||
|
# don't care about a tiny quality dip during ramp-up.
|
||||||
|
vd_lavc_fast="yes",
|
||||||
|
vd_lavc_skiploopfilter="nonkey",
|
||||||
|
# Network streaming tuning for the uncached-video fast path.
|
||||||
|
# cache=yes is mpv's default for network sources but explicit
|
||||||
|
# is clearer. cache_pause=no keeps playback running through
|
||||||
|
# brief buffer underruns instead of pausing — for short booru
|
||||||
|
# clips a momentary stutter beats a pause icon. demuxer caps
|
||||||
|
# keep RAM bounded. network_timeout=10 replaces mpv's ~60s
|
||||||
|
# default so stalled connections surface errors promptly.
|
||||||
|
cache="yes",
|
||||||
|
cache_pause="no",
|
||||||
|
demuxer_max_bytes="50MiB",
|
||||||
|
demuxer_readahead_secs="20",
|
||||||
|
network_timeout="10",
|
||||||
)
|
)
|
||||||
# The ffmpeg lavf demuxer protocol whitelist (also audit #2)
|
|
||||||
# has to be applied via the property API, not as an init
|
|
||||||
# kwarg — python-mpv's init path goes through
|
|
||||||
# mpv_set_option_string which trips on the comma-laden value.
|
|
||||||
# The property API uses the node API and accepts dict values.
|
|
||||||
for key, value in lavf_options().items():
|
|
||||||
self._mpv["demuxer-lavf-o"] = {key: value}
|
|
||||||
# Wire up the GL surface's callbacks to us
|
# Wire up the GL surface's callbacks to us
|
||||||
self._gl._owner = self
|
self._gl._owner = self
|
||||||
|
|
||||||
@ -111,35 +144,10 @@ class _MpvGLWidget(QWidget):
|
|||||||
self._gl.makeCurrent()
|
self._gl.makeCurrent()
|
||||||
self._init_gl()
|
self._init_gl()
|
||||||
|
|
||||||
def release_render_context(self) -> None:
|
|
||||||
"""Free the GL render context without terminating mpv.
|
|
||||||
|
|
||||||
Releases all GPU-side textures and FBOs that the render context
|
|
||||||
holds. The next ``ensure_gl_init()`` call (from ``play_file``)
|
|
||||||
recreates the context cheaply (~5ms). This is the difference
|
|
||||||
between "mpv is idle but holding VRAM" and "mpv is idle and
|
|
||||||
clean."
|
|
||||||
|
|
||||||
Safe to call when mpv has no active file (after
|
|
||||||
``mpv.command('stop')``). After this, ``_paint_gl`` is a no-op
|
|
||||||
(``_ctx is None`` guard) and mpv won't fire frame-ready
|
|
||||||
callbacks because there's no render context to trigger them.
|
|
||||||
"""
|
|
||||||
if self._ctx:
|
|
||||||
# GL context must be current so mpv can release its textures
|
|
||||||
# and FBOs on the correct context. Without this, drivers that
|
|
||||||
# enforce per-context resource ownership (not NVIDIA, but
|
|
||||||
# Mesa/Intel) leak the GPU objects.
|
|
||||||
self._gl.makeCurrent()
|
|
||||||
try:
|
|
||||||
self._ctx.free()
|
|
||||||
finally:
|
|
||||||
self._gl.doneCurrent()
|
|
||||||
self._ctx = None
|
|
||||||
self._gl_inited = False
|
|
||||||
|
|
||||||
def cleanup(self) -> None:
|
def cleanup(self) -> None:
|
||||||
self.release_render_context()
|
if self._ctx:
|
||||||
|
self._ctx.free()
|
||||||
|
self._ctx = None
|
||||||
if self._mpv:
|
if self._mpv:
|
||||||
self._mpv.terminate()
|
self._mpv.terminate()
|
||||||
self._mpv = None
|
self._mpv = None
|
||||||
|
|||||||
@ -3,12 +3,14 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import time
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from PySide6.QtCore import Qt, QTimer, Signal, Property, QPoint
|
from PySide6.QtCore import Qt, QTimer, Signal, Property, QPoint
|
||||||
from PySide6.QtGui import QColor, QIcon, QPixmap, QPainter, QPen, QPolygon, QPainterPath, QFont
|
from PySide6.QtGui import QColor, QIcon, QPixmap, QPainter, QPen, QBrush, QPolygon, QPainterPath, QFont
|
||||||
from PySide6.QtWidgets import (
|
from PySide6.QtWidgets import (
|
||||||
QWidget, QVBoxLayout, QHBoxLayout, QLabel, QPushButton, QSlider, QStyle,
|
QWidget, QVBoxLayout, QHBoxLayout, QLabel, QPushButton, QSlider, QStyle,
|
||||||
|
QApplication,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -159,9 +161,6 @@ class VideoPlayer(QWidget):
|
|||||||
self._mpv['background'] = 'color'
|
self._mpv['background'] = 'color'
|
||||||
self._mpv['background-color'] = self._letterbox_color.name()
|
self._mpv['background-color'] = self._letterbox_color.name()
|
||||||
except Exception:
|
except Exception:
|
||||||
# mpv not fully initialized or torn down; letterbox color
|
|
||||||
# is a cosmetic fallback so a property-write refusal just
|
|
||||||
# leaves the default black until next set.
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def __init__(self, parent: QWidget | None = None, embed_controls: bool = True) -> None:
|
def __init__(self, parent: QWidget | None = None, embed_controls: bool = True) -> None:
|
||||||
@ -331,6 +330,14 @@ class VideoPlayer(QWidget):
|
|||||||
# spawn unmuted by default. _ensure_mpv replays this on creation.
|
# spawn unmuted by default. _ensure_mpv replays this on creation.
|
||||||
self._pending_mute: bool = False
|
self._pending_mute: bool = False
|
||||||
|
|
||||||
|
# Stream-record state: mpv's stream-record option tees its
|
||||||
|
# network stream into a .part file that gets promoted to the
|
||||||
|
# real cache path on clean EOF. Eliminates the parallel httpx
|
||||||
|
# download that used to race with mpv for the same bytes.
|
||||||
|
self._stream_record_tmp: Path | None = None
|
||||||
|
self._stream_record_target: Path | None = None
|
||||||
|
self._seeked_during_record: bool = False
|
||||||
|
|
||||||
def _ensure_mpv(self) -> mpvlib.MPV:
|
def _ensure_mpv(self) -> mpvlib.MPV:
|
||||||
"""Set up mpv callbacks on first use. MPV instance is pre-created."""
|
"""Set up mpv callbacks on first use. MPV instance is pre-created."""
|
||||||
if self._mpv is not None:
|
if self._mpv is not None:
|
||||||
@ -414,6 +421,8 @@ class VideoPlayer(QWidget):
|
|||||||
def seek_to_ms(self, ms: int) -> None:
|
def seek_to_ms(self, ms: int) -> None:
|
||||||
if self._mpv:
|
if self._mpv:
|
||||||
self._mpv.seek(ms / 1000.0, 'absolute+exact')
|
self._mpv.seek(ms / 1000.0, 'absolute+exact')
|
||||||
|
if self._stream_record_target is not None:
|
||||||
|
self._seeked_during_record = True
|
||||||
|
|
||||||
def play_file(self, path: str, info: str = "") -> None:
|
def play_file(self, path: str, info: str = "") -> None:
|
||||||
"""Play a file from a local path OR a remote http(s) URL.
|
"""Play a file from a local path OR a remote http(s) URL.
|
||||||
@ -435,19 +444,6 @@ class VideoPlayer(QWidget):
|
|||||||
"""
|
"""
|
||||||
m = self._ensure_mpv()
|
m = self._ensure_mpv()
|
||||||
self._gl_widget.ensure_gl_init()
|
self._gl_widget.ensure_gl_init()
|
||||||
# Re-arm hardware decoder before each load. stop() sets
|
|
||||||
# hwdec=no to release the NVDEC/VAAPI surface pool (the bulk
|
|
||||||
# of mpv's idle VRAM footprint on NVIDIA), so we flip it back
|
|
||||||
# to auto here so the next loadfile picks up hwdec again.
|
|
||||||
# mpv re-inits the decoder context on the next frame — swamped
|
|
||||||
# by the network fetch for uncached videos.
|
|
||||||
try:
|
|
||||||
m['hwdec'] = 'auto'
|
|
||||||
except Exception:
|
|
||||||
# If hwdec re-arm is refused, mpv falls back to software
|
|
||||||
# decode silently — playback still works, just at higher
|
|
||||||
# CPU cost on this file.
|
|
||||||
pass
|
|
||||||
self._current_file = path
|
self._current_file = path
|
||||||
self._media_ready_fired = False
|
self._media_ready_fired = False
|
||||||
self._pending_duration = None
|
self._pending_duration = None
|
||||||
@ -457,15 +453,27 @@ class VideoPlayer(QWidget):
|
|||||||
# treated as belonging to the previous file's stop and
|
# treated as belonging to the previous file's stop and
|
||||||
# ignored — see the long comment at __init__'s
|
# ignored — see the long comment at __init__'s
|
||||||
# `_eof_ignore_until` definition for the race trace.
|
# `_eof_ignore_until` definition for the race trace.
|
||||||
self._eof_ignore_until = time.monotonic() + self._eof_ignore_window_secs
|
import time as _time
|
||||||
|
self._eof_ignore_until = _time.monotonic() + self._eof_ignore_window_secs
|
||||||
self._last_video_size = None # reset dedupe so new file fires a fit
|
self._last_video_size = None # reset dedupe so new file fires a fit
|
||||||
self._apply_loop_to_mpv()
|
self._apply_loop_to_mpv()
|
||||||
|
|
||||||
|
# Clean up any leftover .part from a previous play_file that
|
||||||
|
# didn't finish (rapid clicks, popout closed mid-stream, etc).
|
||||||
|
self._discard_stream_record()
|
||||||
|
|
||||||
if path.startswith(("http://", "https://")):
|
if path.startswith(("http://", "https://")):
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
from ...core.cache import _referer_for
|
from ...core.cache import _referer_for, cached_path_for
|
||||||
referer = _referer_for(urlparse(path))
|
referer = _referer_for(urlparse(path))
|
||||||
m.loadfile(path, "replace", referrer=referer)
|
target = cached_path_for(path)
|
||||||
|
target.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
tmp = target.with_suffix(target.suffix + ".part")
|
||||||
|
m.loadfile(path, "replace",
|
||||||
|
referrer=referer,
|
||||||
|
stream_record=tmp.as_posix())
|
||||||
|
self._stream_record_tmp = tmp
|
||||||
|
self._stream_record_target = target
|
||||||
else:
|
else:
|
||||||
m.loadfile(path)
|
m.loadfile(path)
|
||||||
if self._autoplay:
|
if self._autoplay:
|
||||||
@ -476,26 +484,10 @@ class VideoPlayer(QWidget):
|
|||||||
self._poll_timer.start()
|
self._poll_timer.start()
|
||||||
|
|
||||||
def stop(self) -> None:
|
def stop(self) -> None:
|
||||||
|
self._discard_stream_record()
|
||||||
self._poll_timer.stop()
|
self._poll_timer.stop()
|
||||||
if self._mpv:
|
if self._mpv:
|
||||||
self._mpv.command('stop')
|
self._mpv.command('stop')
|
||||||
# Drop the hardware decoder surface pool to release VRAM
|
|
||||||
# while idle. On NVIDIA the NVDEC pool is the bulk of mpv's
|
|
||||||
# idle footprint and keep_open=yes + the live GL render
|
|
||||||
# context would otherwise pin it for the widget lifetime.
|
|
||||||
# play_file re-arms hwdec='auto' before the next loadfile.
|
|
||||||
try:
|
|
||||||
self._mpv['hwdec'] = 'no'
|
|
||||||
except Exception:
|
|
||||||
# Best-effort VRAM release on stop; if mpv is mid-
|
|
||||||
# teardown and rejects the write, GL context destruction
|
|
||||||
# still drops the surface pool eventually.
|
|
||||||
pass
|
|
||||||
# Free the GL render context so its internal textures and FBOs
|
|
||||||
# release VRAM while no video is playing. The next play_file()
|
|
||||||
# call recreates the context via ensure_gl_init() (~5ms cost,
|
|
||||||
# swamped by the network fetch for uncached videos).
|
|
||||||
self._gl_widget.release_render_context()
|
|
||||||
self._time_label.setText("0:00")
|
self._time_label.setText("0:00")
|
||||||
self._duration_label.setText("0:00")
|
self._duration_label.setText("0:00")
|
||||||
self._seek_slider.setRange(0, 0)
|
self._seek_slider.setRange(0, 0)
|
||||||
@ -541,9 +533,6 @@ class VideoPlayer(QWidget):
|
|||||||
if pos is not None and dur is not None and dur > 0 and pos >= dur - 0.5:
|
if pos is not None and dur is not None and dur > 0 and pos >= dur - 0.5:
|
||||||
self._mpv.command('seek', 0, 'absolute+exact')
|
self._mpv.command('seek', 0, 'absolute+exact')
|
||||||
except Exception:
|
except Exception:
|
||||||
# Replay-on-end is a UX nicety; if mpv refuses the
|
|
||||||
# seek (stream not ready, state mid-transition) just
|
|
||||||
# toggle pause without rewinding.
|
|
||||||
pass
|
pass
|
||||||
self._mpv.pause = not self._mpv.pause
|
self._mpv.pause = not self._mpv.pause
|
||||||
self._play_btn.setIcon(self._play_icon if self._mpv.pause else self._pause_icon)
|
self._play_btn.setIcon(self._play_icon if self._mpv.pause else self._pause_icon)
|
||||||
@ -580,6 +569,8 @@ class VideoPlayer(QWidget):
|
|||||||
"""
|
"""
|
||||||
if self._mpv:
|
if self._mpv:
|
||||||
self._mpv.seek(pos / 1000.0, 'absolute+exact')
|
self._mpv.seek(pos / 1000.0, 'absolute+exact')
|
||||||
|
if self._stream_record_target is not None:
|
||||||
|
self._seeked_during_record = True
|
||||||
|
|
||||||
def _seek_relative(self, ms: int) -> None:
|
def _seek_relative(self, ms: int) -> None:
|
||||||
if self._mpv:
|
if self._mpv:
|
||||||
@ -617,7 +608,8 @@ class VideoPlayer(QWidget):
|
|||||||
reset and trigger a spurious play_next auto-advance.
|
reset and trigger a spurious play_next auto-advance.
|
||||||
"""
|
"""
|
||||||
if value is True:
|
if value is True:
|
||||||
if time.monotonic() < self._eof_ignore_until:
|
import time as _time
|
||||||
|
if _time.monotonic() < self._eof_ignore_until:
|
||||||
# Stale eof from a previous file's stop. Drop it.
|
# Stale eof from a previous file's stop. Drop it.
|
||||||
return
|
return
|
||||||
self._eof_pending = True
|
self._eof_pending = True
|
||||||
@ -676,12 +668,61 @@ class VideoPlayer(QWidget):
|
|||||||
if not self._eof_pending:
|
if not self._eof_pending:
|
||||||
return
|
return
|
||||||
self._eof_pending = False
|
self._eof_pending = False
|
||||||
|
self._finalize_stream_record()
|
||||||
if self._loop_state == 1: # Once
|
if self._loop_state == 1: # Once
|
||||||
self.pause()
|
self.pause()
|
||||||
elif self._loop_state == 2: # Next
|
elif self._loop_state == 2: # Next
|
||||||
self.pause()
|
self.pause()
|
||||||
self.play_next.emit()
|
self.play_next.emit()
|
||||||
|
|
||||||
|
# -- Stream-record helpers --
|
||||||
|
|
||||||
|
def _discard_stream_record(self) -> None:
|
||||||
|
"""Remove any pending stream-record temp file without promoting."""
|
||||||
|
tmp = self._stream_record_tmp
|
||||||
|
self._stream_record_tmp = None
|
||||||
|
self._stream_record_target = None
|
||||||
|
self._seeked_during_record = False
|
||||||
|
if tmp is not None:
|
||||||
|
try:
|
||||||
|
tmp.unlink(missing_ok=True)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _finalize_stream_record(self) -> None:
|
||||||
|
"""Promote the stream-record .part file to its final cache path.
|
||||||
|
|
||||||
|
Only promotes if: (a) there is a pending stream-record, (b) the
|
||||||
|
user did not seek during playback (seeking invalidates the file
|
||||||
|
because mpv may have skipped byte ranges), and (c) the .part
|
||||||
|
file exists and is non-empty.
|
||||||
|
"""
|
||||||
|
tmp = self._stream_record_tmp
|
||||||
|
target = self._stream_record_target
|
||||||
|
self._stream_record_tmp = None
|
||||||
|
self._stream_record_target = None
|
||||||
|
if tmp is None or target is None:
|
||||||
|
return
|
||||||
|
if self._seeked_during_record:
|
||||||
|
log.debug("Stream-record discarded (seek during playback): %s", tmp.name)
|
||||||
|
try:
|
||||||
|
tmp.unlink(missing_ok=True)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
return
|
||||||
|
if not tmp.exists() or tmp.stat().st_size == 0:
|
||||||
|
log.debug("Stream-record .part missing or empty: %s", tmp.name)
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
os.replace(tmp, target)
|
||||||
|
log.debug("Stream-record promoted: %s -> %s", tmp.name, target.name)
|
||||||
|
except OSError as e:
|
||||||
|
log.warning("Stream-record promote failed: %s", e)
|
||||||
|
try:
|
||||||
|
tmp.unlink(missing_ok=True)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _fmt(ms: int) -> str:
|
def _fmt(ms: int) -> str:
|
||||||
s = ms // 1000
|
s = ms // 1000
|
||||||
|
|||||||
@ -72,8 +72,6 @@ class MediaController:
|
|||||||
self._app = app
|
self._app = app
|
||||||
self._prefetch_pause = asyncio.Event()
|
self._prefetch_pause = asyncio.Event()
|
||||||
self._prefetch_pause.set() # not paused
|
self._prefetch_pause.set() # not paused
|
||||||
self._last_evict_check = 0.0 # monotonic timestamp
|
|
||||||
self._prefetch_gen = 0 # incremented on each prefetch_adjacent call
|
|
||||||
|
|
||||||
# -- Post activation (media load) --
|
# -- Post activation (media load) --
|
||||||
|
|
||||||
@ -133,6 +131,8 @@ class MediaController:
|
|||||||
async def _load():
|
async def _load():
|
||||||
self._prefetch_pause.clear()
|
self._prefetch_pause.clear()
|
||||||
try:
|
try:
|
||||||
|
if streaming:
|
||||||
|
return
|
||||||
path = await download_image(post.file_url, progress_callback=_progress)
|
path = await download_image(post.file_url, progress_callback=_progress)
|
||||||
self._app._signals.image_done.emit(str(path), info)
|
self._app._signals.image_done.emit(str(path), info)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -152,39 +152,15 @@ class MediaController:
|
|||||||
|
|
||||||
def on_image_done(self, path: str, info: str) -> None:
|
def on_image_done(self, path: str, info: str) -> None:
|
||||||
self._app._dl_progress.hide()
|
self._app._dl_progress.hide()
|
||||||
# If the preview is already streaming this video from URL,
|
|
||||||
# just update path references so copy/paste works — don't
|
|
||||||
# restart playback.
|
|
||||||
current = self._app._preview._current_path
|
|
||||||
if current and current.startswith(("http://", "https://")):
|
|
||||||
from ..core.cache import cached_path_for
|
|
||||||
if Path(path) == cached_path_for(current):
|
|
||||||
self._app._preview._current_path = path
|
|
||||||
idx = self._app._grid.selected_index
|
|
||||||
if 0 <= idx < len(self._app._grid._thumbs):
|
|
||||||
self._app._grid._thumbs[idx]._cached_path = path
|
|
||||||
cn = self._app._search_ctrl._cached_names
|
|
||||||
if cn is not None:
|
|
||||||
cn.add(Path(path).name)
|
|
||||||
self._app._status.showMessage(info)
|
|
||||||
self.auto_evict_cache()
|
|
||||||
return
|
|
||||||
if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
|
if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
|
||||||
self._app._preview._info_label.setText(info)
|
self._app._preview._info_label.setText(info)
|
||||||
self._app._preview._current_path = path
|
self._app._preview._current_path = path
|
||||||
else:
|
else:
|
||||||
self.set_preview_media(path, info)
|
self.set_preview_media(path, info)
|
||||||
self._app._status.showMessage(info)
|
self._app._status.showMessage(f"{len(self._app._posts)} results — Loaded")
|
||||||
idx = self._app._grid.selected_index
|
idx = self._app._grid.selected_index
|
||||||
if 0 <= idx < len(self._app._grid._thumbs):
|
if 0 <= idx < len(self._app._grid._thumbs):
|
||||||
self._app._grid._thumbs[idx]._cached_path = path
|
self._app._grid._thumbs[idx]._cached_path = path
|
||||||
# Keep the search controller's cached-names set current so
|
|
||||||
# subsequent _drain_append_queue calls see newly downloaded files
|
|
||||||
# without a full directory rescan.
|
|
||||||
cn = self._app._search_ctrl._cached_names
|
|
||||||
if cn is not None:
|
|
||||||
from pathlib import Path as _P
|
|
||||||
cn.add(_P(path).name)
|
|
||||||
self._app._popout_ctrl.update_media(path, info)
|
self._app._popout_ctrl.update_media(path, info)
|
||||||
self.auto_evict_cache()
|
self.auto_evict_cache()
|
||||||
|
|
||||||
@ -197,14 +173,6 @@ class MediaController:
|
|||||||
else:
|
else:
|
||||||
self._app._preview._video_player.stop()
|
self._app._preview._video_player.stop()
|
||||||
self._app._preview.set_media(url, info)
|
self._app._preview.set_media(url, info)
|
||||||
# Pre-set the expected cache path on the thumbnail immediately.
|
|
||||||
# The parallel httpx download will also set it via on_image_done
|
|
||||||
# when it completes, but this makes it available for drag-to-copy
|
|
||||||
# from the moment streaming starts.
|
|
||||||
from ..core.cache import cached_path_for
|
|
||||||
idx = self._app._grid.selected_index
|
|
||||||
if 0 <= idx < len(self._app._grid._thumbs):
|
|
||||||
self._app._grid._thumbs[idx]._cached_path = str(cached_path_for(url))
|
|
||||||
self._app._status.showMessage(f"Streaming #{Path(url.split('?')[0]).name}...")
|
self._app._status.showMessage(f"Streaming #{Path(url.split('?')[0]).name}...")
|
||||||
|
|
||||||
def on_download_progress(self, downloaded: int, total: int) -> None:
|
def on_download_progress(self, downloaded: int, total: int) -> None:
|
||||||
@ -238,12 +206,7 @@ class MediaController:
|
|||||||
self._app._grid._thumbs[index].set_prefetch_progress(progress)
|
self._app._grid._thumbs[index].set_prefetch_progress(progress)
|
||||||
|
|
||||||
def prefetch_adjacent(self, index: int) -> None:
|
def prefetch_adjacent(self, index: int) -> None:
|
||||||
"""Prefetch posts around the given index.
|
"""Prefetch posts around the given index."""
|
||||||
|
|
||||||
Bumps a generation counter so any previously running spiral
|
|
||||||
exits at its next iteration instead of continuing to download
|
|
||||||
stale adjacencies.
|
|
||||||
"""
|
|
||||||
total = len(self._app._posts)
|
total = len(self._app._posts)
|
||||||
if total == 0:
|
if total == 0:
|
||||||
return
|
return
|
||||||
@ -251,16 +214,9 @@ class MediaController:
|
|||||||
mode = self._app._db.get_setting("prefetch_mode")
|
mode = self._app._db.get_setting("prefetch_mode")
|
||||||
order = compute_prefetch_order(index, total, cols, mode)
|
order = compute_prefetch_order(index, total, cols, mode)
|
||||||
|
|
||||||
self._prefetch_gen += 1
|
|
||||||
gen = self._prefetch_gen
|
|
||||||
|
|
||||||
async def _prefetch_spiral():
|
async def _prefetch_spiral():
|
||||||
for adj in order:
|
for adj in order:
|
||||||
if self._prefetch_gen != gen:
|
|
||||||
return # superseded by a newer prefetch
|
|
||||||
await self._prefetch_pause.wait()
|
await self._prefetch_pause.wait()
|
||||||
if self._prefetch_gen != gen:
|
|
||||||
return
|
|
||||||
if 0 <= adj < len(self._app._posts) and self._app._posts[adj].file_url:
|
if 0 <= adj < len(self._app._posts) and self._app._posts[adj].file_url:
|
||||||
self._app._signals.prefetch_progress.emit(adj, 0.0)
|
self._app._signals.prefetch_progress.emit(adj, 0.0)
|
||||||
try:
|
try:
|
||||||
@ -277,11 +233,6 @@ class MediaController:
|
|||||||
# -- Cache eviction --
|
# -- Cache eviction --
|
||||||
|
|
||||||
def auto_evict_cache(self) -> None:
|
def auto_evict_cache(self) -> None:
|
||||||
import time
|
|
||||||
now = time.monotonic()
|
|
||||||
if now - self._last_evict_check < 30:
|
|
||||||
return
|
|
||||||
self._last_evict_check = now
|
|
||||||
if not self._app._db.get_setting_bool("auto_evict"):
|
if not self._app._db.get_setting_bool("auto_evict"):
|
||||||
return
|
return
|
||||||
max_mb = self._app._db.get_setting_int("max_cache_mb")
|
max_mb = self._app._db.get_setting_int("max_cache_mb")
|
||||||
@ -294,7 +245,7 @@ class MediaController:
|
|||||||
for fav in self._app._db.get_bookmarks(limit=999999):
|
for fav in self._app._db.get_bookmarks(limit=999999):
|
||||||
if fav.cached_path:
|
if fav.cached_path:
|
||||||
protected.add(fav.cached_path)
|
protected.add(fav.cached_path)
|
||||||
evicted = evict_oldest(max_bytes, protected, current_bytes=current)
|
evicted = evict_oldest(max_bytes, protected)
|
||||||
if evicted:
|
if evicted:
|
||||||
log.info(f"Auto-evicted {evicted} cached files")
|
log.info(f"Auto-evicted {evicted} cached files")
|
||||||
max_thumb_mb = self._app._db.get_setting_int("max_thumb_cache_mb") or 500
|
max_thumb_mb = self._app._db.get_setting_int("max_thumb_cache_mb") or 500
|
||||||
@ -307,16 +258,15 @@ class MediaController:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def image_dimensions(path: str) -> tuple[int, int]:
|
def image_dimensions(path: str) -> tuple[int, int]:
|
||||||
"""Read image width/height from a local file without decoding pixels."""
|
"""Read image width/height from a local file."""
|
||||||
from .media.constants import _is_video
|
from .media.constants import _is_video
|
||||||
if _is_video(path):
|
if _is_video(path):
|
||||||
return 0, 0
|
return 0, 0
|
||||||
try:
|
try:
|
||||||
from PySide6.QtGui import QImageReader
|
from PySide6.QtGui import QPixmap
|
||||||
reader = QImageReader(path)
|
pix = QPixmap(path)
|
||||||
size = reader.size()
|
if not pix.isNull():
|
||||||
if size.isValid():
|
return pix.width(), pix.height()
|
||||||
return size.width(), size.height()
|
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
return 0, 0
|
return 0, 0
|
||||||
|
|||||||
@ -114,7 +114,7 @@ class FitWindowToContent:
|
|||||||
"""Compute the new window rect for the given content aspect using
|
"""Compute the new window rect for the given content aspect using
|
||||||
`state.viewport` and dispatch it to Hyprland (or `setGeometry()`
|
`state.viewport` and dispatch it to Hyprland (or `setGeometry()`
|
||||||
on non-Hyprland). The adapter delegates the rect math + dispatch
|
on non-Hyprland). The adapter delegates the rect math + dispatch
|
||||||
to the helpers in `popout/hyprland.py`.
|
to `popout/hyprland.py`'s helper, which lands in commit 13.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
content_w: int
|
content_w: int
|
||||||
|
|||||||
@ -11,11 +11,11 @@ behind the same `HYPRLAND_INSTANCE_SIGNATURE` env var check the
|
|||||||
legacy code used. Off-Hyprland systems no-op or return None at every
|
legacy code used. Off-Hyprland systems no-op or return None at every
|
||||||
entry point.
|
entry point.
|
||||||
|
|
||||||
The popout adapter calls these helpers directly; there are no
|
The legacy `FullscreenPreview._hyprctl_*` methods become 1-line
|
||||||
`FullscreenPreview._hyprctl_*` shims anymore. Every env-var gate
|
shims that call into this module — see commit 13's changes to
|
||||||
for opt-out (`BOORU_VIEWER_NO_HYPR_RULES`, popout-specific aspect
|
`popout/window.py`. The shims preserve byte-for-byte call-site
|
||||||
lock) is implemented inside these functions so every call site
|
compatibility for the existing window.py code; commit 14's adapter
|
||||||
gets the same behavior.
|
rewrite drops them in favor of direct calls.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
@ -54,7 +54,7 @@ def get_window(window_title: str) -> dict | None:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def resize(window_title: str, w: int, h: int, animate: bool = False) -> None:
|
def resize(window_title: str, w: int, h: int) -> None:
|
||||||
"""Ask Hyprland to resize the popout and lock its aspect ratio.
|
"""Ask Hyprland to resize the popout and lock its aspect ratio.
|
||||||
|
|
||||||
No-op on non-Hyprland systems. Tiled windows skip the resize
|
No-op on non-Hyprland systems. Tiled windows skip the resize
|
||||||
@ -86,12 +86,12 @@ def resize(window_title: str, w: int, h: int, animate: bool = False) -> None:
|
|||||||
if not win.get("floating"):
|
if not win.get("floating"):
|
||||||
# Tiled — don't resize (fights the layout). Optionally set
|
# Tiled — don't resize (fights the layout). Optionally set
|
||||||
# aspect lock and no_anim depending on the env vars.
|
# aspect lock and no_anim depending on the env vars.
|
||||||
if rules_on and not animate:
|
if rules_on:
|
||||||
cmds.append(f"dispatch setprop address:{addr} no_anim 1")
|
cmds.append(f"dispatch setprop address:{addr} no_anim 1")
|
||||||
if aspect_on:
|
if aspect_on:
|
||||||
cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 1")
|
cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 1")
|
||||||
else:
|
else:
|
||||||
if rules_on and not animate:
|
if rules_on:
|
||||||
cmds.append(f"dispatch setprop address:{addr} no_anim 1")
|
cmds.append(f"dispatch setprop address:{addr} no_anim 1")
|
||||||
if aspect_on:
|
if aspect_on:
|
||||||
cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 0")
|
cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 0")
|
||||||
@ -111,7 +111,6 @@ def resize_and_move(
|
|||||||
x: int,
|
x: int,
|
||||||
y: int,
|
y: int,
|
||||||
win: dict | None = None,
|
win: dict | None = None,
|
||||||
animate: bool = False,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Atomically resize and move the popout via a single hyprctl batch.
|
"""Atomically resize and move the popout via a single hyprctl batch.
|
||||||
|
|
||||||
@ -141,7 +140,7 @@ def resize_and_move(
|
|||||||
if not addr:
|
if not addr:
|
||||||
return
|
return
|
||||||
cmds: list[str] = []
|
cmds: list[str] = []
|
||||||
if rules_on and not animate:
|
if rules_on:
|
||||||
cmds.append(f"dispatch setprop address:{addr} no_anim 1")
|
cmds.append(f"dispatch setprop address:{addr} no_anim 1")
|
||||||
if aspect_on:
|
if aspect_on:
|
||||||
cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 0")
|
cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 0")
|
||||||
@ -211,35 +210,9 @@ def get_monitor_available_rect(monitor_id: int | None = None) -> tuple[int, int,
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def settiled(window_title: str) -> None:
|
|
||||||
"""Ask Hyprland to un-float the popout, restoring it to tiled layout.
|
|
||||||
|
|
||||||
Used on reopen when the popout was tiled at close — the windowrule
|
|
||||||
opens it floating, so we dispatch `settiled` to push it back into
|
|
||||||
the layout.
|
|
||||||
|
|
||||||
Gated by BOORU_VIEWER_NO_HYPR_RULES so ricers with their own rules
|
|
||||||
keep control.
|
|
||||||
"""
|
|
||||||
if not _on_hyprland():
|
|
||||||
return
|
|
||||||
if not hypr_rules_enabled():
|
|
||||||
return
|
|
||||||
win = get_window(window_title)
|
|
||||||
if not win:
|
|
||||||
return
|
|
||||||
addr = win.get("address")
|
|
||||||
if not addr:
|
|
||||||
return
|
|
||||||
if not win.get("floating"):
|
|
||||||
return
|
|
||||||
_dispatch_batch([f"dispatch settiled address:{addr}"])
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"get_window",
|
"get_window",
|
||||||
"get_monitor_available_rect",
|
"get_monitor_available_rect",
|
||||||
"resize",
|
"resize",
|
||||||
"resize_and_move",
|
"resize_and_move",
|
||||||
"settiled",
|
|
||||||
]
|
]
|
||||||
|
|||||||
@ -16,6 +16,12 @@ becomes the forcing function that keeps this module pure.
|
|||||||
The architecture, state diagram, invariant→transition mapping, and
|
The architecture, state diagram, invariant→transition mapping, and
|
||||||
event/effect lists are documented in `docs/POPOUT_ARCHITECTURE.md`.
|
event/effect lists are documented in `docs/POPOUT_ARCHITECTURE.md`.
|
||||||
This module's job is to be the executable form of that document.
|
This module's job is to be the executable form of that document.
|
||||||
|
|
||||||
|
This is the **commit 2 skeleton**: every state, every event type, every
|
||||||
|
effect type, and the `StateMachine` class with all fields initialized.
|
||||||
|
The `dispatch` method routes events to per-event handlers that all
|
||||||
|
currently return empty effect lists. Real transitions land in
|
||||||
|
commits 4-11 of `docs/POPOUT_REFACTOR_PLAN.md`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
@ -417,6 +423,10 @@ class StateMachine:
|
|||||||
The state machine never imports Qt or mpv. It never calls into the
|
The state machine never imports Qt or mpv. It never calls into the
|
||||||
adapter. The communication is one-directional: events in, effects
|
adapter. The communication is one-directional: events in, effects
|
||||||
out.
|
out.
|
||||||
|
|
||||||
|
**This is the commit 2 skeleton**: all state fields are initialized,
|
||||||
|
`dispatch` is wired but every transition handler is a stub that
|
||||||
|
returns an empty effect list. Real transitions land in commits 4-11.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
@ -501,7 +511,14 @@ class StateMachine:
|
|||||||
# and reads back the returned effects + the post-dispatch state.
|
# and reads back the returned effects + the post-dispatch state.
|
||||||
|
|
||||||
def dispatch(self, event: Event) -> list[Effect]:
|
def dispatch(self, event: Event) -> list[Effect]:
|
||||||
"""Process one event and return the effect list."""
|
"""Process one event and return the effect list.
|
||||||
|
|
||||||
|
**Skeleton (commit 2):** every event handler currently returns
|
||||||
|
an empty effect list. Real transitions land in commits 4-11.
|
||||||
|
Tests written in commit 3 will document what each transition
|
||||||
|
is supposed to do; they fail at this point and progressively
|
||||||
|
pass as the transitions land.
|
||||||
|
"""
|
||||||
# Closing is terminal — drop everything once we're done.
|
# Closing is terminal — drop everything once we're done.
|
||||||
if self.state == State.CLOSING:
|
if self.state == State.CLOSING:
|
||||||
return []
|
return []
|
||||||
@ -560,13 +577,13 @@ class StateMachine:
|
|||||||
case CloseRequested():
|
case CloseRequested():
|
||||||
return self._on_close_requested(event)
|
return self._on_close_requested(event)
|
||||||
case _:
|
case _:
|
||||||
# Unknown event type — defensive fall-through. The
|
# Unknown event type. Returning [] keeps the skeleton
|
||||||
# legality check above is the real gate; in release
|
# safe; the illegal-transition handler in commit 11
|
||||||
# mode illegal events log and drop, strict mode raises.
|
# will replace this with the env-gated raise.
|
||||||
return []
|
return []
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# Per-event handlers
|
# Per-event stub handlers (commit 2 — all return [])
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
def _on_open(self, event: Open) -> list[Effect]:
|
def _on_open(self, event: Open) -> list[Effect]:
|
||||||
@ -577,7 +594,8 @@ class StateMachine:
|
|||||||
on the state machine instance for the first ContentArrived
|
on the state machine instance for the first ContentArrived
|
||||||
handler to consume. After Open the machine is still in
|
handler to consume. After Open the machine is still in
|
||||||
AwaitingContent — the actual viewport seeding from saved_geo
|
AwaitingContent — the actual viewport seeding from saved_geo
|
||||||
happens inside the first ContentArrived.
|
happens inside the first ContentArrived (commit 8 wires the
|
||||||
|
actual viewport math; this commit just stashes the inputs).
|
||||||
|
|
||||||
No effects: the popout window is already constructed and
|
No effects: the popout window is already constructed and
|
||||||
showing. The first content load triggers the first fit.
|
showing. The first content load triggers the first fit.
|
||||||
@ -592,11 +610,12 @@ class StateMachine:
|
|||||||
|
|
||||||
Snapshot the content into `current_*` fields regardless of
|
Snapshot the content into `current_*` fields regardless of
|
||||||
kind so the rest of the state machine can read them. Then
|
kind so the rest of the state machine can read them. Then
|
||||||
transition to LoadingVideo (video) or DisplayingImage (image)
|
transition to LoadingVideo (video) or DisplayingImage (image,
|
||||||
and emit the appropriate load + fit effects.
|
commit 10) and emit the appropriate load + fit effects.
|
||||||
|
|
||||||
The first-content-load one-shot consumes `saved_geo` to seed
|
The first-content-load one-shot consumes `saved_geo` to seed
|
||||||
the viewport before the first fit. Every ContentArrived flips
|
the viewport before the first fit (commit 8 wires the actual
|
||||||
|
seeding). After this commit, every ContentArrived flips
|
||||||
`is_first_content_load` to False — the saved_geo path runs at
|
`is_first_content_load` to False — the saved_geo path runs at
|
||||||
most once per popout open.
|
most once per popout open.
|
||||||
"""
|
"""
|
||||||
|
|||||||
@ -5,7 +5,7 @@ from __future__ import annotations
|
|||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from PySide6.QtCore import Qt, QEventLoop, QRect, QTimer, Signal
|
from PySide6.QtCore import Qt, QRect, QTimer, Signal
|
||||||
from PySide6.QtGui import QPixmap
|
from PySide6.QtGui import QPixmap
|
||||||
from PySide6.QtWidgets import (
|
from PySide6.QtWidgets import (
|
||||||
QHBoxLayout, QInputDialog, QLabel, QMainWindow, QMenu, QPushButton,
|
QHBoxLayout, QInputDialog, QLabel, QMainWindow, QMenu, QPushButton,
|
||||||
@ -68,8 +68,9 @@ from .viewport import Viewport, _DRIFT_TOLERANCE, anchor_point
|
|||||||
# the dispatch trace to the Ctrl+L log panel — useful but invisible
|
# the dispatch trace to the Ctrl+L log panel — useful but invisible
|
||||||
# from the shell. We additionally attach a stderr StreamHandler to
|
# from the shell. We additionally attach a stderr StreamHandler to
|
||||||
# the adapter logger so `python -m booru_viewer.main_gui 2>&1 |
|
# the adapter logger so `python -m booru_viewer.main_gui 2>&1 |
|
||||||
# grep POPOUT_FSM` works from the terminal. The handler is tagged
|
# grep POPOUT_FSM` works during the commit-14a verification gate.
|
||||||
# with a sentinel attribute so re-imports don't stack duplicates.
|
# The handler is tagged with a sentinel attribute so re-imports
|
||||||
|
# don't stack duplicates.
|
||||||
import sys as _sys
|
import sys as _sys
|
||||||
_fsm_log = logging.getLogger("booru.popout.adapter")
|
_fsm_log = logging.getLogger("booru.popout.adapter")
|
||||||
_fsm_log.setLevel(logging.DEBUG)
|
_fsm_log.setLevel(logging.DEBUG)
|
||||||
@ -138,27 +139,30 @@ class FullscreenPreview(QMainWindow):
|
|||||||
self._stack = QStackedWidget()
|
self._stack = QStackedWidget()
|
||||||
central.layout().addWidget(self._stack)
|
central.layout().addWidget(self._stack)
|
||||||
|
|
||||||
self._vol_scroll_accum = 0
|
|
||||||
|
|
||||||
self._viewer = ImageViewer()
|
self._viewer = ImageViewer()
|
||||||
self._viewer.close_requested.connect(self.close)
|
self._viewer.close_requested.connect(self.close)
|
||||||
self._stack.addWidget(self._viewer)
|
self._stack.addWidget(self._viewer)
|
||||||
|
|
||||||
self._video = VideoPlayer()
|
self._video = VideoPlayer()
|
||||||
# Two legacy VideoPlayer forwarding connections were removed
|
# Note: two legacy VideoPlayer signal connections removed in
|
||||||
# during the state machine extraction — don't reintroduce:
|
# commits 14b and 16:
|
||||||
#
|
#
|
||||||
# - `self._video.play_next.connect(self.play_next_requested)`:
|
# - `self._video.play_next.connect(self.play_next_requested)`
|
||||||
# the EmitPlayNextRequested effect emits play_next_requested
|
# (removed in 14b): the EmitPlayNextRequested effect now
|
||||||
# via the state machine dispatch path. Keeping the forward
|
# emits play_next_requested via the state machine dispatch
|
||||||
# would double-emit on every video EOF in Loop=Next mode.
|
# path. Keeping the forwarding would double-emit the signal
|
||||||
|
# and cause main_window to navigate twice on every video
|
||||||
|
# EOF in Loop=Next mode.
|
||||||
#
|
#
|
||||||
# - `self._video.video_size.connect(self._on_video_size)`:
|
# - `self._video.video_size.connect(self._on_video_size)`
|
||||||
# the dispatch path's VideoSizeKnown handler produces
|
# (removed in 16): the dispatch path's VideoSizeKnown
|
||||||
# FitWindowToContent which the apply path delegates to
|
# handler emits FitWindowToContent which the apply path
|
||||||
# _fit_to_content. The direct forwarding was a parallel
|
# delegates to _fit_to_content. The legacy direct call to
|
||||||
# duplicate that same-rect-skip in _fit_to_content masked
|
# _on_video_size → _fit_to_content was a parallel duplicate
|
||||||
# but that muddied the dispatch trace.
|
# that the same-rect skip in _fit_to_content made harmless,
|
||||||
|
# but it muddied the trace. The dispatch lambda below is
|
||||||
|
# wired in the same __init__ block (post state machine
|
||||||
|
# construction) and is now the sole path.
|
||||||
self._stack.addWidget(self._video)
|
self._stack.addWidget(self._video)
|
||||||
|
|
||||||
self.setCentralWidget(central)
|
self.setCentralWidget(central)
|
||||||
@ -281,9 +285,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
self._stack.setMouseTracking(True)
|
self._stack.setMouseTracking(True)
|
||||||
|
|
||||||
from PySide6.QtWidgets import QApplication
|
from PySide6.QtWidgets import QApplication
|
||||||
app = QApplication.instance()
|
QApplication.instance().installEventFilter(self)
|
||||||
if app is not None:
|
|
||||||
app.installEventFilter(self)
|
|
||||||
# Pick target monitor
|
# Pick target monitor
|
||||||
target_screen = None
|
target_screen = None
|
||||||
if monitor and monitor != "Same as app":
|
if monitor and monitor != "Same as app":
|
||||||
@ -329,31 +331,13 @@ class FullscreenPreview(QMainWindow):
|
|||||||
# Qt fallback path) skip viewport updates triggered by our own
|
# Qt fallback path) skip viewport updates triggered by our own
|
||||||
# programmatic geometry changes.
|
# programmatic geometry changes.
|
||||||
self._applying_dispatch: bool = False
|
self._applying_dispatch: bool = False
|
||||||
# Stashed content dims from the tiled early-return in
|
|
||||||
# _fit_to_content. When the user un-tiles the window, resizeEvent
|
|
||||||
# fires — the debounce timer re-runs _fit_to_content with these
|
|
||||||
# dims so the floating window gets the correct aspect ratio.
|
|
||||||
self._tiled_pending_content: tuple[int, int] | None = None
|
|
||||||
self._untile_refit_timer = QTimer(self)
|
|
||||||
self._untile_refit_timer.setSingleShot(True)
|
|
||||||
self._untile_refit_timer.setInterval(50)
|
|
||||||
self._untile_refit_timer.timeout.connect(self._check_untile_refit)
|
|
||||||
# Last known windowed geometry — captured on entering fullscreen so
|
# Last known windowed geometry — captured on entering fullscreen so
|
||||||
# F11 → windowed can land back on the same spot. Seeded from saved
|
# F11 → windowed can land back on the same spot. Seeded from saved
|
||||||
# geometry when the popout opens windowed, so even an immediate
|
# geometry when the popout opens windowed, so even an immediate
|
||||||
# F11 → fullscreen → F11 has a sensible target.
|
# F11 → fullscreen → F11 has a sensible target.
|
||||||
self._windowed_geometry = None
|
self._windowed_geometry = None
|
||||||
# Restore saved state or start fullscreen
|
# Restore saved state or start fullscreen
|
||||||
if FullscreenPreview._saved_tiled and not FullscreenPreview._saved_fullscreen:
|
if FullscreenPreview._saved_geometry and not FullscreenPreview._saved_fullscreen:
|
||||||
# Was tiled at last close — let Hyprland's layout place it,
|
|
||||||
# then dispatch `settiled` to override the windowrule's float.
|
|
||||||
# Saved geometry is meaningless for a tiled window, so skip
|
|
||||||
# setGeometry entirely.
|
|
||||||
self.show()
|
|
||||||
QTimer.singleShot(
|
|
||||||
50, lambda: hyprland.settiled(self.windowTitle())
|
|
||||||
)
|
|
||||||
elif FullscreenPreview._saved_geometry and not FullscreenPreview._saved_fullscreen:
|
|
||||||
self.setGeometry(FullscreenPreview._saved_geometry)
|
self.setGeometry(FullscreenPreview._saved_geometry)
|
||||||
self._pending_position_restore = (
|
self._pending_position_restore = (
|
||||||
FullscreenPreview._saved_geometry.x(),
|
FullscreenPreview._saved_geometry.x(),
|
||||||
@ -368,15 +352,17 @@ class FullscreenPreview(QMainWindow):
|
|||||||
else:
|
else:
|
||||||
self.showFullScreen()
|
self.showFullScreen()
|
||||||
|
|
||||||
# ---- State machine adapter wiring ----
|
# ---- State machine adapter wiring (commit 14a) ----
|
||||||
# Construct the pure-Python state machine and dispatch the
|
# Construct the pure-Python state machine and dispatch the
|
||||||
# initial Open event with the cross-popout-session class state
|
# initial Open event with the cross-popout-session class state
|
||||||
# the legacy code stashed above. Every Qt event handler, mpv
|
# the legacy code stashed above. The state machine runs in
|
||||||
# signal, and button click below dispatches a state machine
|
# PARALLEL with the legacy imperative code: every Qt event
|
||||||
# event via `_dispatch_and_apply`, which applies the returned
|
# handler / mpv signal / button click below dispatches a state
|
||||||
# effects to widgets. The state machine is the authority for
|
# machine event AND continues to run the existing imperative
|
||||||
# "what to do next"; the imperative helpers below are the
|
# action. The state machine's returned effects are LOGGED at
|
||||||
# implementation the apply path delegates into.
|
# DEBUG, not applied to widgets. The legacy path stays
|
||||||
|
# authoritative through commit 14a; commit 14b switches the
|
||||||
|
# authority to the dispatch path.
|
||||||
#
|
#
|
||||||
# The grid_cols field is used by the keyboard nav handlers
|
# The grid_cols field is used by the keyboard nav handlers
|
||||||
# for the Up/Down ±cols stride.
|
# for the Up/Down ±cols stride.
|
||||||
@ -395,17 +381,20 @@ class FullscreenPreview(QMainWindow):
|
|||||||
monitor=monitor,
|
monitor=monitor,
|
||||||
))
|
))
|
||||||
|
|
||||||
# Wire VideoPlayer's playback_restart Signal to the adapter's
|
# Wire VideoPlayer's playback_restart Signal (added in commit 1)
|
||||||
# dispatch routing. mpv emits playback-restart once after each
|
# to the adapter's dispatch routing. mpv emits playback-restart
|
||||||
# loadfile and once after each completed seek; the adapter
|
# once after each loadfile and once after each completed seek;
|
||||||
# distinguishes by checking the state machine's current state
|
# the adapter distinguishes by checking the state machine's
|
||||||
# at dispatch time.
|
# current state at dispatch time.
|
||||||
self._video.playback_restart.connect(self._on_video_playback_restart)
|
self._video.playback_restart.connect(self._on_video_playback_restart)
|
||||||
# Wire VideoPlayer signals to dispatch+apply via the
|
# Wire VideoPlayer signals to dispatch+apply via the
|
||||||
# _dispatch_and_apply helper. Every lambda below MUST call
|
# _dispatch_and_apply helper. NOTE: every lambda below MUST
|
||||||
# _dispatch_and_apply, not _fsm_dispatch directly — see the
|
# call _dispatch_and_apply, not _fsm_dispatch directly. Calling
|
||||||
# docstring on _dispatch_and_apply for the historical bug that
|
# _fsm_dispatch alone produces effects that never reach
|
||||||
# explains the distinction.
|
# widgets — the bug that landed in commit 14b and broke
|
||||||
|
# video auto-fit (FitWindowToContent never applied) and
|
||||||
|
# Loop=Next play_next (EmitPlayNextRequested never applied)
|
||||||
|
# until the lambdas were fixed in this commit.
|
||||||
self._video.play_next.connect(
|
self._video.play_next.connect(
|
||||||
lambda: self._dispatch_and_apply(VideoEofReached())
|
lambda: self._dispatch_and_apply(VideoEofReached())
|
||||||
)
|
)
|
||||||
@ -454,8 +443,8 @@ class FullscreenPreview(QMainWindow):
|
|||||||
|
|
||||||
Adapter-internal helper. Centralizes the dispatch + log path
|
Adapter-internal helper. Centralizes the dispatch + log path
|
||||||
so every wire-point is one line. Returns the effect list for
|
so every wire-point is one line. Returns the effect list for
|
||||||
callers that want to inspect it; prefer `_dispatch_and_apply`
|
callers that want to inspect it (commit 14a doesn't use the
|
||||||
at wire-points so the apply step can't be forgotten.
|
return value; commit 14b will pattern-match and apply).
|
||||||
|
|
||||||
The hasattr guard handles edge cases where Qt events might
|
The hasattr guard handles edge cases where Qt events might
|
||||||
fire during __init__ (e.g. resizeEvent on the first show())
|
fire during __init__ (e.g. resizeEvent on the first show())
|
||||||
@ -477,10 +466,10 @@ class FullscreenPreview(QMainWindow):
|
|||||||
return effects
|
return effects
|
||||||
|
|
||||||
def _on_video_playback_restart(self) -> None:
|
def _on_video_playback_restart(self) -> None:
|
||||||
"""mpv `playback-restart` event arrived via VideoPlayer's
|
"""mpv `playback-restart` event arrived (via VideoPlayer's
|
||||||
playback_restart Signal. Distinguish VideoStarted (after load)
|
playback_restart Signal added in commit 1). Distinguish
|
||||||
from SeekCompleted (after seek) by the state machine's current
|
VideoStarted (after load) from SeekCompleted (after seek) by
|
||||||
state.
|
the state machine's current state.
|
||||||
|
|
||||||
This is the ONE place the adapter peeks at state to choose an
|
This is the ONE place the adapter peeks at state to choose an
|
||||||
event type — it's a read, not a write, and it's the price of
|
event type — it's a read, not a write, and it's the price of
|
||||||
@ -497,35 +486,42 @@ class FullscreenPreview(QMainWindow):
|
|||||||
# round trip.
|
# round trip.
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# Effect application
|
# Commit 14b — effect application
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
#
|
#
|
||||||
# The state machine's dispatch returns a list of Effect descriptors
|
# The state machine's dispatch returns a list of Effect descriptors
|
||||||
# describing what the adapter should do. `_apply_effects` is the
|
# describing what the adapter should do. `_apply_effects` is the
|
||||||
# single dispatch point: `_dispatch_and_apply` dispatches then calls
|
# single dispatch point: every wire-point that calls `_fsm_dispatch`
|
||||||
# this. The pattern-match by type is the architectural choke point
|
# follows it with `_apply_effects(effects)`. The pattern-match by
|
||||||
# — a new Effect type in state.py triggers the TypeError branch at
|
# type is the architectural choke point — if a new effect type is
|
||||||
# runtime instead of silently dropping the effect.
|
# added in state.py, the type-check below catches the missing
|
||||||
|
# handler at runtime instead of silently dropping.
|
||||||
#
|
#
|
||||||
# A few apply handlers are intentional no-ops:
|
# Several apply handlers are deliberate no-ops in commit 14b:
|
||||||
#
|
#
|
||||||
# - ApplyMute / ApplyVolume / ApplyLoopMode: the legacy slot
|
# - ApplyMute / ApplyVolume / ApplyLoopMode: the legacy slot
|
||||||
# connections on the popout's VideoPlayer handle the user-facing
|
# connections on the popout's VideoPlayer are still active and
|
||||||
# toggles directly. The state machine tracks these values as the
|
# handle the user-facing toggles directly. The state machine
|
||||||
# source of truth for sync with the embedded preview; pushing
|
# tracks these values for the upcoming SyncFromEmbedded path
|
||||||
# them back here would create a double-write hazard.
|
# (future commit) but doesn't push them to widgets — pushing
|
||||||
|
# would create a sync hazard with the embedded preview's mute
|
||||||
|
# state, which main_window pushes via direct attribute writes.
|
||||||
#
|
#
|
||||||
# - SeekVideoTo: `_ClickSeekSlider.clicked_position → _seek` on the
|
# - SeekVideoTo: the legacy `_ClickSeekSlider.clicked_position →
|
||||||
# VideoPlayer handles both the mpv.seek call and the legacy
|
# VideoPlayer._seek` connection still handles both the mpv.seek
|
||||||
# 500ms pin window. The state machine's SeekingVideo state
|
# call and the legacy 500ms `_seek_pending_until` pin window.
|
||||||
# tracks the seek; the slider rendering and the seek call itself
|
# The state machine's SeekingVideo state tracks the seek for
|
||||||
# live on VideoPlayer.
|
# future authority, but the slider rendering and the seek call
|
||||||
|
# itself stay legacy. Replacing this requires either modifying
|
||||||
|
# VideoPlayer's _poll loop (forbidden by the no-touch rule) or
|
||||||
|
# building a custom poll loop in the adapter.
|
||||||
#
|
#
|
||||||
# Every other effect (LoadImage, LoadVideo, StopMedia,
|
# The other effect types (LoadImage, LoadVideo, StopMedia,
|
||||||
# FitWindowToContent, EnterFullscreen, ExitFullscreen,
|
# FitWindowToContent, EnterFullscreen, ExitFullscreen,
|
||||||
# EmitNavigate, EmitPlayNextRequested, EmitClosed, TogglePlay)
|
# EmitNavigate, EmitPlayNextRequested, EmitClosed, TogglePlay)
|
||||||
# delegates to a private helper in this file. The state machine
|
# delegate to existing private helpers in this file. The state
|
||||||
# is the entry point; the helpers are the implementation.
|
# machine becomes the official entry point for these operations;
|
||||||
|
# the helpers stay in place as the implementation.
|
||||||
|
|
||||||
def _apply_effects(self, effects: list) -> None:
|
def _apply_effects(self, effects: list) -> None:
|
||||||
"""Apply a list of Effect descriptors returned by dispatch.
|
"""Apply a list of Effect descriptors returned by dispatch.
|
||||||
@ -542,19 +538,18 @@ class FullscreenPreview(QMainWindow):
|
|||||||
elif isinstance(e, StopMedia):
|
elif isinstance(e, StopMedia):
|
||||||
self._apply_stop_media()
|
self._apply_stop_media()
|
||||||
elif isinstance(e, ApplyMute):
|
elif isinstance(e, ApplyMute):
|
||||||
# No-op — VideoPlayer's legacy slot owns widget update;
|
# No-op in 14b — legacy slot handles widget update.
|
||||||
# the state machine keeps state.mute as the sync source
|
# State machine tracks state.mute for future authority.
|
||||||
# for the embedded-preview path.
|
|
||||||
pass
|
pass
|
||||||
elif isinstance(e, ApplyVolume):
|
elif isinstance(e, ApplyVolume):
|
||||||
pass # same — widget update handled by VideoPlayer
|
pass # same — no-op in 14b
|
||||||
elif isinstance(e, ApplyLoopMode):
|
elif isinstance(e, ApplyLoopMode):
|
||||||
pass # same — widget update handled by VideoPlayer
|
pass # same — no-op in 14b
|
||||||
elif isinstance(e, SeekVideoTo):
|
elif isinstance(e, SeekVideoTo):
|
||||||
# No-op — `_seek` slot on VideoPlayer handles both
|
# No-op in 14b — legacy `_seek` slot handles both
|
||||||
# mpv.seek and the pin window. The state's SeekingVideo
|
# mpv.seek (now exact) and the pin window. Replacing
|
||||||
# fields exist so the slider's read-path still returns
|
# this requires touching VideoPlayer._poll which is
|
||||||
# the clicked position during the seek.
|
# out of scope.
|
||||||
pass
|
pass
|
||||||
elif isinstance(e, TogglePlay):
|
elif isinstance(e, TogglePlay):
|
||||||
self._video._toggle_play()
|
self._video._toggle_play()
|
||||||
@ -620,7 +615,6 @@ class FullscreenPreview(QMainWindow):
|
|||||||
|
|
||||||
_saved_geometry = None # remembers window size/position across opens
|
_saved_geometry = None # remembers window size/position across opens
|
||||||
_saved_fullscreen = False
|
_saved_fullscreen = False
|
||||||
_saved_tiled = False # True if Hyprland had it tiled at last close
|
|
||||||
_current_tags: dict[str, list[str]] = {}
|
_current_tags: dict[str, list[str]] = {}
|
||||||
_current_tag_list: list[str] = []
|
_current_tag_list: list[str] = []
|
||||||
|
|
||||||
@ -628,25 +622,6 @@ class FullscreenPreview(QMainWindow):
|
|||||||
self._current_tags = tag_categories
|
self._current_tags = tag_categories
|
||||||
self._current_tag_list = tag_list
|
self._current_tag_list = tag_list
|
||||||
|
|
||||||
def _exec_menu_at_button(self, menu: QMenu, btn: QPushButton):
|
|
||||||
"""Open a menu anchored below a button, blocking until dismissed.
|
|
||||||
|
|
||||||
Uses popup() + QEventLoop instead of exec(pos) because on
|
|
||||||
Hyprland/Wayland the popout window gets moved via hyprctl after
|
|
||||||
Qt maps it, and Qt's window-position tracking stays stale. Using
|
|
||||||
exec(btn.mapToGlobal(...)) resolves to a global point on the
|
|
||||||
wrong monitor, causing the menu to flash there before the
|
|
||||||
compositor corrects it. popup() routes through the same path
|
|
||||||
but with triggered/aboutToHide signals we can block manually.
|
|
||||||
"""
|
|
||||||
result = [None]
|
|
||||||
menu.triggered.connect(lambda a: result.__setitem__(0, a))
|
|
||||||
loop = QEventLoop()
|
|
||||||
menu.aboutToHide.connect(loop.quit)
|
|
||||||
menu.popup(btn.mapToGlobal(btn.rect().bottomLeft()))
|
|
||||||
loop.exec()
|
|
||||||
return result[0]
|
|
||||||
|
|
||||||
def _show_bl_tag_menu(self) -> None:
|
def _show_bl_tag_menu(self) -> None:
|
||||||
menu = QMenu(self)
|
menu = QMenu(self)
|
||||||
if self._current_tags:
|
if self._current_tags:
|
||||||
@ -657,7 +632,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
else:
|
else:
|
||||||
for tag in self._current_tag_list[:30]:
|
for tag in self._current_tag_list[:30]:
|
||||||
menu.addAction(tag)
|
menu.addAction(tag)
|
||||||
action = self._exec_menu_at_button(menu, self._bl_tag_btn)
|
action = menu.exec(self._bl_tag_btn.mapToGlobal(self._bl_tag_btn.rect().bottomLeft()))
|
||||||
if action:
|
if action:
|
||||||
self.blacklist_tag_requested.emit(action.text())
|
self.blacklist_tag_requested.emit(action.text())
|
||||||
|
|
||||||
@ -670,14 +645,14 @@ class FullscreenPreview(QMainWindow):
|
|||||||
self._save_btn.setToolTip("Unsave from library" if saved else "Save to library (S)")
|
self._save_btn.setToolTip("Unsave from library" if saved else "Save to library (S)")
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# Public method interface
|
# Public method interface (commit 15)
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
#
|
#
|
||||||
# The methods below are the only entry points main_window.py uses
|
# The methods below replace direct underscore access from
|
||||||
# to drive the popout. They wrap the private fields so main_window
|
# main_window.py. They wrap the existing private fields so
|
||||||
# doesn't have to know about VideoPlayer / ImageViewer /
|
# main_window doesn't have to know about VideoPlayer / ImageViewer
|
||||||
# QStackedWidget internals. The private fields stay in place; these
|
# / QStackedWidget internals. The legacy private fields stay in
|
||||||
# are clean public wrappers, not a re-architecture.
|
# place — these are clean public wrappers, not a re-architecture.
|
||||||
|
|
||||||
def is_video_active(self) -> bool:
|
def is_video_active(self) -> bool:
|
||||||
"""True if the popout is currently showing a video (vs image).
|
"""True if the popout is currently showing a video (vs image).
|
||||||
@ -814,9 +789,6 @@ class FullscreenPreview(QMainWindow):
|
|||||||
try:
|
try:
|
||||||
self._video._mpv.pause = True
|
self._video._mpv.pause = True
|
||||||
except Exception:
|
except Exception:
|
||||||
# mpv was torn down or is mid-transition between
|
|
||||||
# files; pause is best-effort so a stale instance
|
|
||||||
# rejecting the property write isn't a real failure.
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def stop_media(self) -> None:
|
def stop_media(self) -> None:
|
||||||
@ -865,7 +837,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
folder_actions[id(a)] = folder
|
folder_actions[id(a)] = folder
|
||||||
menu.addSeparator()
|
menu.addSeparator()
|
||||||
new_action = menu.addAction("+ New Folder...")
|
new_action = menu.addAction("+ New Folder...")
|
||||||
action = self._exec_menu_at_button(menu, self._save_btn)
|
action = menu.exec(self._save_btn.mapToGlobal(self._save_btn.rect().bottomLeft()))
|
||||||
if not action:
|
if not action:
|
||||||
return
|
return
|
||||||
if action == unfiled:
|
if action == unfiled:
|
||||||
@ -897,7 +869,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
folder_actions[id(a)] = folder
|
folder_actions[id(a)] = folder
|
||||||
menu.addSeparator()
|
menu.addSeparator()
|
||||||
new_action = menu.addAction("+ New Folder...")
|
new_action = menu.addAction("+ New Folder...")
|
||||||
action = self._exec_menu_at_button(menu, self._bookmark_btn)
|
action = menu.exec(self._bookmark_btn.mapToGlobal(self._bookmark_btn.rect().bottomLeft()))
|
||||||
if not action:
|
if not action:
|
||||||
return
|
return
|
||||||
if action == unfiled:
|
if action == unfiled:
|
||||||
@ -930,17 +902,10 @@ class FullscreenPreview(QMainWindow):
|
|||||||
bm_menu.addSeparator()
|
bm_menu.addSeparator()
|
||||||
bm_new_action = bm_menu.addAction("+ New Folder...")
|
bm_new_action = bm_menu.addAction("+ New Folder...")
|
||||||
|
|
||||||
save_menu = None
|
|
||||||
save_unsorted = None
|
|
||||||
save_new = None
|
|
||||||
save_folder_actions = {}
|
|
||||||
unsave_action = None
|
|
||||||
if self._is_saved:
|
|
||||||
unsave_action = menu.addAction("Unsave from Library")
|
|
||||||
else:
|
|
||||||
save_menu = menu.addMenu("Save to Library")
|
save_menu = menu.addMenu("Save to Library")
|
||||||
save_unsorted = save_menu.addAction("Unfiled")
|
save_unsorted = save_menu.addAction("Unfiled")
|
||||||
save_menu.addSeparator()
|
save_menu.addSeparator()
|
||||||
|
save_folder_actions = {}
|
||||||
if self._folders_callback:
|
if self._folders_callback:
|
||||||
for folder in self._folders_callback():
|
for folder in self._folders_callback():
|
||||||
a = save_menu.addAction(folder)
|
a = save_menu.addAction(folder)
|
||||||
@ -948,9 +913,12 @@ class FullscreenPreview(QMainWindow):
|
|||||||
save_menu.addSeparator()
|
save_menu.addSeparator()
|
||||||
save_new = save_menu.addAction("+ New Folder...")
|
save_new = save_menu.addAction("+ New Folder...")
|
||||||
|
|
||||||
|
unsave_action = None
|
||||||
|
if self._is_saved:
|
||||||
|
unsave_action = menu.addAction("Unsave from Library")
|
||||||
|
|
||||||
menu.addSeparator()
|
menu.addSeparator()
|
||||||
copy_action = menu.addAction("Copy File to Clipboard")
|
copy_action = menu.addAction("Copy File to Clipboard")
|
||||||
copy_url_action = menu.addAction("Copy Image URL")
|
|
||||||
open_action = menu.addAction("Open in Default App")
|
open_action = menu.addAction("Open in Default App")
|
||||||
browser_action = menu.addAction("Open in Browser")
|
browser_action = menu.addAction("Open in Browser")
|
||||||
|
|
||||||
@ -985,27 +953,15 @@ class FullscreenPreview(QMainWindow):
|
|||||||
elif action == unsave_action:
|
elif action == unsave_action:
|
||||||
self.unsave_requested.emit()
|
self.unsave_requested.emit()
|
||||||
elif action == copy_action:
|
elif action == copy_action:
|
||||||
from pathlib import Path as _Path
|
|
||||||
from PySide6.QtCore import QMimeData, QUrl
|
|
||||||
from PySide6.QtWidgets import QApplication
|
from PySide6.QtWidgets import QApplication
|
||||||
from PySide6.QtGui import QPixmap as _QP
|
from PySide6.QtGui import QPixmap as _QP
|
||||||
cp = self._state_machine.current_path
|
pix = self._viewer._pixmap
|
||||||
if cp and cp.startswith(("http://", "https://")):
|
if pix and not pix.isNull():
|
||||||
from ...core.cache import cached_path_for
|
QApplication.clipboard().setPixmap(pix)
|
||||||
cached = cached_path_for(cp)
|
elif self._state.current_path:
|
||||||
cp = str(cached) if cached.exists() else None
|
pix = _QP(self._state.current_path)
|
||||||
if cp and _Path(cp).exists():
|
|
||||||
mime = QMimeData()
|
|
||||||
mime.setUrls([QUrl.fromLocalFile(str(_Path(cp).resolve()))])
|
|
||||||
pix = _QP(cp)
|
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
mime.setImageData(pix.toImage())
|
QApplication.clipboard().setPixmap(pix)
|
||||||
QApplication.clipboard().setMimeData(mime)
|
|
||||||
elif action == copy_url_action:
|
|
||||||
from PySide6.QtWidgets import QApplication
|
|
||||||
url = self._state_machine.current_path or ""
|
|
||||||
if url:
|
|
||||||
QApplication.clipboard().setText(url)
|
|
||||||
elif action == open_action:
|
elif action == open_action:
|
||||||
self.open_in_default.emit()
|
self.open_in_default.emit()
|
||||||
elif action == browser_action:
|
elif action == browser_action:
|
||||||
@ -1054,9 +1010,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
from ...core.cache import _referer_for
|
from ...core.cache import _referer_for
|
||||||
referer = _referer_for(urlparse(path))
|
referer = _referer_for(urlparse(path))
|
||||||
except Exception:
|
except Exception:
|
||||||
_fsm_log.debug(
|
pass
|
||||||
"referer derivation failed for %s", path, exc_info=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Dispatch + apply. The state machine produces:
|
# Dispatch + apply. The state machine produces:
|
||||||
# - LoadVideo or LoadImage (loads the media)
|
# - LoadVideo or LoadImage (loads the media)
|
||||||
@ -1323,10 +1277,8 @@ class FullscreenPreview(QMainWindow):
|
|||||||
else:
|
else:
|
||||||
floating = None
|
floating = None
|
||||||
if floating is False:
|
if floating is False:
|
||||||
hyprland.resize(self.windowTitle(), 0, 0, animate=self._first_fit_pending) # tiled: just set keep_aspect_ratio
|
hyprland.resize(self.windowTitle(), 0, 0) # tiled: just set keep_aspect_ratio
|
||||||
self._tiled_pending_content = (content_w, content_h)
|
|
||||||
return
|
return
|
||||||
self._tiled_pending_content = None
|
|
||||||
aspect = content_w / content_h
|
aspect = content_w / content_h
|
||||||
screen = self.screen()
|
screen = self.screen()
|
||||||
if screen is None:
|
if screen is None:
|
||||||
@ -1371,10 +1323,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
# Hyprland: hyprctl is the sole authority. Calling self.resize()
|
# Hyprland: hyprctl is the sole authority. Calling self.resize()
|
||||||
# here would race with the batch below and produce visible flashing
|
# here would race with the batch below and produce visible flashing
|
||||||
# when the window also has to move.
|
# when the window also has to move.
|
||||||
hyprland.resize_and_move(
|
hyprland.resize_and_move(self.windowTitle(), w, h, x, y, win=win)
|
||||||
self.windowTitle(), w, h, x, y, win=win,
|
|
||||||
animate=self._first_fit_pending,
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
# Non-Hyprland fallback: Qt drives geometry directly. Use
|
# Non-Hyprland fallback: Qt drives geometry directly. Use
|
||||||
# setGeometry with the computed top-left rather than resize()
|
# setGeometry with the computed top-left rather than resize()
|
||||||
@ -1394,18 +1343,6 @@ class FullscreenPreview(QMainWindow):
|
|||||||
self._pending_position_restore = None
|
self._pending_position_restore = None
|
||||||
self._pending_size = None
|
self._pending_size = None
|
||||||
|
|
||||||
def _check_untile_refit(self) -> None:
|
|
||||||
"""Debounced callback: re-run fit if we left tiled under new content."""
|
|
||||||
if self._tiled_pending_content is not None:
|
|
||||||
cw, ch = self._tiled_pending_content
|
|
||||||
self._fit_to_content(cw, ch)
|
|
||||||
# Reset image zoom/offset so the image fits the new window
|
|
||||||
# geometry cleanly — the viewer's state is stale from the
|
|
||||||
# tiled layout.
|
|
||||||
if self._stack.currentIndex() == 0:
|
|
||||||
self._viewer._fit_to_view()
|
|
||||||
self._viewer.update()
|
|
||||||
|
|
||||||
def _show_overlay(self) -> None:
|
def _show_overlay(self) -> None:
|
||||||
"""Show toolbar and video controls, restart auto-hide timer."""
|
"""Show toolbar and video controls, restart auto-hide timer."""
|
||||||
if not self._ui_visible:
|
if not self._ui_visible:
|
||||||
@ -1477,11 +1414,11 @@ class FullscreenPreview(QMainWindow):
|
|||||||
return True
|
return True
|
||||||
elif key == Qt.Key.Key_Period and self._stack.currentIndex() == 1:
|
elif key == Qt.Key.Key_Period and self._stack.currentIndex() == 1:
|
||||||
# +/- keys are seek-relative, NOT slider-pin seeks. The
|
# +/- keys are seek-relative, NOT slider-pin seeks. The
|
||||||
# state machine's SeekRequested models slider-driven
|
# state machine's SeekRequested is for slider-driven
|
||||||
# seeks (target_ms known up front); relative seeks go
|
# seeks. The +/- keys go straight to mpv via the
|
||||||
# straight to mpv. If we ever want the dispatch path to
|
# legacy path; the dispatch path doesn't see them in
|
||||||
# own them, compute target_ms from current position and
|
# 14a (commit 14b will route them through SeekRequested
|
||||||
# route through SeekRequested.
|
# with a target_ms computed from current position).
|
||||||
self._video._seek_relative(1800)
|
self._video._seek_relative(1800)
|
||||||
return True
|
return True
|
||||||
elif key == Qt.Key.Key_Comma and self._stack.currentIndex() == 1:
|
elif key == Qt.Key.Key_Comma and self._stack.currentIndex() == 1:
|
||||||
@ -1498,11 +1435,13 @@ class FullscreenPreview(QMainWindow):
|
|||||||
return True
|
return True
|
||||||
# Vertical wheel adjusts volume on the video stack only
|
# Vertical wheel adjusts volume on the video stack only
|
||||||
if self._stack.currentIndex() == 1:
|
if self._stack.currentIndex() == 1:
|
||||||
self._vol_scroll_accum += event.angleDelta().y()
|
delta = event.angleDelta().y()
|
||||||
steps = self._vol_scroll_accum // 120
|
if delta:
|
||||||
if steps:
|
vol = max(0, min(100, self._video.volume + (5 if delta > 0 else -5)))
|
||||||
self._vol_scroll_accum -= steps * 120
|
# Dispatch VolumeSet so state.volume tracks. The
|
||||||
vol = max(0, min(100, self._video.volume + 5 * steps))
|
# actual mpv.volume write still happens via the
|
||||||
|
# legacy assignment below — ApplyVolume is a no-op
|
||||||
|
# in 14b (see _apply_effects docstring).
|
||||||
self._dispatch_and_apply(VolumeSet(value=vol))
|
self._dispatch_and_apply(VolumeSet(value=vol))
|
||||||
self._video.volume = vol
|
self._video.volume = vol
|
||||||
self._show_overlay()
|
self._show_overlay()
|
||||||
@ -1512,7 +1451,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
cursor_pos = self.mapFromGlobal(event.globalPosition().toPoint() if hasattr(event, 'globalPosition') else event.globalPos())
|
cursor_pos = self.mapFromGlobal(event.globalPosition().toPoint() if hasattr(event, 'globalPosition') else event.globalPos())
|
||||||
y = cursor_pos.y()
|
y = cursor_pos.y()
|
||||||
h = self.height()
|
h = self.height()
|
||||||
zone = max(60, h // 10) # ~10% of window height, floor 60px
|
zone = 40 # px from top/bottom edge to trigger
|
||||||
if y < zone:
|
if y < zone:
|
||||||
self._toolbar.show()
|
self._toolbar.show()
|
||||||
self._hide_timer.start()
|
self._hide_timer.start()
|
||||||
@ -1614,9 +1553,6 @@ class FullscreenPreview(QMainWindow):
|
|||||||
if vp and vp.get('w') and vp.get('h'):
|
if vp and vp.get('w') and vp.get('h'):
|
||||||
content_w, content_h = vp['w'], vp['h']
|
content_w, content_h = vp['w'], vp['h']
|
||||||
except Exception:
|
except Exception:
|
||||||
# mpv is mid-shutdown or between files; leave
|
|
||||||
# content_w/h at 0 so the caller falls back to the
|
|
||||||
# saved viewport rather than a bogus fit rect.
|
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
pix = self._viewer._pixmap
|
pix = self._viewer._pixmap
|
||||||
@ -1637,11 +1573,8 @@ class FullscreenPreview(QMainWindow):
|
|||||||
def resizeEvent(self, event) -> None:
|
def resizeEvent(self, event) -> None:
|
||||||
super().resizeEvent(event)
|
super().resizeEvent(event)
|
||||||
# Position floating overlays
|
# Position floating overlays
|
||||||
central = self.centralWidget()
|
w = self.centralWidget().width()
|
||||||
if central is None:
|
h = self.centralWidget().height()
|
||||||
return
|
|
||||||
w = central.width()
|
|
||||||
h = central.height()
|
|
||||||
tb_h = self._toolbar.sizeHint().height()
|
tb_h = self._toolbar.sizeHint().height()
|
||||||
self._toolbar.setGeometry(0, 0, w, tb_h)
|
self._toolbar.setGeometry(0, 0, w, tb_h)
|
||||||
ctrl_h = self._video._controls_bar.sizeHint().height()
|
ctrl_h = self._video._controls_bar.sizeHint().height()
|
||||||
@ -1678,8 +1611,6 @@ class FullscreenPreview(QMainWindow):
|
|||||||
# position source on Wayland).
|
# position source on Wayland).
|
||||||
import os
|
import os
|
||||||
if os.environ.get("HYPRLAND_INSTANCE_SIGNATURE"):
|
if os.environ.get("HYPRLAND_INSTANCE_SIGNATURE"):
|
||||||
if self._tiled_pending_content is not None:
|
|
||||||
self._untile_refit_timer.start()
|
|
||||||
return
|
return
|
||||||
if self._applying_dispatch or self.isFullScreen():
|
if self._applying_dispatch or self.isFullScreen():
|
||||||
return
|
return
|
||||||
@ -1755,13 +1686,9 @@ class FullscreenPreview(QMainWindow):
|
|||||||
# Geometry is adapter-side concern, not state machine concern,
|
# Geometry is adapter-side concern, not state machine concern,
|
||||||
# so the state machine doesn't see it.
|
# so the state machine doesn't see it.
|
||||||
FullscreenPreview._saved_fullscreen = self.isFullScreen()
|
FullscreenPreview._saved_fullscreen = self.isFullScreen()
|
||||||
FullscreenPreview._saved_tiled = False
|
|
||||||
if not self.isFullScreen():
|
if not self.isFullScreen():
|
||||||
# On Hyprland, Qt doesn't know the real position — ask the WM
|
# On Hyprland, Qt doesn't know the real position — ask the WM
|
||||||
win = hyprland.get_window(self.windowTitle())
|
win = hyprland.get_window(self.windowTitle())
|
||||||
if win and win.get("floating") is False:
|
|
||||||
# Tiled: reopen will re-tile instead of restoring geometry.
|
|
||||||
FullscreenPreview._saved_tiled = True
|
|
||||||
if win and win.get("at") and win.get("size"):
|
if win and win.get("at") and win.get("size"):
|
||||||
from PySide6.QtCore import QRect
|
from PySide6.QtCore import QRect
|
||||||
x, y = win["at"]
|
x, y = win["at"]
|
||||||
@ -1769,9 +1696,7 @@ class FullscreenPreview(QMainWindow):
|
|||||||
FullscreenPreview._saved_geometry = QRect(x, y, w, h)
|
FullscreenPreview._saved_geometry = QRect(x, y, w, h)
|
||||||
else:
|
else:
|
||||||
FullscreenPreview._saved_geometry = self.frameGeometry()
|
FullscreenPreview._saved_geometry = self.frameGeometry()
|
||||||
app = QApplication.instance()
|
QApplication.instance().removeEventFilter(self)
|
||||||
if app is not None:
|
|
||||||
app.removeEventFilter(self)
|
|
||||||
# Snapshot video position BEFORE StopMedia destroys it.
|
# Snapshot video position BEFORE StopMedia destroys it.
|
||||||
# _on_fullscreen_closed reads this via get_video_state() to
|
# _on_fullscreen_closed reads this via get_video_state() to
|
||||||
# seek the embedded preview to the same position.
|
# seek the embedded preview to the same position.
|
||||||
@ -1785,16 +1710,4 @@ class FullscreenPreview(QMainWindow):
|
|||||||
# EmitClosed emits self.closed which triggers main_window's
|
# EmitClosed emits self.closed which triggers main_window's
|
||||||
# _on_fullscreen_closed handler.
|
# _on_fullscreen_closed handler.
|
||||||
self._dispatch_and_apply(CloseRequested())
|
self._dispatch_and_apply(CloseRequested())
|
||||||
# Tear down the popout's mpv + GL render context explicitly.
|
|
||||||
# FullscreenPreview has no WA_DeleteOnClose and Qt's C++ dtor
|
|
||||||
# doesn't reliably call Python-side destroy() overrides once
|
|
||||||
# popout_controller drops its reference, so without this the
|
|
||||||
# popout's separate mpv instance + NVDEC surface pool leak
|
|
||||||
# until the next full Python GC cycle.
|
|
||||||
try:
|
|
||||||
self._video._gl_widget.cleanup()
|
|
||||||
except Exception:
|
|
||||||
# Close path — a cleanup failure can't be recovered from
|
|
||||||
# here. Swallowing beats letting Qt abort mid-teardown.
|
|
||||||
pass
|
|
||||||
super().closeEvent(event)
|
super().closeEvent(event)
|
||||||
|
|||||||
@ -76,21 +76,17 @@ class PopoutController:
|
|||||||
from .popout.window import FullscreenPreview
|
from .popout.window import FullscreenPreview
|
||||||
saved_geo = self._app._db.get_setting("slideshow_geometry")
|
saved_geo = self._app._db.get_setting("slideshow_geometry")
|
||||||
saved_fs = self._app._db.get_setting_bool("slideshow_fullscreen")
|
saved_fs = self._app._db.get_setting_bool("slideshow_fullscreen")
|
||||||
saved_tiled = self._app._db.get_setting_bool("slideshow_tiled")
|
|
||||||
if saved_geo:
|
if saved_geo:
|
||||||
parts = saved_geo.split(",")
|
parts = saved_geo.split(",")
|
||||||
if len(parts) == 4:
|
if len(parts) == 4:
|
||||||
from PySide6.QtCore import QRect
|
from PySide6.QtCore import QRect
|
||||||
FullscreenPreview._saved_geometry = QRect(*[int(p) for p in parts])
|
FullscreenPreview._saved_geometry = QRect(*[int(p) for p in parts])
|
||||||
FullscreenPreview._saved_fullscreen = saved_fs
|
FullscreenPreview._saved_fullscreen = saved_fs
|
||||||
FullscreenPreview._saved_tiled = saved_tiled
|
|
||||||
else:
|
else:
|
||||||
FullscreenPreview._saved_geometry = None
|
FullscreenPreview._saved_geometry = None
|
||||||
FullscreenPreview._saved_fullscreen = True
|
FullscreenPreview._saved_fullscreen = True
|
||||||
FullscreenPreview._saved_tiled = False
|
|
||||||
else:
|
else:
|
||||||
FullscreenPreview._saved_fullscreen = True
|
FullscreenPreview._saved_fullscreen = True
|
||||||
FullscreenPreview._saved_tiled = saved_tiled
|
|
||||||
cols = self._app._grid._flow.columns
|
cols = self._app._grid._flow.columns
|
||||||
show_actions = self._app._stack.currentIndex() != 2
|
show_actions = self._app._stack.currentIndex() != 2
|
||||||
monitor = self._app._db.get_setting("slideshow_monitor")
|
monitor = self._app._db.get_setting("slideshow_monitor")
|
||||||
@ -139,9 +135,7 @@ class PopoutController:
|
|||||||
from .popout.window import FullscreenPreview
|
from .popout.window import FullscreenPreview
|
||||||
fs = FullscreenPreview._saved_fullscreen
|
fs = FullscreenPreview._saved_fullscreen
|
||||||
geo = FullscreenPreview._saved_geometry
|
geo = FullscreenPreview._saved_geometry
|
||||||
tiled = FullscreenPreview._saved_tiled
|
|
||||||
self._app._db.set_setting("slideshow_fullscreen", "1" if fs else "0")
|
self._app._db.set_setting("slideshow_fullscreen", "1" if fs else "0")
|
||||||
self._app._db.set_setting("slideshow_tiled", "1" if tiled else "0")
|
|
||||||
if geo:
|
if geo:
|
||||||
self._app._db.set_setting("slideshow_geometry", f"{geo.x()},{geo.y()},{geo.width()},{geo.height()}")
|
self._app._db.set_setting("slideshow_geometry", f"{geo.x()},{geo.y()},{geo.width()},{geo.height()}")
|
||||||
self._app._preview.show()
|
self._app._preview.show()
|
||||||
|
|||||||
@ -21,7 +21,11 @@ def is_batch_message(msg: str) -> bool:
|
|||||||
return "/" in msg and any(c.isdigit() for c in msg.split("/")[0][-2:])
|
return "/" in msg and any(c.isdigit() for c in msg.split("/")[0][-2:])
|
||||||
|
|
||||||
def is_in_library(path: Path, saved_root: Path) -> bool:
|
def is_in_library(path: Path, saved_root: Path) -> bool:
|
||||||
|
"""Check if path is inside the library root."""
|
||||||
|
try:
|
||||||
return path.is_relative_to(saved_root)
|
return path.is_relative_to(saved_root)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
class PostActionsController:
|
class PostActionsController:
|
||||||
@ -189,12 +193,9 @@ class PostActionsController:
|
|||||||
if fav.post_id == post.id and i < len(bm_grid._thumbs):
|
if fav.post_id == post.id and i < len(bm_grid._thumbs):
|
||||||
bm_grid._thumbs[i].set_saved_locally(False)
|
bm_grid._thumbs[i].set_saved_locally(False)
|
||||||
break
|
break
|
||||||
# Refresh the active tab's grid so the unsaved post disappears
|
# Refresh library tab if visible
|
||||||
# from library or loses its saved dot on bookmarks.
|
|
||||||
if self._app._stack.currentIndex() == 2:
|
if self._app._stack.currentIndex() == 2:
|
||||||
self._app._library_view.refresh()
|
self._app._library_view.refresh()
|
||||||
elif self._app._stack.currentIndex() == 1:
|
|
||||||
self._app._bookmarks_view.refresh()
|
|
||||||
else:
|
else:
|
||||||
self._app._status.showMessage(f"#{post.id} not in library")
|
self._app._status.showMessage(f"#{post.id} not in library")
|
||||||
self._app._popout_ctrl.update_state()
|
self._app._popout_ctrl.update_state()
|
||||||
@ -243,7 +244,6 @@ class PostActionsController:
|
|||||||
|
|
||||||
if self._app._db.is_bookmarked(site_id, post.id):
|
if self._app._db.is_bookmarked(site_id, post.id):
|
||||||
self._app._db.remove_bookmark(site_id, post.id)
|
self._app._db.remove_bookmark(site_id, post.id)
|
||||||
self._app._search_ctrl.invalidate_lookup_caches()
|
|
||||||
self._app._status.showMessage(f"Unbookmarked #{post.id}")
|
self._app._status.showMessage(f"Unbookmarked #{post.id}")
|
||||||
thumbs = self._app._grid._thumbs
|
thumbs = self._app._grid._thumbs
|
||||||
if 0 <= index < len(thumbs):
|
if 0 <= index < len(thumbs):
|
||||||
@ -538,7 +538,6 @@ class PostActionsController:
|
|||||||
|
|
||||||
def on_bookmark_done(self, index: int, msg: str) -> None:
|
def on_bookmark_done(self, index: int, msg: str) -> None:
|
||||||
self._app._status.showMessage(f"{len(self._app._posts)} results — {msg}")
|
self._app._status.showMessage(f"{len(self._app._posts)} results — {msg}")
|
||||||
self._app._search_ctrl.invalidate_lookup_caches()
|
|
||||||
# Detect batch operations (e.g. "Saved 3/10 to Unfiled") -- skip heavy updates
|
# Detect batch operations (e.g. "Saved 3/10 to Unfiled") -- skip heavy updates
|
||||||
is_batch = is_batch_message(msg)
|
is_batch = is_batch_message(msg)
|
||||||
thumbs = self._app._grid._thumbs
|
thumbs = self._app._grid._thumbs
|
||||||
|
|||||||
@ -51,7 +51,6 @@ class ImagePreview(QWidget):
|
|||||||
self._is_bookmarked = False # tracks bookmark state for the button submenu
|
self._is_bookmarked = False # tracks bookmark state for the button submenu
|
||||||
self._current_tags: dict[str, list[str]] = {}
|
self._current_tags: dict[str, list[str]] = {}
|
||||||
self._current_tag_list: list[str] = []
|
self._current_tag_list: list[str] = []
|
||||||
self._vol_scroll_accum = 0
|
|
||||||
|
|
||||||
layout = QVBoxLayout(self)
|
layout = QVBoxLayout(self)
|
||||||
layout.setContentsMargins(0, 0, 0, 0)
|
layout.setContentsMargins(0, 0, 0, 0)
|
||||||
@ -197,7 +196,7 @@ class ImagePreview(QWidget):
|
|||||||
self.bookmark_to_folder.emit(folder_actions[id(action)])
|
self.bookmark_to_folder.emit(folder_actions[id(action)])
|
||||||
|
|
||||||
def _on_save_clicked(self) -> None:
|
def _on_save_clicked(self) -> None:
|
||||||
if self._is_saved:
|
if self._save_btn.text() == "Unsave":
|
||||||
self.unsave_requested.emit()
|
self.unsave_requested.emit()
|
||||||
return
|
return
|
||||||
menu = QMenu(self)
|
menu = QMenu(self)
|
||||||
@ -315,17 +314,10 @@ class ImagePreview(QWidget):
|
|||||||
bm_menu.addSeparator()
|
bm_menu.addSeparator()
|
||||||
bm_new_action = bm_menu.addAction("+ New Folder...")
|
bm_new_action = bm_menu.addAction("+ New Folder...")
|
||||||
|
|
||||||
save_menu = None
|
|
||||||
save_unsorted = None
|
|
||||||
save_new = None
|
|
||||||
save_folder_actions = {}
|
|
||||||
unsave_action = None
|
|
||||||
if self._is_saved:
|
|
||||||
unsave_action = menu.addAction("Unsave from Library")
|
|
||||||
else:
|
|
||||||
save_menu = menu.addMenu("Save to Library")
|
save_menu = menu.addMenu("Save to Library")
|
||||||
save_unsorted = save_menu.addAction("Unfiled")
|
save_unsorted = save_menu.addAction("Unfiled")
|
||||||
save_menu.addSeparator()
|
save_menu.addSeparator()
|
||||||
|
save_folder_actions = {}
|
||||||
if self._folders_callback:
|
if self._folders_callback:
|
||||||
for folder in self._folders_callback():
|
for folder in self._folders_callback():
|
||||||
a = save_menu.addAction(folder)
|
a = save_menu.addAction(folder)
|
||||||
@ -333,9 +325,12 @@ class ImagePreview(QWidget):
|
|||||||
save_menu.addSeparator()
|
save_menu.addSeparator()
|
||||||
save_new = save_menu.addAction("+ New Folder...")
|
save_new = save_menu.addAction("+ New Folder...")
|
||||||
|
|
||||||
|
unsave_action = None
|
||||||
|
if self._is_saved:
|
||||||
|
unsave_action = menu.addAction("Unsave from Library")
|
||||||
|
|
||||||
menu.addSeparator()
|
menu.addSeparator()
|
||||||
copy_image = menu.addAction("Copy File to Clipboard")
|
copy_image = menu.addAction("Copy File to Clipboard")
|
||||||
copy_url = menu.addAction("Copy Image URL")
|
|
||||||
open_action = menu.addAction("Open in Default App")
|
open_action = menu.addAction("Open in Default App")
|
||||||
browser_action = menu.addAction("Open in Browser")
|
browser_action = menu.addAction("Open in Browser")
|
||||||
|
|
||||||
@ -371,22 +366,15 @@ class ImagePreview(QWidget):
|
|||||||
elif id(action) in save_folder_actions:
|
elif id(action) in save_folder_actions:
|
||||||
self.save_to_folder.emit(save_folder_actions[id(action)])
|
self.save_to_folder.emit(save_folder_actions[id(action)])
|
||||||
elif action == copy_image:
|
elif action == copy_image:
|
||||||
from pathlib import Path as _Path
|
|
||||||
from PySide6.QtCore import QMimeData, QUrl
|
|
||||||
from PySide6.QtWidgets import QApplication
|
from PySide6.QtWidgets import QApplication
|
||||||
from PySide6.QtGui import QPixmap as _QP
|
from PySide6.QtGui import QPixmap as _QP
|
||||||
cp = self._current_path
|
pix = self._image_viewer._pixmap
|
||||||
if cp and _Path(cp).exists():
|
if pix and not pix.isNull():
|
||||||
mime = QMimeData()
|
QApplication.clipboard().setPixmap(pix)
|
||||||
mime.setUrls([QUrl.fromLocalFile(str(_Path(cp).resolve()))])
|
elif self._current_path:
|
||||||
pix = _QP(cp)
|
pix = _QP(self._current_path)
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
mime.setImageData(pix.toImage())
|
QApplication.clipboard().setPixmap(pix)
|
||||||
QApplication.clipboard().setMimeData(mime)
|
|
||||||
elif action == copy_url:
|
|
||||||
from PySide6.QtWidgets import QApplication
|
|
||||||
if self._current_post and self._current_post.file_url:
|
|
||||||
QApplication.clipboard().setText(self._current_post.file_url)
|
|
||||||
elif action == open_action:
|
elif action == open_action:
|
||||||
self.open_in_default.emit()
|
self.open_in_default.emit()
|
||||||
elif action == browser_action:
|
elif action == browser_action:
|
||||||
@ -417,11 +405,9 @@ class ImagePreview(QWidget):
|
|||||||
self.navigate.emit(1)
|
self.navigate.emit(1)
|
||||||
return
|
return
|
||||||
if self._stack.currentIndex() == 1:
|
if self._stack.currentIndex() == 1:
|
||||||
self._vol_scroll_accum += event.angleDelta().y()
|
delta = event.angleDelta().y()
|
||||||
steps = self._vol_scroll_accum // 120
|
if delta:
|
||||||
if steps:
|
vol = max(0, min(100, self._video_player.volume + (5 if delta > 0 else -5)))
|
||||||
self._vol_scroll_accum -= steps * 120
|
|
||||||
vol = max(0, min(100, self._video_player.volume + 5 * steps))
|
|
||||||
self._video_player.volume = vol
|
self._video_player.volume = vol
|
||||||
else:
|
else:
|
||||||
super().wheelEvent(event)
|
super().wheelEvent(event)
|
||||||
|
|||||||
@ -18,7 +18,6 @@ class PrivacyController:
|
|||||||
self._on = False
|
self._on = False
|
||||||
self._overlay: QWidget | None = None
|
self._overlay: QWidget | None = None
|
||||||
self._popout_was_visible = False
|
self._popout_was_visible = False
|
||||||
self._preview_was_playing = False
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_active(self) -> bool:
|
def is_active(self) -> bool:
|
||||||
@ -41,11 +40,8 @@ class PrivacyController:
|
|||||||
self._overlay.raise_()
|
self._overlay.raise_()
|
||||||
self._overlay.show()
|
self._overlay.show()
|
||||||
self._app.setWindowTitle("booru-viewer")
|
self._app.setWindowTitle("booru-viewer")
|
||||||
# Pause preview video, remembering whether it was playing
|
# Pause preview video
|
||||||
self._preview_was_playing = False
|
|
||||||
if self._app._preview._stack.currentIndex() == 1:
|
if self._app._preview._stack.currentIndex() == 1:
|
||||||
mpv = self._app._preview._video_player._mpv
|
|
||||||
self._preview_was_playing = mpv is not None and not mpv.pause
|
|
||||||
self._app._preview._video_player.pause()
|
self._app._preview._video_player.pause()
|
||||||
# Delegate popout hide-and-pause to FullscreenPreview so it
|
# Delegate popout hide-and-pause to FullscreenPreview so it
|
||||||
# can capture its own geometry for restore.
|
# can capture its own geometry for restore.
|
||||||
@ -57,8 +53,10 @@ class PrivacyController:
|
|||||||
self._app._popout_ctrl.window.privacy_hide()
|
self._app._popout_ctrl.window.privacy_hide()
|
||||||
else:
|
else:
|
||||||
self._overlay.hide()
|
self._overlay.hide()
|
||||||
# Resume embedded preview video only if it was playing before
|
# Resume embedded preview video — unconditional resume, the
|
||||||
if self._preview_was_playing and self._app._preview._stack.currentIndex() == 1:
|
# common case (privacy hides -> user comes back -> video should
|
||||||
|
# be playing again) wins over the manually-paused edge case.
|
||||||
|
if self._app._preview._stack.currentIndex() == 1:
|
||||||
self._app._preview._video_player.resume()
|
self._app._preview._video_player.resume()
|
||||||
# Restore the popout via its own privacy_show method, which
|
# Restore the popout via its own privacy_show method, which
|
||||||
# also re-dispatches the captured geometry to Hyprland (Qt
|
# also re-dispatches the captured geometry to Hyprland (Qt
|
||||||
|
|||||||
@ -17,29 +17,6 @@ from PySide6.QtWidgets import (
|
|||||||
from ..core.db import Database
|
from ..core.db import Database
|
||||||
|
|
||||||
|
|
||||||
class _TagCompleter(QCompleter):
|
|
||||||
"""Completer that operates on the last space-separated tag only.
|
|
||||||
|
|
||||||
When the user types "blue_sky tre", the completer matches against
|
|
||||||
"tre" and the popup shows suggestions for that fragment. Accepting
|
|
||||||
a suggestion replaces only the last tag, preserving everything
|
|
||||||
before the final space.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def splitPath(self, path: str) -> list[str]:
|
|
||||||
return [path.split()[-1]] if path.split() else [""]
|
|
||||||
|
|
||||||
def pathFromIndex(self, index) -> str:
|
|
||||||
completion = super().pathFromIndex(index)
|
|
||||||
text = self.widget().text()
|
|
||||||
parts = text.split()
|
|
||||||
if parts:
|
|
||||||
parts[-1] = completion
|
|
||||||
else:
|
|
||||||
parts = [completion]
|
|
||||||
return " ".join(parts) + " "
|
|
||||||
|
|
||||||
|
|
||||||
class SearchBar(QWidget):
|
class SearchBar(QWidget):
|
||||||
"""Tag search bar with autocomplete, history dropdown, and saved searches."""
|
"""Tag search bar with autocomplete, history dropdown, and saved searches."""
|
||||||
|
|
||||||
@ -86,10 +63,9 @@ class SearchBar(QWidget):
|
|||||||
self._btn.clicked.connect(self._do_search)
|
self._btn.clicked.connect(self._do_search)
|
||||||
layout.addWidget(self._btn)
|
layout.addWidget(self._btn)
|
||||||
|
|
||||||
# Autocomplete — _TagCompleter only completes the last tag,
|
# Autocomplete
|
||||||
# preserving previous tags in multi-tag queries.
|
|
||||||
self._completer_model = QStringListModel()
|
self._completer_model = QStringListModel()
|
||||||
self._completer = _TagCompleter(self._completer_model)
|
self._completer = QCompleter(self._completer_model)
|
||||||
self._completer.setCaseSensitivity(Qt.CaseSensitivity.CaseInsensitive)
|
self._completer.setCaseSensitivity(Qt.CaseSensitivity.CaseInsensitive)
|
||||||
self._completer.setCompletionMode(QCompleter.CompletionMode.PopupCompletion)
|
self._completer.setCompletionMode(QCompleter.CompletionMode.PopupCompletion)
|
||||||
self._input.setCompleter(self._completer)
|
self._input.setCompleter(self._completer)
|
||||||
@ -102,9 +78,6 @@ class SearchBar(QWidget):
|
|||||||
self._input.textChanged.connect(self._on_text_changed)
|
self._input.textChanged.connect(self._on_text_changed)
|
||||||
|
|
||||||
def _on_text_changed(self, text: str) -> None:
|
def _on_text_changed(self, text: str) -> None:
|
||||||
if text.endswith(" "):
|
|
||||||
self._completer_model.setStringList([])
|
|
||||||
return
|
|
||||||
self._ac_timer.start()
|
self._ac_timer.start()
|
||||||
|
|
||||||
def _request_autocomplete(self) -> None:
|
def _request_autocomplete(self) -> None:
|
||||||
|
|||||||
@ -124,29 +124,11 @@ class SearchController:
|
|||||||
self._search = SearchState()
|
self._search = SearchState()
|
||||||
self._last_scroll_page = 0
|
self._last_scroll_page = 0
|
||||||
self._infinite_scroll = app._db.get_setting_bool("infinite_scroll")
|
self._infinite_scroll = app._db.get_setting_bool("infinite_scroll")
|
||||||
# Cached lookup sets — rebuilt once per search, reused in
|
|
||||||
# _drain_append_queue to avoid repeated DB queries and directory
|
|
||||||
# listings on every infinite-scroll append.
|
|
||||||
self._cached_names: set[str] | None = None
|
|
||||||
self._bookmarked_ids: set[int] | None = None
|
|
||||||
self._saved_ids: set[int] | None = None
|
|
||||||
|
|
||||||
def reset(self) -> None:
|
def reset(self) -> None:
|
||||||
"""Reset search state for a site change."""
|
"""Reset search state for a site change."""
|
||||||
self._search.shown_post_ids.clear()
|
self._search.shown_post_ids.clear()
|
||||||
self._search.page_cache.clear()
|
self._search.page_cache.clear()
|
||||||
self._cached_names = None
|
|
||||||
self._bookmarked_ids = None
|
|
||||||
self._saved_ids = None
|
|
||||||
|
|
||||||
def invalidate_lookup_caches(self) -> None:
|
|
||||||
"""Clear cached bookmark/saved/cache-dir sets.
|
|
||||||
|
|
||||||
Call after a bookmark or save operation so the next
|
|
||||||
``_drain_append_queue`` picks up the change.
|
|
||||||
"""
|
|
||||||
self._bookmarked_ids = None
|
|
||||||
self._saved_ids = None
|
|
||||||
|
|
||||||
def clear_loading(self) -> None:
|
def clear_loading(self) -> None:
|
||||||
self._loading = False
|
self._loading = False
|
||||||
@ -155,12 +137,8 @@ class SearchController:
|
|||||||
|
|
||||||
def on_search(self, tags: str) -> None:
|
def on_search(self, tags: str) -> None:
|
||||||
self._current_tags = tags
|
self._current_tags = tags
|
||||||
self._app._page_spin.setValue(1)
|
self._current_page = self._app._page_spin.value()
|
||||||
self._current_page = 1
|
|
||||||
self._search = SearchState()
|
self._search = SearchState()
|
||||||
self._cached_names = None
|
|
||||||
self._bookmarked_ids = None
|
|
||||||
self._saved_ids = None
|
|
||||||
self._min_score = self._app._score_spin.value()
|
self._min_score = self._app._score_spin.value()
|
||||||
self._app._preview.clear()
|
self._app._preview.clear()
|
||||||
self._app._next_page_btn.setVisible(True)
|
self._app._next_page_btn.setVisible(True)
|
||||||
@ -314,25 +292,26 @@ class SearchController:
|
|||||||
from PySide6.QtCore import QTimer
|
from PySide6.QtCore import QTimer
|
||||||
QTimer.singleShot(100, self.clear_loading)
|
QTimer.singleShot(100, self.clear_loading)
|
||||||
|
|
||||||
|
from ..core.config import saved_dir
|
||||||
from ..core.cache import cached_path_for, cache_dir
|
from ..core.cache import cached_path_for, cache_dir
|
||||||
site_id = self._app._site_combo.currentData()
|
site_id = self._app._site_combo.currentData()
|
||||||
|
|
||||||
self._saved_ids = self._app._db.get_saved_post_ids()
|
_saved_ids = self._app._db.get_saved_post_ids()
|
||||||
|
|
||||||
_favs = self._app._db.get_bookmarks(site_id=site_id) if site_id else []
|
_favs = self._app._db.get_bookmarks(site_id=site_id) if site_id else []
|
||||||
self._bookmarked_ids = {f.post_id for f in _favs}
|
_bookmarked_ids: set[int] = {f.post_id for f in _favs}
|
||||||
|
|
||||||
_cd = cache_dir()
|
_cd = cache_dir()
|
||||||
self._cached_names = set()
|
_cached_names: set[str] = set()
|
||||||
if _cd.exists():
|
if _cd.exists():
|
||||||
self._cached_names = {f.name for f in _cd.iterdir() if f.is_file()}
|
_cached_names = {f.name for f in _cd.iterdir() if f.is_file()}
|
||||||
|
|
||||||
for i, (post, thumb) in enumerate(zip(posts, thumbs)):
|
for i, (post, thumb) in enumerate(zip(posts, thumbs)):
|
||||||
if post.id in self._bookmarked_ids:
|
if post.id in _bookmarked_ids:
|
||||||
thumb.set_bookmarked(True)
|
thumb.set_bookmarked(True)
|
||||||
thumb.set_saved_locally(post.id in self._saved_ids)
|
thumb.set_saved_locally(post.id in _saved_ids)
|
||||||
cached = cached_path_for(post.file_url)
|
cached = cached_path_for(post.file_url)
|
||||||
if cached.name in self._cached_names:
|
if cached.name in _cached_names:
|
||||||
thumb._cached_path = str(cached)
|
thumb._cached_path = str(cached)
|
||||||
|
|
||||||
if post.preview_url:
|
if post.preview_url:
|
||||||
@ -470,23 +449,16 @@ class SearchController:
|
|||||||
self._loading = False
|
self._loading = False
|
||||||
return
|
return
|
||||||
|
|
||||||
from ..core.cache import cached_path_for
|
from ..core.cache import cached_path_for, cache_dir
|
||||||
|
|
||||||
# Reuse the lookup sets built in on_search_done. They stay valid
|
|
||||||
# within an infinite-scroll session — bookmarks/saves don't change
|
|
||||||
# during passive scrolling, and the cache directory only grows.
|
|
||||||
if self._saved_ids is None:
|
|
||||||
self._saved_ids = self._app._db.get_saved_post_ids()
|
|
||||||
if self._bookmarked_ids is None:
|
|
||||||
site_id = self._app._site_combo.currentData()
|
site_id = self._app._site_combo.currentData()
|
||||||
|
_saved_ids = self._app._db.get_saved_post_ids()
|
||||||
|
|
||||||
_favs = self._app._db.get_bookmarks(site_id=site_id) if site_id else []
|
_favs = self._app._db.get_bookmarks(site_id=site_id) if site_id else []
|
||||||
self._bookmarked_ids = {f.post_id for f in _favs}
|
_bookmarked_ids: set[int] = {f.post_id for f in _favs}
|
||||||
if self._cached_names is None:
|
|
||||||
from ..core.cache import cache_dir
|
|
||||||
_cd = cache_dir()
|
_cd = cache_dir()
|
||||||
self._cached_names = set()
|
_cached_names: set[str] = set()
|
||||||
if _cd.exists():
|
if _cd.exists():
|
||||||
self._cached_names = {f.name for f in _cd.iterdir() if f.is_file()}
|
_cached_names = {f.name for f in _cd.iterdir() if f.is_file()}
|
||||||
|
|
||||||
posts = ss.append_queue[:]
|
posts = ss.append_queue[:]
|
||||||
ss.append_queue.clear()
|
ss.append_queue.clear()
|
||||||
@ -496,11 +468,11 @@ class SearchController:
|
|||||||
|
|
||||||
for i, (post, thumb) in enumerate(zip(posts, thumbs)):
|
for i, (post, thumb) in enumerate(zip(posts, thumbs)):
|
||||||
idx = start_idx + i
|
idx = start_idx + i
|
||||||
if post.id in self._bookmarked_ids:
|
if post.id in _bookmarked_ids:
|
||||||
thumb.set_bookmarked(True)
|
thumb.set_bookmarked(True)
|
||||||
thumb.set_saved_locally(post.id in self._saved_ids)
|
thumb.set_saved_locally(post.id in _saved_ids)
|
||||||
cached = cached_path_for(post.file_url)
|
cached = cached_path_for(post.file_url)
|
||||||
if cached.name in self._cached_names:
|
if cached.name in _cached_names:
|
||||||
thumb._cached_path = str(cached)
|
thumb._cached_path = str(cached)
|
||||||
if post.preview_url:
|
if post.preview_url:
|
||||||
self.fetch_thumbnail(idx, post.preview_url)
|
self.fetch_thumbnail(idx, post.preview_url)
|
||||||
@ -534,7 +506,7 @@ class SearchController:
|
|||||||
if 0 <= index < len(thumbs):
|
if 0 <= index < len(thumbs):
|
||||||
pix = QPixmap(path)
|
pix = QPixmap(path)
|
||||||
if not pix.isNull():
|
if not pix.isNull():
|
||||||
thumbs[index].set_pixmap(pix, path)
|
thumbs[index].set_pixmap(pix)
|
||||||
|
|
||||||
# -- Autocomplete --
|
# -- Autocomplete --
|
||||||
|
|
||||||
|
|||||||
@ -21,6 +21,7 @@ from PySide6.QtWidgets import (
|
|||||||
QListWidget,
|
QListWidget,
|
||||||
QMessageBox,
|
QMessageBox,
|
||||||
QGroupBox,
|
QGroupBox,
|
||||||
|
QProgressBar,
|
||||||
)
|
)
|
||||||
|
|
||||||
from ..core.db import Database
|
from ..core.db import Database
|
||||||
@ -64,10 +65,6 @@ class SettingsDialog(QDialog):
|
|||||||
btns = QHBoxLayout()
|
btns = QHBoxLayout()
|
||||||
btns.addStretch()
|
btns.addStretch()
|
||||||
|
|
||||||
apply_btn = QPushButton("Apply")
|
|
||||||
apply_btn.clicked.connect(self._apply)
|
|
||||||
btns.addWidget(apply_btn)
|
|
||||||
|
|
||||||
save_btn = QPushButton("Save")
|
save_btn = QPushButton("Save")
|
||||||
save_btn.clicked.connect(self._save_and_close)
|
save_btn.clicked.connect(self._save_and_close)
|
||||||
btns.addWidget(save_btn)
|
btns.addWidget(save_btn)
|
||||||
@ -201,7 +198,7 @@ class SettingsDialog(QDialog):
|
|||||||
form.addRow("", self._search_history)
|
form.addRow("", self._search_history)
|
||||||
|
|
||||||
# Flip layout
|
# Flip layout
|
||||||
self._flip_layout = QCheckBox("Preview on left")
|
self._flip_layout = QCheckBox("Preview on left (restart required)")
|
||||||
self._flip_layout.setChecked(self._db.get_setting_bool("flip_layout"))
|
self._flip_layout.setChecked(self._db.get_setting_bool("flip_layout"))
|
||||||
form.addRow("", self._flip_layout)
|
form.addRow("", self._flip_layout)
|
||||||
|
|
||||||
@ -313,15 +310,6 @@ class SettingsDialog(QDialog):
|
|||||||
clear_cache_btn.clicked.connect(self._clear_image_cache)
|
clear_cache_btn.clicked.connect(self._clear_image_cache)
|
||||||
btn_row1.addWidget(clear_cache_btn)
|
btn_row1.addWidget(clear_cache_btn)
|
||||||
|
|
||||||
clear_tags_btn = QPushButton("Clear Tag Cache")
|
|
||||||
clear_tags_btn.setToolTip(
|
|
||||||
"Wipe the per-site tag-type cache (Gelbooru/Moebooru sites). "
|
|
||||||
"Use this if category colors stop appearing correctly — the "
|
|
||||||
"app will re-fetch tag types on the next post view."
|
|
||||||
)
|
|
||||||
clear_tags_btn.clicked.connect(self._clear_tag_cache)
|
|
||||||
btn_row1.addWidget(clear_tags_btn)
|
|
||||||
|
|
||||||
actions_layout.addLayout(btn_row1)
|
actions_layout.addLayout(btn_row1)
|
||||||
|
|
||||||
btn_row2 = QHBoxLayout()
|
btn_row2 = QHBoxLayout()
|
||||||
@ -552,6 +540,7 @@ class SettingsDialog(QDialog):
|
|||||||
# -- Network tab --
|
# -- Network tab --
|
||||||
|
|
||||||
def _build_network_tab(self) -> QWidget:
|
def _build_network_tab(self) -> QWidget:
|
||||||
|
from ..core.cache import get_connection_log
|
||||||
w = QWidget()
|
w = QWidget()
|
||||||
layout = QVBoxLayout(w)
|
layout = QVBoxLayout(w)
|
||||||
|
|
||||||
@ -708,18 +697,6 @@ class SettingsDialog(QDialog):
|
|||||||
QMessageBox.information(self, "Done", f"Evicted {count} files.")
|
QMessageBox.information(self, "Done", f"Evicted {count} files.")
|
||||||
self._refresh_stats()
|
self._refresh_stats()
|
||||||
|
|
||||||
def _clear_tag_cache(self) -> None:
|
|
||||||
reply = QMessageBox.question(
|
|
||||||
self, "Confirm",
|
|
||||||
"Wipe the tag category cache for every site? This also clears "
|
|
||||||
"the per-site batch-API probe result, so the app will re-probe "
|
|
||||||
"Gelbooru/Moebooru backends on next use.",
|
|
||||||
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
|
|
||||||
)
|
|
||||||
if reply == QMessageBox.StandardButton.Yes:
|
|
||||||
count = self._db.clear_tag_cache()
|
|
||||||
QMessageBox.information(self, "Done", f"Deleted {count} tag-type rows.")
|
|
||||||
|
|
||||||
def _bl_export(self) -> None:
|
def _bl_export(self) -> None:
|
||||||
from .dialogs import save_file
|
from .dialogs import save_file
|
||||||
path = save_file(self, "Export Blacklist", "blacklist.txt", "Text (*.txt)")
|
path = save_file(self, "Export Blacklist", "blacklist.txt", "Text (*.txt)")
|
||||||
@ -818,8 +795,7 @@ class SettingsDialog(QDialog):
|
|||||||
|
|
||||||
# -- Save --
|
# -- Save --
|
||||||
|
|
||||||
def _apply(self) -> None:
|
def _save_and_close(self) -> None:
|
||||||
"""Write all settings to DB and emit settings_changed."""
|
|
||||||
self._db.set_setting("page_size", str(self._page_size.value()))
|
self._db.set_setting("page_size", str(self._page_size.value()))
|
||||||
self._db.set_setting("thumbnail_size", str(self._thumb_size.value()))
|
self._db.set_setting("thumbnail_size", str(self._thumb_size.value()))
|
||||||
self._db.set_setting("default_rating", self._default_rating.currentText())
|
self._db.set_setting("default_rating", self._default_rating.currentText())
|
||||||
@ -850,10 +826,5 @@ class SettingsDialog(QDialog):
|
|||||||
self._db.add_blacklisted_tag(tag)
|
self._db.add_blacklisted_tag(tag)
|
||||||
if self._file_dialog_combo is not None:
|
if self._file_dialog_combo is not None:
|
||||||
self._db.set_setting("file_dialog_platform", self._file_dialog_combo.currentText())
|
self._db.set_setting("file_dialog_platform", self._file_dialog_combo.currentText())
|
||||||
from .dialogs import reset_gtk_cache
|
|
||||||
reset_gtk_cache()
|
|
||||||
self.settings_changed.emit()
|
self.settings_changed.emit()
|
||||||
|
|
||||||
def _save_and_close(self) -> None:
|
|
||||||
self._apply()
|
|
||||||
self.accept()
|
self.accept()
|
||||||
|
|||||||
@ -191,7 +191,7 @@ class SiteDialog(QDialog):
|
|||||||
|
|
||||||
def _try_parse_url(self, text: str) -> None:
|
def _try_parse_url(self, text: str) -> None:
|
||||||
"""Strip query params from pasted URLs like https://gelbooru.com/index.php?page=post&s=list&tags=all."""
|
"""Strip query params from pasted URLs like https://gelbooru.com/index.php?page=post&s=list&tags=all."""
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse, parse_qs
|
||||||
text = text.strip()
|
text = text.strip()
|
||||||
if "?" not in text:
|
if "?" not in text:
|
||||||
return
|
return
|
||||||
|
|||||||
@ -160,10 +160,6 @@ class WindowStateController:
|
|||||||
continue
|
continue
|
||||||
return c
|
return c
|
||||||
except Exception:
|
except Exception:
|
||||||
# hyprctl unavailable (non-Hyprland session), timed out,
|
|
||||||
# or produced invalid JSON. Caller treats None as
|
|
||||||
# "no Hyprland-visible main window" and falls back to
|
|
||||||
# Qt's own geometry tracking.
|
|
||||||
pass
|
pass
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@ -211,9 +207,6 @@ class WindowStateController:
|
|||||||
# When tiled, intentionally do NOT touch floating_geometry --
|
# When tiled, intentionally do NOT touch floating_geometry --
|
||||||
# preserve the last good floating dimensions.
|
# preserve the last good floating dimensions.
|
||||||
except Exception:
|
except Exception:
|
||||||
# Geometry persistence is best-effort; swallowing here
|
|
||||||
# beats crashing closeEvent over a hyprctl timeout or a
|
|
||||||
# setting-write race. Next save attempt will retry.
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def restore_main_window_state(self) -> None:
|
def restore_main_window_state(self) -> None:
|
||||||
|
|||||||
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
[Setup]
|
[Setup]
|
||||||
AppName=booru-viewer
|
AppName=booru-viewer
|
||||||
AppVersion=0.2.7
|
AppVersion=v0.2.5
|
||||||
AppPublisher=pax
|
AppPublisher=pax
|
||||||
AppPublisherURL=https://git.pax.moe/pax/booru-viewer
|
AppPublisherURL=https://git.pax.moe/pax/booru-viewer
|
||||||
DefaultDirName={localappdata}\booru-viewer
|
DefaultDirName={localappdata}\booru-viewer
|
||||||
|
|||||||
@ -4,14 +4,14 @@ build-backend = "hatchling.build"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "booru-viewer"
|
name = "booru-viewer"
|
||||||
version = "0.2.7"
|
version = "v0.2.5"
|
||||||
description = "Local booru image browser with Qt6 GUI"
|
description = "Local booru image browser with Qt6 GUI"
|
||||||
requires-python = ">=3.11"
|
requires-python = ">=3.11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"httpx>=0.27,<1.0",
|
"httpx[http2]>=0.27",
|
||||||
"Pillow>=10.0,<12.0",
|
"Pillow>=10.0",
|
||||||
"PySide6>=6.6,<7.0",
|
"PySide6>=6.6",
|
||||||
"python-mpv>=1.0,<2.0",
|
"python-mpv>=1.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
|
|||||||
@ -454,89 +454,3 @@ class TestMaps:
|
|||||||
assert _GELBOORU_TYPE_MAP[4] == "Character"
|
assert _GELBOORU_TYPE_MAP[4] == "Character"
|
||||||
assert _GELBOORU_TYPE_MAP[5] == "Meta"
|
assert _GELBOORU_TYPE_MAP[5] == "Meta"
|
||||||
assert 2 not in _GELBOORU_TYPE_MAP # Deprecated intentionally omitted
|
assert 2 not in _GELBOORU_TYPE_MAP # Deprecated intentionally omitted
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# _do_ensure dispatch — regression cover for transient-error poisoning
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
class TestDoEnsureProbeRouting:
|
|
||||||
"""When _batch_api_works is None, _do_ensure must route through
|
|
||||||
_probe_batch_api so transient errors stay transient. The prior
|
|
||||||
implementation called fetch_via_tag_api directly and inferred
|
|
||||||
False from empty tag_categories — but fetch_via_tag_api swallows
|
|
||||||
per-chunk exceptions, so a network drop silently poisoned the
|
|
||||||
probe flag to False for the whole site."""
|
|
||||||
|
|
||||||
def test_transient_error_leaves_flag_none(self, tmp_db):
|
|
||||||
"""All chunks fail → _batch_api_works must stay None,
|
|
||||||
not flip to False."""
|
|
||||||
client = FakeClient(
|
|
||||||
tag_api_url="http://example.com/tags",
|
|
||||||
api_key="k",
|
|
||||||
api_user="u",
|
|
||||||
)
|
|
||||||
|
|
||||||
async def raising_request(method, url, params=None):
|
|
||||||
raise RuntimeError("network down")
|
|
||||||
client._request = raising_request
|
|
||||||
|
|
||||||
fetcher = CategoryFetcher(client, tmp_db, site_id=1)
|
|
||||||
assert fetcher._batch_api_works is None
|
|
||||||
post = FakePost(tags="miku 1girl")
|
|
||||||
|
|
||||||
asyncio.new_event_loop().run_until_complete(fetcher._do_ensure(post))
|
|
||||||
|
|
||||||
assert fetcher._batch_api_works is None, (
|
|
||||||
"Transient error must not poison the probe flag"
|
|
||||||
)
|
|
||||||
# Persistence side: nothing was saved
|
|
||||||
reloaded = CategoryFetcher(FakeClient(), tmp_db, site_id=1)
|
|
||||||
assert reloaded._batch_api_works is None
|
|
||||||
|
|
||||||
def test_clean_200_zero_matches_flips_to_false(self, tmp_db):
|
|
||||||
"""Clean HTTP 200 + no names matching the request → flips
|
|
||||||
the flag to False (structurally broken endpoint)."""
|
|
||||||
client = FakeClient(
|
|
||||||
tag_api_url="http://example.com/tags",
|
|
||||||
api_key="k",
|
|
||||||
api_user="u",
|
|
||||||
)
|
|
||||||
|
|
||||||
async def empty_ok_request(method, url, params=None):
|
|
||||||
# 200 with a valid but empty tag list
|
|
||||||
return FakeResponse(
|
|
||||||
json.dumps({"@attributes": {"count": 0}, "tag": []}),
|
|
||||||
status_code=200,
|
|
||||||
)
|
|
||||||
client._request = empty_ok_request
|
|
||||||
|
|
||||||
fetcher = CategoryFetcher(client, tmp_db, site_id=1)
|
|
||||||
post = FakePost(tags="definitely_not_a_real_tag")
|
|
||||||
|
|
||||||
asyncio.new_event_loop().run_until_complete(fetcher._do_ensure(post))
|
|
||||||
|
|
||||||
assert fetcher._batch_api_works is False, (
|
|
||||||
"Clean 200 with zero matches must flip flag to False"
|
|
||||||
)
|
|
||||||
reloaded = CategoryFetcher(FakeClient(), tmp_db, site_id=1)
|
|
||||||
assert reloaded._batch_api_works is False
|
|
||||||
|
|
||||||
def test_non_200_leaves_flag_none(self, tmp_db):
|
|
||||||
"""500-family responses are transient, must not poison."""
|
|
||||||
client = FakeClient(
|
|
||||||
tag_api_url="http://example.com/tags",
|
|
||||||
api_key="k",
|
|
||||||
api_user="u",
|
|
||||||
)
|
|
||||||
|
|
||||||
async def five_hundred(method, url, params=None):
|
|
||||||
return FakeResponse("", status_code=503)
|
|
||||||
client._request = five_hundred
|
|
||||||
|
|
||||||
fetcher = CategoryFetcher(client, tmp_db, site_id=1)
|
|
||||||
post = FakePost(tags="miku")
|
|
||||||
|
|
||||||
asyncio.new_event_loop().run_until_complete(fetcher._do_ensure(post))
|
|
||||||
|
|
||||||
assert fetcher._batch_api_works is None
|
|
||||||
|
|||||||
@ -1,217 +0,0 @@
|
|||||||
"""Tests for the shared network-safety helpers (SSRF guard + secret redaction)."""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import socket
|
|
||||||
from unittest.mock import patch
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from booru_viewer.core.api._safety import (
|
|
||||||
SECRET_KEYS,
|
|
||||||
check_public_host,
|
|
||||||
redact_params,
|
|
||||||
redact_url,
|
|
||||||
validate_public_request,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# ======================================================================
|
|
||||||
# SSRF guard — finding #1
|
|
||||||
# ======================================================================
|
|
||||||
|
|
||||||
|
|
||||||
def test_public_v4_literal_passes():
|
|
||||||
check_public_host("8.8.8.8")
|
|
||||||
check_public_host("1.1.1.1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_loopback_v4_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("127.0.0.1")
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("127.0.0.53")
|
|
||||||
|
|
||||||
|
|
||||||
def test_cloud_metadata_ip_rejected():
|
|
||||||
"""169.254.169.254 — AWS/GCE/Azure metadata service."""
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("169.254.169.254")
|
|
||||||
|
|
||||||
|
|
||||||
def test_rfc1918_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("10.0.0.1")
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("172.16.5.4")
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("192.168.1.1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_cgnat_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("100.64.0.1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_multicast_v4_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("224.0.0.1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_ipv6_loopback_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("::1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_ipv6_unique_local_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("fc00::1")
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("fd12:3456:789a::1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_ipv6_link_local_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("fe80::1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_ipv6_multicast_rejected():
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("ff02::1")
|
|
||||||
|
|
||||||
|
|
||||||
def test_public_v6_passes():
|
|
||||||
# Google DNS
|
|
||||||
check_public_host("2001:4860:4860::8888")
|
|
||||||
|
|
||||||
|
|
||||||
def test_hostname_dns_failure_raises():
|
|
||||||
def _gaierror(*a, **kw):
|
|
||||||
raise socket.gaierror(-2, "Name or service not known")
|
|
||||||
with patch("socket.getaddrinfo", _gaierror):
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("nonexistent.test.invalid")
|
|
||||||
|
|
||||||
|
|
||||||
def test_hostname_resolving_to_loopback_rejected():
|
|
||||||
def _fake(*a, **kw):
|
|
||||||
return [(socket.AF_INET, 0, 0, "", ("127.0.0.1", 0))]
|
|
||||||
with patch("socket.getaddrinfo", _fake):
|
|
||||||
with pytest.raises(httpx.RequestError, match="blocked request target"):
|
|
||||||
check_public_host("mean.example")
|
|
||||||
|
|
||||||
|
|
||||||
def test_hostname_resolving_to_metadata_rejected():
|
|
||||||
def _fake(*a, **kw):
|
|
||||||
return [(socket.AF_INET, 0, 0, "", ("169.254.169.254", 0))]
|
|
||||||
with patch("socket.getaddrinfo", _fake):
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("stolen.example")
|
|
||||||
|
|
||||||
|
|
||||||
def test_hostname_resolving_to_public_passes():
|
|
||||||
def _fake(*a, **kw):
|
|
||||||
return [(socket.AF_INET, 0, 0, "", ("8.8.8.8", 0))]
|
|
||||||
with patch("socket.getaddrinfo", _fake):
|
|
||||||
check_public_host("dns.google")
|
|
||||||
|
|
||||||
|
|
||||||
def test_hostname_with_mixed_results_rejected_on_any_private():
|
|
||||||
"""If any resolved address is private, reject — conservative."""
|
|
||||||
def _fake(*a, **kw):
|
|
||||||
return [
|
|
||||||
(socket.AF_INET, 0, 0, "", ("8.8.8.8", 0)),
|
|
||||||
(socket.AF_INET, 0, 0, "", ("127.0.0.1", 0)),
|
|
||||||
]
|
|
||||||
with patch("socket.getaddrinfo", _fake):
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
check_public_host("dualhomed.example")
|
|
||||||
|
|
||||||
|
|
||||||
def test_empty_host_passes():
|
|
||||||
"""Edge case: httpx can call us with a relative URL mid-redirect."""
|
|
||||||
check_public_host("")
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_public_request_hook_rejects_metadata():
|
|
||||||
"""The async hook is invoked via asyncio.run() instead of
|
|
||||||
pytest-asyncio so the test runs on CI (which only installs
|
|
||||||
httpx + Pillow + pytest)."""
|
|
||||||
request = httpx.Request("GET", "http://169.254.169.254/latest/meta-data/")
|
|
||||||
with pytest.raises(httpx.RequestError):
|
|
||||||
asyncio.run(validate_public_request(request))
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_public_request_hook_allows_public():
|
|
||||||
def _fake(*a, **kw):
|
|
||||||
return [(socket.AF_INET, 0, 0, "", ("8.8.8.8", 0))]
|
|
||||||
with patch("socket.getaddrinfo", _fake):
|
|
||||||
request = httpx.Request("GET", "https://example.test/")
|
|
||||||
asyncio.run(validate_public_request(request)) # must not raise
|
|
||||||
|
|
||||||
|
|
||||||
# ======================================================================
|
|
||||||
# Credential redaction — finding #3
|
|
||||||
# ======================================================================
|
|
||||||
|
|
||||||
|
|
||||||
def test_secret_keys_covers_all_booru_client_params():
|
|
||||||
"""Every secret query param used by any booru client must be in SECRET_KEYS."""
|
|
||||||
# Danbooru: login + api_key
|
|
||||||
# e621: login + api_key
|
|
||||||
# Gelbooru: api_key + user_id
|
|
||||||
# Moebooru: login + password_hash
|
|
||||||
for key in ("login", "api_key", "user_id", "password_hash"):
|
|
||||||
assert key in SECRET_KEYS
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_url_replaces_secrets():
|
|
||||||
redacted = redact_url("https://x.test/posts.json?login=alice&api_key=supersecret&tags=cats")
|
|
||||||
assert "alice" not in redacted
|
|
||||||
assert "supersecret" not in redacted
|
|
||||||
assert "tags=cats" in redacted
|
|
||||||
assert "login=%2A%2A%2A" in redacted
|
|
||||||
assert "api_key=%2A%2A%2A" in redacted
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_url_leaves_non_secret_params_alone():
|
|
||||||
redacted = redact_url("https://x.test/posts.json?tags=cats&limit=50")
|
|
||||||
assert redacted == "https://x.test/posts.json?tags=cats&limit=50"
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_url_no_query_passthrough():
|
|
||||||
assert redact_url("https://x.test/") == "https://x.test/"
|
|
||||||
assert redact_url("https://x.test/posts.json") == "https://x.test/posts.json"
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_url_password_hash_and_user_id():
|
|
||||||
redacted = redact_url(
|
|
||||||
"https://x.test/post.json?login=a&password_hash=b&user_id=42&tags=cats"
|
|
||||||
)
|
|
||||||
assert "password_hash=%2A%2A%2A" in redacted
|
|
||||||
assert "user_id=%2A%2A%2A" in redacted
|
|
||||||
assert "tags=cats" in redacted
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_url_preserves_fragment_and_path():
|
|
||||||
redacted = redact_url("https://x.test/a/b/c?api_key=secret#frag")
|
|
||||||
assert redacted.startswith("https://x.test/a/b/c?")
|
|
||||||
assert redacted.endswith("#frag")
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_params_replaces_secrets():
|
|
||||||
out = redact_params({"api_key": "s", "tags": "cats", "login": "alice"})
|
|
||||||
assert out["api_key"] == "***"
|
|
||||||
assert out["login"] == "***"
|
|
||||||
assert out["tags"] == "cats"
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_params_empty():
|
|
||||||
assert redact_params({}) == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_redact_params_no_secrets():
|
|
||||||
out = redact_params({"tags": "cats", "limit": 50})
|
|
||||||
assert out == {"tags": "cats", "limit": 50}
|
|
||||||
@ -212,102 +212,6 @@ def test_download_cap_running_total_aborts(tmp_path, monkeypatch):
|
|||||||
assert not local.exists()
|
assert not local.exists()
|
||||||
|
|
||||||
|
|
||||||
# -- _looks_like_media (audit finding #10) --
|
|
||||||
|
|
||||||
|
|
||||||
def test_looks_like_media_jpeg_magic_recognised():
|
|
||||||
from booru_viewer.core.cache import _looks_like_media
|
|
||||||
assert _looks_like_media(b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_looks_like_media_png_magic_recognised():
|
|
||||||
from booru_viewer.core.cache import _looks_like_media
|
|
||||||
assert _looks_like_media(b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_looks_like_media_webm_magic_recognised():
|
|
||||||
from booru_viewer.core.cache import _looks_like_media
|
|
||||||
# EBML header (Matroska/WebM): 1A 45 DF A3
|
|
||||||
assert _looks_like_media(b"\x1aE\xdf\xa3" + b"\x00" * 20) is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_looks_like_media_html_rejected():
|
|
||||||
from booru_viewer.core.cache import _looks_like_media
|
|
||||||
assert _looks_like_media(b"<!doctype html><html><body>") is False
|
|
||||||
assert _looks_like_media(b"<html><head>") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_looks_like_media_empty_rejected():
|
|
||||||
"""An empty buffer means the server returned nothing useful — fail
|
|
||||||
closed (rather than the on-disk validator's open-on-error fallback)."""
|
|
||||||
from booru_viewer.core.cache import _looks_like_media
|
|
||||||
assert _looks_like_media(b"") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_looks_like_media_unknown_magic_accepted():
|
|
||||||
"""Non-HTML, non-magic bytes are conservative-OK — some boorus
|
|
||||||
serve exotic-but-legal containers we don't enumerate."""
|
|
||||||
from booru_viewer.core.cache import _looks_like_media
|
|
||||||
assert _looks_like_media(b"random non-html data ") is True
|
|
||||||
|
|
||||||
|
|
||||||
# -- _do_download early header validation (audit finding #10) --
|
|
||||||
|
|
||||||
|
|
||||||
def test_do_download_early_rejects_html_payload(tmp_path):
|
|
||||||
"""A hostile server that returns HTML in the body (omitting
|
|
||||||
Content-Type so the early text/html guard doesn't fire) must be
|
|
||||||
caught by the magic-byte check before any bytes land on disk.
|
|
||||||
Audit finding #10: this used to wait for the full download to
|
|
||||||
complete before _is_valid_media rejected, wasting bandwidth."""
|
|
||||||
response = _FakeResponse(
|
|
||||||
headers={"content-length": "0"}, # no Content-Type, no length
|
|
||||||
chunks=[b"<!doctype html><html><body>500</body></html>"],
|
|
||||||
)
|
|
||||||
client = _FakeClient(response)
|
|
||||||
local = tmp_path / "out.jpg"
|
|
||||||
|
|
||||||
with pytest.raises(ValueError, match="not valid media"):
|
|
||||||
asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, local, None))
|
|
||||||
|
|
||||||
assert not local.exists()
|
|
||||||
|
|
||||||
|
|
||||||
def test_do_download_early_rejects_html_across_tiny_chunks(tmp_path):
|
|
||||||
"""The accumulator must combine chunks smaller than the 16-byte
|
|
||||||
minimum so a server delivering one byte at a time can't slip
|
|
||||||
past the magic-byte check."""
|
|
||||||
response = _FakeResponse(
|
|
||||||
headers={"content-length": "0"},
|
|
||||||
chunks=[b"<!", b"do", b"ct", b"yp", b"e ", b"ht", b"ml", b">", b"x" * 100],
|
|
||||||
)
|
|
||||||
client = _FakeClient(response)
|
|
||||||
local = tmp_path / "out.jpg"
|
|
||||||
|
|
||||||
with pytest.raises(ValueError, match="not valid media"):
|
|
||||||
asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, local, None))
|
|
||||||
|
|
||||||
assert not local.exists()
|
|
||||||
|
|
||||||
|
|
||||||
def test_do_download_writes_valid_jpeg_after_early_validation(tmp_path):
|
|
||||||
"""A real JPEG-like header passes the early check and the rest
|
|
||||||
of the stream is written through to disk. Header bytes must
|
|
||||||
appear in the final file (not be silently dropped)."""
|
|
||||||
body = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01" + b"PAYLOAD" + b"\xff\xd9"
|
|
||||||
response = _FakeResponse(
|
|
||||||
headers={"content-length": str(len(body)), "content-type": "image/jpeg"},
|
|
||||||
chunks=[body[:8], body[8:]], # split mid-magic
|
|
||||||
)
|
|
||||||
client = _FakeClient(response)
|
|
||||||
local = tmp_path / "out.jpg"
|
|
||||||
|
|
||||||
asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, local, None))
|
|
||||||
|
|
||||||
assert local.exists()
|
|
||||||
assert local.read_bytes() == body
|
|
||||||
|
|
||||||
|
|
||||||
# -- _is_valid_media OSError fallback --
|
# -- _is_valid_media OSError fallback --
|
||||||
|
|
||||||
def test_is_valid_media_returns_true_on_oserror(tmp_path):
|
def test_is_valid_media_returns_true_on_oserror(tmp_path):
|
||||||
@ -318,71 +222,3 @@ def test_is_valid_media_returns_true_on_oserror(tmp_path):
|
|||||||
OS issue persisted."""
|
OS issue persisted."""
|
||||||
nonexistent = tmp_path / "definitely-not-here.jpg"
|
nonexistent = tmp_path / "definitely-not-here.jpg"
|
||||||
assert _is_valid_media(nonexistent) is True
|
assert _is_valid_media(nonexistent) is True
|
||||||
|
|
||||||
|
|
||||||
# -- _url_locks LRU cap (audit finding #5) --
|
|
||||||
|
|
||||||
def test_url_locks_capped_at_max():
|
|
||||||
"""The per-URL coalesce lock table must not grow beyond _URL_LOCKS_MAX
|
|
||||||
entries. Without the cap, a long browsing session or an adversarial
|
|
||||||
booru returning cache-buster query strings would leak one Lock per
|
|
||||||
unique URL until OOM."""
|
|
||||||
cache._url_locks.clear()
|
|
||||||
try:
|
|
||||||
for i in range(cache._URL_LOCKS_MAX + 500):
|
|
||||||
cache._get_url_lock(f"hash{i}")
|
|
||||||
assert len(cache._url_locks) <= cache._URL_LOCKS_MAX
|
|
||||||
finally:
|
|
||||||
cache._url_locks.clear()
|
|
||||||
|
|
||||||
|
|
||||||
def test_url_locks_returns_same_lock_for_same_hash():
|
|
||||||
"""Two get_url_lock calls with the same hash must return the same
|
|
||||||
Lock object — that's the whole point of the coalesce table."""
|
|
||||||
cache._url_locks.clear()
|
|
||||||
try:
|
|
||||||
lock_a = cache._get_url_lock("hashA")
|
|
||||||
lock_b = cache._get_url_lock("hashA")
|
|
||||||
assert lock_a is lock_b
|
|
||||||
finally:
|
|
||||||
cache._url_locks.clear()
|
|
||||||
|
|
||||||
|
|
||||||
def test_url_locks_lru_keeps_recently_used():
|
|
||||||
"""LRU semantics: a hash that gets re-touched moves to the end of
|
|
||||||
the OrderedDict and is the youngest, so eviction picks an older
|
|
||||||
entry instead."""
|
|
||||||
cache._url_locks.clear()
|
|
||||||
try:
|
|
||||||
cache._get_url_lock("oldest")
|
|
||||||
cache._get_url_lock("middle")
|
|
||||||
cache._get_url_lock("oldest") # touch — now youngest
|
|
||||||
# The dict should now be: middle, oldest (insertion order with
|
|
||||||
# move_to_end on the touch).
|
|
||||||
keys = list(cache._url_locks.keys())
|
|
||||||
assert keys == ["middle", "oldest"]
|
|
||||||
finally:
|
|
||||||
cache._url_locks.clear()
|
|
||||||
|
|
||||||
|
|
||||||
def test_url_locks_eviction_skips_held_locks():
|
|
||||||
"""A held lock (one a coroutine is mid-`async with` on) must NOT be
|
|
||||||
evicted; popping it would break the coroutine's __aexit__. The
|
|
||||||
eviction loop sees `lock.locked()` and skips it."""
|
|
||||||
cache._url_locks.clear()
|
|
||||||
try:
|
|
||||||
# Seed an entry and hold it.
|
|
||||||
held = cache._get_url_lock("held_hash")
|
|
||||||
|
|
||||||
async def hold_and_fill():
|
|
||||||
async with held:
|
|
||||||
# While we're holding the lock, force eviction by
|
|
||||||
# filling past the cap.
|
|
||||||
for i in range(cache._URL_LOCKS_MAX + 100):
|
|
||||||
cache._get_url_lock(f"new{i}")
|
|
||||||
# The held lock must still be present.
|
|
||||||
assert "held_hash" in cache._url_locks
|
|
||||||
|
|
||||||
asyncio.run(hold_and_fill())
|
|
||||||
finally:
|
|
||||||
cache._url_locks.clear()
|
|
||||||
|
|||||||
@ -6,14 +6,10 @@ Locks in:
|
|||||||
depth alongside `_validate_folder_name`)
|
depth alongside `_validate_folder_name`)
|
||||||
- `find_library_files` matching exactly the root + 1-level subdirectory
|
- `find_library_files` matching exactly the root + 1-level subdirectory
|
||||||
layout that the library uses, with the right MEDIA_EXTENSIONS filter
|
layout that the library uses, with the right MEDIA_EXTENSIONS filter
|
||||||
- `data_dir` chmods its directory to 0o700 on POSIX (audit #4)
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from booru_viewer.core import config
|
from booru_viewer.core import config
|
||||||
@ -59,87 +55,3 @@ def test_find_library_files_walks_root_and_one_level(tmp_library):
|
|||||||
match_names = {p.name for p in matches}
|
match_names = {p.name for p in matches}
|
||||||
|
|
||||||
assert match_names == {"123.jpg", "123.png"}
|
assert match_names == {"123.jpg", "123.png"}
|
||||||
|
|
||||||
|
|
||||||
# -- data_dir permissions (audit finding #4) --
|
|
||||||
|
|
||||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
|
|
||||||
def test_data_dir_chmod_700(tmp_path, monkeypatch):
|
|
||||||
"""`data_dir()` chmods the platform data dir to 0o700 on POSIX so the
|
|
||||||
SQLite DB and api_key columns inside aren't readable by other local
|
|
||||||
users on shared machines or networked home dirs."""
|
|
||||||
monkeypatch.setenv("XDG_DATA_HOME", str(tmp_path))
|
|
||||||
path = config.data_dir()
|
|
||||||
mode = os.stat(path).st_mode & 0o777
|
|
||||||
assert mode == 0o700, f"expected 0o700, got {oct(mode)}"
|
|
||||||
# Idempotent: a second call leaves the mode at 0o700.
|
|
||||||
config.data_dir()
|
|
||||||
mode2 = os.stat(path).st_mode & 0o777
|
|
||||||
assert mode2 == 0o700
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
|
|
||||||
def test_data_dir_tightens_loose_existing_perms(tmp_path, monkeypatch):
|
|
||||||
"""If a previous version (or external tooling) left the dir at 0o755,
|
|
||||||
the next data_dir() call must tighten it back to 0o700."""
|
|
||||||
monkeypatch.setenv("XDG_DATA_HOME", str(tmp_path))
|
|
||||||
pre = tmp_path / config.APPNAME
|
|
||||||
pre.mkdir()
|
|
||||||
os.chmod(pre, 0o755)
|
|
||||||
config.data_dir()
|
|
||||||
mode = os.stat(pre).st_mode & 0o777
|
|
||||||
assert mode == 0o700
|
|
||||||
|
|
||||||
|
|
||||||
# -- render_filename_template Windows reserved names (finding #7) --
|
|
||||||
|
|
||||||
|
|
||||||
def _fake_post(tag_categories=None, **overrides):
|
|
||||||
"""Build a minimal Post-like object suitable for render_filename_template.
|
|
||||||
|
|
||||||
A real Post needs file_url + tag_categories; defaults are fine for the
|
|
||||||
reserved-name tests since they only inspect the artist/character tokens.
|
|
||||||
"""
|
|
||||||
from booru_viewer.core.api.base import Post
|
|
||||||
return Post(
|
|
||||||
id=overrides.get("id", 999),
|
|
||||||
file_url=overrides.get("file_url", "https://x.test/abc.jpg"),
|
|
||||||
preview_url=None,
|
|
||||||
tags="",
|
|
||||||
score=0,
|
|
||||||
rating=None,
|
|
||||||
source=None,
|
|
||||||
tag_categories=tag_categories or {},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("reserved", [
|
|
||||||
"con", "CON", "prn", "PRN", "aux", "AUX", "nul", "NUL",
|
|
||||||
"com1", "COM1", "com9", "lpt1", "LPT1", "lpt9",
|
|
||||||
])
|
|
||||||
def test_render_filename_template_prefixes_reserved_names(reserved):
|
|
||||||
"""A tag whose value renders to a Windows reserved device name must
|
|
||||||
be prefixed with `_` so the resulting filename can't redirect to a
|
|
||||||
device on Windows. Audit finding #7."""
|
|
||||||
post = _fake_post(tag_categories={"Artist": [reserved]})
|
|
||||||
out = config.render_filename_template("%artist%", post, ext=".jpg")
|
|
||||||
# Stem (before extension) must NOT be a reserved name.
|
|
||||||
stem = out.split(".", 1)[0]
|
|
||||||
assert stem.lower() != reserved.lower()
|
|
||||||
assert stem.startswith("_")
|
|
||||||
|
|
||||||
|
|
||||||
def test_render_filename_template_passes_normal_names_unchanged():
|
|
||||||
"""Non-reserved tags must NOT be prefixed."""
|
|
||||||
post = _fake_post(tag_categories={"Artist": ["miku"]})
|
|
||||||
out = config.render_filename_template("%artist%", post, ext=".jpg")
|
|
||||||
assert out == "miku.jpg"
|
|
||||||
|
|
||||||
|
|
||||||
def test_render_filename_template_reserved_with_extension_in_template():
|
|
||||||
"""`con.jpg` from a tag-only stem must still be caught — the dot in
|
|
||||||
the stem is irrelevant; CON is reserved regardless of extension."""
|
|
||||||
post = _fake_post(tag_categories={"Artist": ["con"]})
|
|
||||||
out = config.render_filename_template("%artist%.%ext%", post, ext=".jpg")
|
|
||||||
assert not out.startswith("con")
|
|
||||||
assert out.startswith("_con")
|
|
||||||
|
|||||||
@ -13,9 +13,6 @@ These tests lock in the `54ccc40` security/correctness fixes:
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from booru_viewer.core.db import _validate_folder_name
|
from booru_viewer.core.db import _validate_folder_name
|
||||||
@ -45,34 +42,6 @@ def test_validate_folder_name_rejects_traversal():
|
|||||||
_validate_folder_name("~user") # leading tilde
|
_validate_folder_name("~user") # leading tilde
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
|
|
||||||
def test_db_file_chmod_600(tmp_db):
|
|
||||||
"""Audit finding #4: the SQLite file must be 0o600 on POSIX so the
|
|
||||||
plaintext api_key/api_user columns aren't readable by other local
|
|
||||||
users on shared workstations."""
|
|
||||||
# The conn property triggers _restrict_perms() the first time it's
|
|
||||||
# accessed; tmp_db calls it via add_site/etc., but a defensive
|
|
||||||
# access here makes the assertion order-independent.
|
|
||||||
_ = tmp_db.conn
|
|
||||||
mode = os.stat(tmp_db._path).st_mode & 0o777
|
|
||||||
assert mode == 0o600, f"expected 0o600, got {oct(mode)}"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
|
|
||||||
def test_db_wal_sidecar_chmod_600(tmp_db):
|
|
||||||
"""The -wal sidecar created by PRAGMA journal_mode=WAL must also
|
|
||||||
be 0o600. It carries in-flight transactions including the most
|
|
||||||
recent api_key writes — same exposure as the main DB file."""
|
|
||||||
# Force a write so the WAL file actually exists.
|
|
||||||
tmp_db.add_site("test", "http://example.test", "danbooru")
|
|
||||||
# Re-trigger the chmod pass now that the sidecar exists.
|
|
||||||
tmp_db._restrict_perms()
|
|
||||||
wal = type(tmp_db._path)(str(tmp_db._path) + "-wal")
|
|
||||||
if wal.exists():
|
|
||||||
mode = os.stat(wal).st_mode & 0o777
|
|
||||||
assert mode == 0o600, f"expected 0o600 on WAL sidecar, got {oct(mode)}"
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_folder_name_accepts_unicode_and_punctuation():
|
def test_validate_folder_name_accepts_unicode_and_punctuation():
|
||||||
"""Common real-world folder names must pass through unchanged. The
|
"""Common real-world folder names must pass through unchanged. The
|
||||||
guard is meant to block escape shapes, not normal naming."""
|
guard is meant to block escape shapes, not normal naming."""
|
||||||
|
|||||||
@ -1,128 +0,0 @@
|
|||||||
"""Tests for save_post_file.
|
|
||||||
|
|
||||||
Pins the contract that category_fetcher is a *required* keyword arg
|
|
||||||
(no silent default) so a forgotten plumb can't result in a save that
|
|
||||||
drops category tokens from the filename template.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import inspect
|
|
||||||
from dataclasses import dataclass, field
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from booru_viewer.core.library_save import save_post_file
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class FakePost:
|
|
||||||
id: int = 12345
|
|
||||||
tags: str = "1girl greatartist"
|
|
||||||
tag_categories: dict = field(default_factory=dict)
|
|
||||||
score: int = 0
|
|
||||||
rating: str = ""
|
|
||||||
source: str = ""
|
|
||||||
file_url: str = ""
|
|
||||||
|
|
||||||
|
|
||||||
class PopulatingFetcher:
|
|
||||||
"""ensure_categories fills in the artist category from scratch,
|
|
||||||
emulating the HTML-scrape/batch-API happy path."""
|
|
||||||
|
|
||||||
def __init__(self, categories: dict[str, list[str]]):
|
|
||||||
self._categories = categories
|
|
||||||
self.calls = 0
|
|
||||||
|
|
||||||
async def ensure_categories(self, post) -> None:
|
|
||||||
self.calls += 1
|
|
||||||
post.tag_categories = dict(self._categories)
|
|
||||||
|
|
||||||
|
|
||||||
def _run(coro):
|
|
||||||
return asyncio.new_event_loop().run_until_complete(coro)
|
|
||||||
|
|
||||||
|
|
||||||
def test_category_fetcher_is_keyword_only_and_required():
|
|
||||||
"""Signature check: category_fetcher must be explicit at every
|
|
||||||
call site — no ``= None`` default that callers can forget."""
|
|
||||||
sig = inspect.signature(save_post_file)
|
|
||||||
param = sig.parameters["category_fetcher"]
|
|
||||||
assert param.kind == inspect.Parameter.KEYWORD_ONLY, (
|
|
||||||
"category_fetcher should be keyword-only"
|
|
||||||
)
|
|
||||||
assert param.default is inspect.Parameter.empty, (
|
|
||||||
"category_fetcher must not have a default — forcing every caller "
|
|
||||||
"to pass it (even as None) is the whole point of this contract"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_template_category_populated_via_fetcher(tmp_path, tmp_db):
|
|
||||||
"""Post with empty tag_categories + a template using %artist% +
|
|
||||||
a working fetcher → saved filename includes the fetched artist
|
|
||||||
instead of falling back to the bare id."""
|
|
||||||
src = tmp_path / "src.jpg"
|
|
||||||
src.write_bytes(b"fake-image-bytes")
|
|
||||||
dest_dir = tmp_path / "dest"
|
|
||||||
|
|
||||||
tmp_db.set_setting("library_filename_template", "%artist%_%id%")
|
|
||||||
|
|
||||||
post = FakePost(id=12345, tag_categories={})
|
|
||||||
fetcher = PopulatingFetcher({"Artist": ["greatartist"]})
|
|
||||||
|
|
||||||
result = _run(save_post_file(
|
|
||||||
src, post, dest_dir, tmp_db,
|
|
||||||
category_fetcher=fetcher,
|
|
||||||
))
|
|
||||||
|
|
||||||
assert fetcher.calls == 1, "fetcher should be invoked exactly once"
|
|
||||||
assert result.name == "greatartist_12345.jpg", (
|
|
||||||
f"expected templated filename, got {result.name!r}"
|
|
||||||
)
|
|
||||||
assert result.exists()
|
|
||||||
|
|
||||||
|
|
||||||
def test_none_fetcher_accepted_when_categories_prepopulated(tmp_path, tmp_db):
|
|
||||||
"""Pass-None contract: sites like Danbooru/e621 return ``None``
|
|
||||||
from ``_get_category_fetcher`` because Post already arrives with
|
|
||||||
tag_categories populated. ``save_post_file`` must accept None
|
|
||||||
explicitly — the change is about forcing callers to think, not
|
|
||||||
about forbidding None."""
|
|
||||||
src = tmp_path / "src.jpg"
|
|
||||||
src.write_bytes(b"x")
|
|
||||||
dest_dir = tmp_path / "dest"
|
|
||||||
|
|
||||||
tmp_db.set_setting("library_filename_template", "%artist%_%id%")
|
|
||||||
|
|
||||||
post = FakePost(id=999, tag_categories={"Artist": ["inlineartist"]})
|
|
||||||
|
|
||||||
result = _run(save_post_file(
|
|
||||||
src, post, dest_dir, tmp_db,
|
|
||||||
category_fetcher=None,
|
|
||||||
))
|
|
||||||
|
|
||||||
assert result.name == "inlineartist_999.jpg"
|
|
||||||
assert result.exists()
|
|
||||||
|
|
||||||
|
|
||||||
def test_fetcher_not_called_when_template_has_no_category_tokens(tmp_path, tmp_db):
|
|
||||||
"""Purely-id template → fetcher ``ensure_categories`` never
|
|
||||||
invoked, even when categories are empty (the fetch is expensive
|
|
||||||
and would be wasted)."""
|
|
||||||
src = tmp_path / "src.jpg"
|
|
||||||
src.write_bytes(b"x")
|
|
||||||
dest_dir = tmp_path / "dest"
|
|
||||||
|
|
||||||
tmp_db.set_setting("library_filename_template", "%id%")
|
|
||||||
|
|
||||||
post = FakePost(id=42, tag_categories={})
|
|
||||||
fetcher = PopulatingFetcher({"Artist": ["unused"]})
|
|
||||||
|
|
||||||
_run(save_post_file(
|
|
||||||
src, post, dest_dir, tmp_db,
|
|
||||||
category_fetcher=fetcher,
|
|
||||||
))
|
|
||||||
|
|
||||||
assert fetcher.calls == 0
|
|
||||||
@ -1,58 +0,0 @@
|
|||||||
"""Tests for the project-wide PIL decompression-bomb cap (audit #8).
|
|
||||||
|
|
||||||
The cap lives in `booru_viewer/core/__init__.py` so any import of
|
|
||||||
any `booru_viewer.core.*` submodule installs it first — independent
|
|
||||||
of whether `core.cache` is on the import path. Both checks are run
|
|
||||||
in a fresh subprocess so the assertion isn't masked by some other
|
|
||||||
test's previous import.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
EXPECTED = 256 * 1024 * 1024
|
|
||||||
|
|
||||||
|
|
||||||
def _run(code: str) -> str:
|
|
||||||
result = subprocess.run(
|
|
||||||
[sys.executable, "-c", code],
|
|
||||||
capture_output=True,
|
|
||||||
text=True,
|
|
||||||
check=True,
|
|
||||||
)
|
|
||||||
return result.stdout.strip()
|
|
||||||
|
|
||||||
|
|
||||||
def test_core_package_import_installs_cap():
|
|
||||||
"""Importing the core package alone must set MAX_IMAGE_PIXELS."""
|
|
||||||
out = _run(
|
|
||||||
"import booru_viewer.core; "
|
|
||||||
"from PIL import Image; "
|
|
||||||
"print(Image.MAX_IMAGE_PIXELS)"
|
|
||||||
)
|
|
||||||
assert int(out) == EXPECTED
|
|
||||||
|
|
||||||
|
|
||||||
def test_core_submodule_import_installs_cap():
|
|
||||||
"""Importing any non-cache core submodule must still set the cap —
|
|
||||||
the invariant is that the package __init__.py runs before any
|
|
||||||
submodule code, regardless of which submodule is the entry point."""
|
|
||||||
out = _run(
|
|
||||||
"from booru_viewer.core import config; "
|
|
||||||
"from PIL import Image; "
|
|
||||||
"print(Image.MAX_IMAGE_PIXELS)"
|
|
||||||
)
|
|
||||||
assert int(out) == EXPECTED
|
|
||||||
|
|
||||||
|
|
||||||
def test_core_cache_import_still_installs_cap():
|
|
||||||
"""Regression: the old code path (importing cache first) must keep
|
|
||||||
working after the move."""
|
|
||||||
out = _run(
|
|
||||||
"from booru_viewer.core import cache; "
|
|
||||||
"from PIL import Image; "
|
|
||||||
"print(Image.MAX_IMAGE_PIXELS)"
|
|
||||||
)
|
|
||||||
assert int(out) == EXPECTED
|
|
||||||
@ -1,88 +0,0 @@
|
|||||||
"""Tests for the pure mpv kwargs builder.
|
|
||||||
|
|
||||||
Pure Python. No Qt, no mpv, no network. The helper is importable
|
|
||||||
from the CI environment that installs only httpx + Pillow + pytest.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from booru_viewer.gui.media._mpv_options import (
|
|
||||||
LAVF_PROTOCOL_WHITELIST,
|
|
||||||
build_mpv_kwargs,
|
|
||||||
lavf_options,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_ytdl_disabled():
|
|
||||||
"""Finding #2 — mpv must not delegate URLs to yt-dlp."""
|
|
||||||
kwargs = build_mpv_kwargs(is_windows=False)
|
|
||||||
assert kwargs["ytdl"] == "no"
|
|
||||||
|
|
||||||
|
|
||||||
def test_load_scripts_disabled():
|
|
||||||
"""Finding #2 — no auto-loading of ~/.config/mpv/scripts."""
|
|
||||||
kwargs = build_mpv_kwargs(is_windows=False)
|
|
||||||
assert kwargs["load_scripts"] == "no"
|
|
||||||
|
|
||||||
|
|
||||||
def test_protocol_whitelist_not_in_init_kwargs():
|
|
||||||
"""Finding #2 — the lavf protocol whitelist must NOT be in the
|
|
||||||
init kwargs dict. python-mpv's init path uses
|
|
||||||
``mpv_set_option_string``, which trips on the comma-laden value
|
|
||||||
with -7 OPT_FORMAT. The whitelist is applied separately via the
|
|
||||||
property API in ``mpv_gl.py`` (see ``lavf_options``)."""
|
|
||||||
kwargs = build_mpv_kwargs(is_windows=False)
|
|
||||||
assert "demuxer_lavf_o" not in kwargs
|
|
||||||
assert "demuxer-lavf-o" not in kwargs
|
|
||||||
|
|
||||||
|
|
||||||
def test_lavf_options_protocol_whitelist():
|
|
||||||
"""Finding #2 — lavf demuxer must only accept file + HTTP(S) + TLS/TCP.
|
|
||||||
|
|
||||||
Returned as a dict so callers can pass it through the python-mpv
|
|
||||||
property API (which uses the node API and handles comma-laden
|
|
||||||
values cleanly).
|
|
||||||
"""
|
|
||||||
opts = lavf_options()
|
|
||||||
assert opts.keys() == {"protocol_whitelist"}
|
|
||||||
allowed = set(opts["protocol_whitelist"].split(","))
|
|
||||||
# `file` must be present — cached local clips and .part files use it.
|
|
||||||
assert "file" in allowed
|
|
||||||
# HTTP(S) + supporting protocols for network videos.
|
|
||||||
assert "http" in allowed
|
|
||||||
assert "https" in allowed
|
|
||||||
assert "tls" in allowed
|
|
||||||
assert "tcp" in allowed
|
|
||||||
# Dangerous protocols must NOT appear.
|
|
||||||
for banned in ("concat", "subfile", "data", "udp", "rtp", "crypto"):
|
|
||||||
assert banned not in allowed
|
|
||||||
# The constant and the helper return the same value.
|
|
||||||
assert opts["protocol_whitelist"] == LAVF_PROTOCOL_WHITELIST
|
|
||||||
|
|
||||||
|
|
||||||
def test_input_conf_nulled_on_posix():
|
|
||||||
"""Finding #2 — on POSIX, skip loading ~/.config/mpv/input.conf."""
|
|
||||||
kwargs = build_mpv_kwargs(is_windows=False)
|
|
||||||
assert kwargs["input_conf"] == "/dev/null"
|
|
||||||
|
|
||||||
|
|
||||||
def test_input_conf_skipped_on_windows():
|
|
||||||
"""Finding #2 — input_conf gate is POSIX-only; Windows omits the key."""
|
|
||||||
kwargs = build_mpv_kwargs(is_windows=True)
|
|
||||||
assert "input_conf" not in kwargs
|
|
||||||
|
|
||||||
|
|
||||||
def test_existing_options_preserved():
|
|
||||||
"""Regression: pre-audit playback/audio tuning must remain."""
|
|
||||||
kwargs = build_mpv_kwargs(is_windows=False)
|
|
||||||
# Discord screen-share audio fix (see mpv_gl.py comment).
|
|
||||||
assert kwargs["ao"] == "pulse,wasapi,"
|
|
||||||
assert kwargs["audio_client_name"] == "booru-viewer"
|
|
||||||
# Network tuning from the uncached-video fast path.
|
|
||||||
assert kwargs["cache"] == "yes"
|
|
||||||
assert kwargs["cache_pause"] == "no"
|
|
||||||
assert kwargs["demuxer_max_bytes"] == "50MiB"
|
|
||||||
assert kwargs["network_timeout"] == "10"
|
|
||||||
# Existing input lockdown (primary — input_conf is defense-in-depth).
|
|
||||||
assert kwargs["input_default_bindings"] is False
|
|
||||||
assert kwargs["input_vo_keyboard"] is False
|
|
||||||
@ -1,87 +0,0 @@
|
|||||||
"""Tests for the pure info-panel source HTML builder.
|
|
||||||
|
|
||||||
Pure Python. No Qt, no network. Validates audit finding #6 — that the
|
|
||||||
helper escapes booru-controlled `post.source` before it's interpolated
|
|
||||||
into a QTextBrowser RichText document.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from booru_viewer.gui._source_html import build_source_html
|
|
||||||
|
|
||||||
|
|
||||||
def test_none_returns_literal_none():
|
|
||||||
assert build_source_html(None) == "none"
|
|
||||||
assert build_source_html("") == "none"
|
|
||||||
|
|
||||||
|
|
||||||
def test_plain_https_url_renders_escaped_anchor():
|
|
||||||
out = build_source_html("https://example.test/post/1")
|
|
||||||
assert out.startswith('<a href="https://example.test/post/1"')
|
|
||||||
assert ">https://example.test/post/1</a>" in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_long_url_display_text_truncated_but_href_full():
|
|
||||||
long_url = "https://example.test/" + "a" * 200
|
|
||||||
out = build_source_html(long_url)
|
|
||||||
# href contains the full URL
|
|
||||||
assert long_url in out.replace("&", "&")
|
|
||||||
# Display text is truncated to 57 chars + "..."
|
|
||||||
assert "..." in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_double_quote_in_url_escaped():
|
|
||||||
"""A `"` in the source must not break out of the href attribute."""
|
|
||||||
hostile = 'https://attacker.test/"><img src=x>'
|
|
||||||
out = build_source_html(hostile)
|
|
||||||
# Raw <img> must NOT appear — html.escape converts < to <
|
|
||||||
assert "<img" not in out
|
|
||||||
# The display text must also have the raw markup escaped.
|
|
||||||
assert ">" in out or """ in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_html_tags_in_url_escaped():
|
|
||||||
hostile = 'https://attacker.test/<script>alert(1)</script>'
|
|
||||||
out = build_source_html(hostile)
|
|
||||||
assert "<script>" not in out
|
|
||||||
assert "<script>" in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_non_url_source_rendered_as_escaped_plain_text():
|
|
||||||
"""A source string that isn't an http(s) URL is rendered as plain
|
|
||||||
text — no <a> tag, but still HTML-escaped."""
|
|
||||||
out = build_source_html("not a url <b>at all</b>")
|
|
||||||
assert "<a" not in out
|
|
||||||
assert "<b>" not in out
|
|
||||||
assert "<b>" in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_javascript_url_does_not_become_anchor():
|
|
||||||
"""Sources that don't start with http(s) — including `javascript:` —
|
|
||||||
must NOT be wrapped in an <a> tag where they'd become a clickable
|
|
||||||
link target."""
|
|
||||||
out = build_source_html("javascript:alert(1)")
|
|
||||||
assert "<a " not in out
|
|
||||||
assert "alert(1)" in out # text content preserved (escaped)
|
|
||||||
|
|
||||||
|
|
||||||
def test_data_url_does_not_become_anchor():
|
|
||||||
out = build_source_html("data:text/html,<script>x</script>")
|
|
||||||
assert "<a " not in out
|
|
||||||
assert "<script>" not in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_ampersand_in_url_escaped():
|
|
||||||
out = build_source_html("https://example.test/?a=1&b=2")
|
|
||||||
# `&` must be `&` inside the href attribute
|
|
||||||
assert "&" in out
|
|
||||||
# Raw `&b=` is NOT acceptable as an attribute value
|
|
||||||
assert 'href="https://example.test/?a=1&b=2"' in out
|
|
||||||
|
|
||||||
|
|
||||||
def test_pixiv_real_world_source_unchanged_visually():
|
|
||||||
"""Realistic input — a normal pixiv link — should pass through with
|
|
||||||
no surprising changes."""
|
|
||||||
out = build_source_html("https://www.pixiv.net/artworks/12345")
|
|
||||||
assert 'href="https://www.pixiv.net/artworks/12345"' in out
|
|
||||||
assert "https://www.pixiv.net/artworks/12345</a>" in out
|
|
||||||
Loading…
x
Reference in New Issue
Block a user