Compare commits
357 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 83a0637750 | |||
| 04e85e000c | |||
| 7a32dc931a | |||
| e0146a4681 | |||
| 1941cb35e8 | |||
| c16c3a794a | |||
| 21ac77ab7b | |||
| cd688be893 | |||
| 7c4215c5d7 | |||
| eab805e705 | |||
| db4348c077 | |||
| deec81fc12 | |||
| 585979a0d1 | |||
| b63341fec1 | |||
| 873dcd8998 | |||
| cec93545ad | |||
| 9ec034f7ef | |||
| ab44735f28 | |||
| 90b27fe36a | |||
| 730b2a7b7e | |||
| 0f26475f52 | |||
| cf8bc0ad89 | |||
| bbf0d3107b | |||
| ec9e44efbe | |||
| 24f398795b | |||
| 3b3de35689 | |||
| 21bb3aa979 | |||
| 289e4c2fdb | |||
| 3c2aa5820d | |||
| a2609199bd | |||
| c3efcf9f89 | |||
| 22f09c3cdb | |||
| 70a7903f85 | |||
| e004add28f | |||
| 9713794633 | |||
| 860c8dcd50 | |||
| 0d75b8a3c8 | |||
| 94a64dcd25 | |||
| 3d26e40e0f | |||
| 2cdab574ca | |||
| 57108cd0b5 | |||
| 667ee87641 | |||
| 2e436af4e8 | |||
| a7586a9e43 | |||
| ad6f876f40 | |||
| 56c5eac870 | |||
| 11cc26479b | |||
| 14c81484c9 | |||
| 0d72b0ec8a | |||
| 445d3c7a0f | |||
| 0583f962d1 | |||
| 3868858811 | |||
| 7ef517235f | |||
| 2824840b07 | |||
| 61403c8acc | |||
| 2e9b99e4b8 | |||
| 73206994ec | |||
| 738e1329b8 | |||
| a3cb563ae0 | |||
| 60cf4e0beb | |||
| 692a0c1569 | |||
| b964a77688 | |||
| 10f1b3fd10 | |||
| 5564f4cf0a | |||
| b055cdd1a2 | |||
| 45b87adb33 | |||
| c11cca1134 | |||
| fa8c5b84cf | |||
| c3258c1d53 | |||
| 3a95b6817d | |||
| b00f3ff95c | |||
| 172fae9583 | |||
| 12ec94b4b1 | |||
| f83435904a | |||
| a73c2d6b02 | |||
| 738ece9cd5 | |||
| 3d288a909f | |||
| a8dfff90c5 | |||
| 14033b57b5 | |||
| 9592830e67 | |||
| d895c28608 | |||
| 53a8622020 | |||
| 88f6d769c8 | |||
| 5812f54877 | |||
| 0a046bf936 | |||
| 0c0dd55907 | |||
| 710839387a | |||
| d355f24394 | |||
| f687141f80 | |||
| d64b1d6465 | |||
| 558c19bdb5 | |||
| 4bcff35708 | |||
| 79419794f6 | |||
| 5e8035cb1d | |||
| 52b76dfc83 | |||
| c210c4b44a | |||
| fd21f735fb | |||
| e9d1ca7b3a | |||
| 21f2fa1513 | |||
| ebaacb8a25 | |||
| 553734fe79 | |||
| c1af3f2e02 | |||
| 7046f9b94e | |||
| ac3939ef61 | |||
| e939085ac9 | |||
| b28cc0d104 | |||
| 37f89c0bf8 | |||
| 925e8c1001 | |||
| a760b39c07 | |||
| 77e49268ae | |||
| e262a2d3bb | |||
| a51c9a1fda | |||
| 7249d57852 | |||
| e31ca07973 | |||
| 58cbeec2e4 | |||
| 2186f50065 | |||
| 07665942db | |||
| 1864cfb088 | |||
| a849b8f900 | |||
| af0d8facb8 | |||
| 1531db27b7 | |||
| 278d4a291d | |||
| 5858c274c8 | |||
| 4db7943ac7 | |||
| 160db1f12a | |||
| ec781141b3 | |||
| 5a511338c8 | |||
| b65f8da837 | |||
| fef3c237f1 | |||
| 8f9e4f7e65 | |||
| 2bb6352141 | |||
| 6ff1f726d4 | |||
| b8cb47badb | |||
| fa4f2cb270 | |||
| 5d348fa8be | |||
| a6a73fed61 | |||
| 6801a0b45e | |||
| 19a22be59c | |||
| 49fa2c5b7a | |||
| c0c8fdadbf | |||
| 9a3bb697ec | |||
| d6909bf4d7 | |||
| c735db0c68 | |||
| ef95509551 | |||
| ec79be9c83 | |||
| 6eebb77ae5 | |||
| 013fe43f95 | |||
| 72803f0b14 | |||
| 22744c48af | |||
| 0aa3d8113d | |||
| 75bbcc5d76 | |||
| c91326bf4b | |||
| b1e4efdd0b | |||
| 836e2a97e3 | |||
| 4bc7037222 | |||
| cb4d0ac851 | |||
| 10c2dcb8aa | |||
| a90aa2dc77 | |||
| 5bf85f223b | |||
| 5e6361c31b | |||
| 35135c9a5b | |||
| fa9fcc3db0 | |||
| c440065513 | |||
| 00b8e352ea | |||
| c8b21305ba | |||
| 9081208170 | |||
| b541f64374 | |||
| 9c42b4fdd7 | |||
| a1ea2b8727 | |||
| 4ba9990f3a | |||
| 868b1a7708 | |||
| 09fadcf3c2 | |||
| 88a3fe9528 | |||
| e28ae6f4af | |||
| 6aa8677a2d | |||
| cc616d1cf4 | |||
| 42e7f2b529 | |||
| 0b4fc9fa49 | |||
| 0f2e800481 | |||
| 15870daae5 | |||
| 27c53cb237 | |||
| b1139cbea6 | |||
| 93459dfff6 | |||
| d7b3c304d7 | |||
| 28c40bc1f5 | |||
| 094a22db25 | |||
| faf9657ed9 | |||
| 5261fa176d | |||
| 94588e324c | |||
| 9cc294a16a | |||
| 08f99a6101 | |||
| ba49a59385 | |||
| aac7b08787 | |||
| d4bad47d42 | |||
| df301c754c | |||
| de6961da37 | |||
| f9977b61e6 | |||
| 562c03071b | |||
| b858b4ac43 | |||
| 87be4eb2a6 | |||
| 8e9dda8671 | |||
| 0a8d392158 | |||
| 20fc6f551e | |||
| 71d426e0cf | |||
| 446abe6ba9 | |||
| cb2445a90a | |||
| 321ba8edfa | |||
| 3f7981a8c6 | |||
| d66dc14454 | |||
| e5a33739c9 | |||
| 60867cfa37 | |||
| df3b1d06d8 | |||
| 127ee4315c | |||
| 48feafa977 | |||
| 38c5aefa27 | |||
| a632f1b961 | |||
| 80607835d1 | |||
| 8c1266ab0d | |||
| a90d71da47 | |||
| ecda09152c | |||
| 9a8e6037c3 | |||
| 33227f3795 | |||
| ee9d67e853 | |||
| 8ee7a2704b | |||
| bda21a2615 | |||
| 9b30e742c7 | |||
| 31089adf7d | |||
| 64f0096f32 | |||
| c02cc4fc38 | |||
| f63ac4c6d8 | |||
| 6833ae701d | |||
| cc7ac67cac | |||
| 762718be6d | |||
| f382a2ebe2 | |||
| dfe8fd3815 | |||
| 272a84a0ab | |||
| 84d39b3cda | |||
| 3a87d24631 | |||
| fa06eb16be | |||
| 09485884de | |||
| 19423776bc | |||
| d9830d0f68 | |||
| a01ac34944 | |||
| 264c421dff | |||
| acfcb88aca | |||
| 8c5c2e37d3 | |||
| 510b423327 | |||
| 82e7c77251 | |||
| 4c490498e0 | |||
| a86941decf | |||
| 57a19f87ba | |||
| 403c099bed | |||
| 912be0bc80 | |||
| f168bece00 | |||
| 35424ff89d | |||
| 7d11aeab06 | |||
| 1547cbe55a | |||
| 762d73dc4f | |||
| f0fe52c886 | |||
| 165733c6e0 | |||
| af9b68273c | |||
| e2a666885f | |||
| 8f8db62a5a | |||
| fa1222a774 | |||
| 9a05286f06 | |||
| f5954d1387 | |||
| 834deecf57 | |||
| 7f897df4b2 | |||
| 5ba0441be7 | |||
| 9001808951 | |||
| 8f298e51fc | |||
| e00d88e1ec | |||
| 5395569213 | |||
| 81fc4d93eb | |||
| a27672b95e | |||
| 3ef1a0bbd3 | |||
| 150970b56f | |||
| 5976a81bb6 | |||
| 6f59de0c64 | |||
| 28348fa9ab | |||
| f0b1fc9052 | |||
| 98ac31079a | |||
| d05a9cd368 | |||
| f6c5c6780d | |||
| b7cb021d1b | |||
| b72f3a54c0 | |||
| 38937528ef | |||
| 9248dd77aa | |||
| 6075f31917 | |||
| 003a2b221e | |||
| 03102090e5 | |||
| 75caf8c520 | |||
| 23828e7d0c | |||
| 77a53a42c9 | |||
| af265c6077 | |||
| 69f75fc98f | |||
| 0ef3643b32 | |||
| d48435db1c | |||
| 1b66b03a30 | |||
| a2b759be90 | |||
| ec238f3aa4 | |||
| 69d25b325e | |||
| 609066cf87 | |||
| 35d80c32f2 | |||
| 45e6042ebb | |||
| 095942c524 | |||
| 06f8f3d752 | |||
| 3ade3a71c1 | |||
| 4fb17151b1 | |||
| 527cb3489b | |||
| a03d0e9dc8 | |||
| d75076c14b | |||
| 664d4e9cda | |||
| a9ce01e6c1 | |||
| 7fdc67c613 | |||
| f2f7d64759 | |||
| 39816144fe | |||
| 9cba7d5583 | |||
| bf14466382 | |||
| 80001e64fe | |||
| 0a6818260e | |||
| 44a20ac057 | |||
| 553e31075d | |||
| 92c1824720 | |||
| b571c9a486 | |||
| c4061b0d20 | |||
| 9455ff0f03 | |||
| dbc530bb3c | |||
| db774fc33e | |||
| c4efdb76f8 | |||
| af1715708b | |||
| da36c4a8f2 | |||
| eded7790af | |||
| 9d99ecfcb5 | |||
| 702fd5ca7b | |||
| 591c7c3118 | |||
| 4c166ac725 | |||
| 8637202110 | |||
| fa2d31243c | |||
| aacae06406 | |||
| 2865be4826 | |||
| 18a86358e2 | |||
| cd7b8a3cca | |||
| 31d02d3c7b | |||
| fda3b10beb | |||
| 7d195558f6 | |||
| ba5a47f8af | |||
| 987d987512 | |||
| 7b61d36718 | |||
| 5a44593a6a | |||
| baa910ac81 | |||
| b89baaae34 | |||
| 250b144806 | |||
| 3f2c8aefe3 | |||
| bad3e897a1 | |||
| eb58d76bc0 | |||
| 54ccc40477 |
55
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
Normal file
55
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
name: Bug Report
|
||||
description: Something broken or misbehaving
|
||||
title: "[BUG] "
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: summary
|
||||
attributes:
|
||||
label: Summary
|
||||
description: What's broken?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: repro
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
value: |
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected vs actual behavior
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: OS
|
||||
options: [Linux, Windows, Other]
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: booru-viewer version / commit
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: python
|
||||
attributes:
|
||||
label: Python & PySide6 version
|
||||
- type: dropdown
|
||||
id: backend
|
||||
attributes:
|
||||
label: Booru backend
|
||||
options: [Danbooru, Gelbooru, Safebooru, e621, Other]
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Logs / traceback
|
||||
render: shell
|
||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Questions and general discussion
|
||||
url: https://github.com/pxlwh/booru-viewer/discussions
|
||||
about: For usage questions, setup help, and general chat that isn't a bug
|
||||
- name: Gitea mirror
|
||||
url: https://git.pax.moe/pax/booru-viewer
|
||||
about: Primary development repo — same codebase, also accepts issues
|
||||
22
.github/ISSUE_TEMPLATE/docs.yaml
vendored
Normal file
22
.github/ISSUE_TEMPLATE/docs.yaml
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
name: Documentation Issue
|
||||
description: Typos, unclear sections, missing docs, broken links
|
||||
title: "[DOCS] "
|
||||
labels: ["documentation"]
|
||||
body:
|
||||
- type: input
|
||||
id: file
|
||||
attributes:
|
||||
label: File or page
|
||||
description: README.md, themes/README.md, HYPRLAND.md, KEYBINDS.md, in-app help, etc.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: problem
|
||||
attributes:
|
||||
label: What's wrong or missing?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: suggestion
|
||||
attributes:
|
||||
label: Suggested fix or addition
|
||||
28
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
Normal file
28
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
name: Feature Request
|
||||
description: Suggest a new feature or enhancement
|
||||
title: "[FEAT] "
|
||||
labels: ["enhancement"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: problem
|
||||
attributes:
|
||||
label: Problem
|
||||
description: What's the use case or pain point?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: proposal
|
||||
attributes:
|
||||
label: Proposed solution
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: alternatives
|
||||
attributes:
|
||||
label: Alternatives considered
|
||||
- type: checkboxes
|
||||
id: scope
|
||||
attributes:
|
||||
label: Scope check
|
||||
options:
|
||||
- label: I've checked this isn't already implemented or tracked
|
||||
70
.github/ISSUE_TEMPLATE/hyprland_wayland.yaml
vendored
Normal file
70
.github/ISSUE_TEMPLATE/hyprland_wayland.yaml
vendored
Normal file
@ -0,0 +1,70 @@
|
||||
name: Hyprland / Wayland Issue
|
||||
description: Compositor-specific issues (window positioning, popout math, Waybar, multi-monitor)
|
||||
title: "[HYPR] "
|
||||
labels: ["hyprland", "wayland"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: summary
|
||||
attributes:
|
||||
label: What's happening?
|
||||
description: Describe the compositor-specific behavior you're seeing
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: compositor
|
||||
attributes:
|
||||
label: Compositor
|
||||
options: [Hyprland, Sway, KDE/KWin Wayland, GNOME/Mutter Wayland, Other Wayland, Other]
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: compositor_version
|
||||
attributes:
|
||||
label: Compositor version
|
||||
description: e.g. Hyprland v0.42.0
|
||||
- type: dropdown
|
||||
id: monitors
|
||||
attributes:
|
||||
label: Monitor setup
|
||||
options: [Single monitor, Dual monitor, 3+ monitors, Mixed scaling, Mixed refresh rates]
|
||||
- type: dropdown
|
||||
id: area
|
||||
attributes:
|
||||
label: What area is affected?
|
||||
options:
|
||||
- Main window geometry / position
|
||||
- Popout window positioning
|
||||
- Popout aspect-ratio lock
|
||||
- Popout anchor (resize pivot)
|
||||
- Context menu / popup positioning
|
||||
- Waybar exclusive zone handling
|
||||
- Fullscreen (F11)
|
||||
- Privacy screen overlay
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: envvars
|
||||
attributes:
|
||||
label: Relevant env vars set
|
||||
description: BOORU_VIEWER_NO_HYPR_RULES, BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK, etc.
|
||||
placeholder: "BOORU_VIEWER_NO_HYPR_RULES=1"
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: windowrules
|
||||
attributes:
|
||||
label: Any windowrules targeting booru-viewer?
|
||||
description: Paste relevant rules from your compositor config
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: hyprctl
|
||||
attributes:
|
||||
label: hyprctl output (if applicable)
|
||||
description: "`hyprctl monitors -j`, `hyprctl clients -j` filtered to booru-viewer"
|
||||
render: json
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: booru-viewer version / commit
|
||||
validations:
|
||||
required: true
|
||||
72
.github/ISSUE_TEMPLATE/performance.yaml
vendored
Normal file
72
.github/ISSUE_TEMPLATE/performance.yaml
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
name: Performance Issue
|
||||
description: Slowdowns, lag, high memory/CPU, UI freezes (distinct from broken features)
|
||||
title: "[PERF] "
|
||||
labels: ["performance"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: summary
|
||||
attributes:
|
||||
label: What's slow?
|
||||
description: Describe what feels sluggish and what you'd expect
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: area
|
||||
attributes:
|
||||
label: What area?
|
||||
options:
|
||||
- Grid scroll / infinite scroll
|
||||
- Thumbnail loading
|
||||
- Search / API requests
|
||||
- Image preview / pan-zoom
|
||||
- Video playback
|
||||
- Popout open / close
|
||||
- Popout navigation
|
||||
- Settings / dialogs
|
||||
- Startup
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: repro
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
value: |
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: timings
|
||||
attributes:
|
||||
label: Approximate timings
|
||||
description: How long does the slow operation take? How long would you expect?
|
||||
- type: input
|
||||
id: library_size
|
||||
attributes:
|
||||
label: Library / bookmark size
|
||||
description: Number of saved files and/or bookmarks, if relevant
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: OS
|
||||
options: [Linux, Windows, Other]
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: hardware
|
||||
attributes:
|
||||
label: Hardware (CPU / RAM / GPU)
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant DEBUG logs
|
||||
description: Launch with Ctrl+L open and reproduce — paste anything that looks slow
|
||||
render: shell
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: booru-viewer version / commit
|
||||
validations:
|
||||
required: true
|
||||
26
.github/ISSUE_TEMPLATE/site_support.yaml
vendored
Normal file
26
.github/ISSUE_TEMPLATE/site_support.yaml
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
name: Site Support Request
|
||||
description: Request support for a new booru backend
|
||||
title: "[SITE] "
|
||||
labels: ["site-support"]
|
||||
body:
|
||||
- type: input
|
||||
id: site
|
||||
attributes:
|
||||
label: Site name and URL
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: api
|
||||
attributes:
|
||||
label: API type
|
||||
options: [Danbooru-compatible, Gelbooru-compatible, Moebooru, Shimmie2, Unknown, Other]
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: api_docs
|
||||
attributes:
|
||||
label: Link to API documentation (if any)
|
||||
- type: textarea
|
||||
id: notes
|
||||
attributes:
|
||||
label: Auth, rate limits, or quirks worth knowing
|
||||
30
.github/ISSUE_TEMPLATE/theme_submission.yaml
vendored
Normal file
30
.github/ISSUE_TEMPLATE/theme_submission.yaml
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
name: Theme Submission
|
||||
description: Submit a palette for inclusion
|
||||
title: "[THEME] "
|
||||
labels: ["theme"]
|
||||
body:
|
||||
- type: input
|
||||
id: name
|
||||
attributes:
|
||||
label: Theme name
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: palette
|
||||
attributes:
|
||||
label: Palette file contents
|
||||
description: Paste the full @palette block or the complete .qss file
|
||||
render: css
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: screenshot
|
||||
attributes:
|
||||
label: Screenshot URL
|
||||
- type: checkboxes
|
||||
id: license
|
||||
attributes:
|
||||
label: Licensing
|
||||
options:
|
||||
- label: I'm okay with this being distributed under the project's license
|
||||
required: true
|
||||
39
.github/ISSUE_TEMPLATE/ux_feedback.yaml
vendored
Normal file
39
.github/ISSUE_TEMPLATE/ux_feedback.yaml
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
name: UX Feedback
|
||||
description: Non-bug UX suggestions, workflow friction, small polish
|
||||
title: "[UX] "
|
||||
labels: ["ux"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: context
|
||||
attributes:
|
||||
label: What were you trying to do?
|
||||
description: The workflow or action where the friction happened
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: friction
|
||||
attributes:
|
||||
label: What felt awkward or wrong?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: suggestion
|
||||
attributes:
|
||||
label: What would feel better?
|
||||
description: Optional — a rough idea is fine
|
||||
- type: dropdown
|
||||
id: area
|
||||
attributes:
|
||||
label: Area
|
||||
options:
|
||||
- Grid / thumbnails
|
||||
- Preview pane
|
||||
- Popout window
|
||||
- Top bar / filters
|
||||
- Search
|
||||
- Bookmarks
|
||||
- Library
|
||||
- Settings
|
||||
- Keyboard shortcuts
|
||||
- Theming
|
||||
- Other
|
||||
14
.github/workflows/tests.yml
vendored
Normal file
14
.github/workflows/tests.yml
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
name: tests
|
||||
on: [push, pull_request]
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install test deps
|
||||
run: pip install httpx[http2] Pillow pytest
|
||||
- name: Run tests
|
||||
run: PYTHONPATH=. pytest tests/ -v
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -7,6 +7,7 @@ build/
|
||||
*.egg
|
||||
.venv/
|
||||
venv/
|
||||
docs/
|
||||
project.md
|
||||
*.bak/
|
||||
*.dll
|
||||
|
||||
709
CHANGELOG.md
709
CHANGELOG.md
@ -1,8 +1,523 @@
|
||||
# Changelog
|
||||
|
||||
## 0.2.0
|
||||
## [Unreleased]
|
||||
|
||||
### Added
|
||||
- Settings → Cache: **Clear Tag Cache** button — wipes the per-site `tag_types` rows (including the `__batch_api_probe__` sentinel) so Gelbooru/Moebooru backends re-probe and re-populate tag categories from scratch. Useful when a stale cache from an earlier build leaves some category types mis-labelled or missing
|
||||
|
||||
### Changed
|
||||
- Thumbnail drag-start threshold raised from 10px to 30px to match the rubber band's gate — small mouse wobbles on a thumb no longer trigger a file drag
|
||||
- Settings → Cache layout: Clear Tag Cache moved into row 1 alongside Clear Thumbnails and Clear Image Cache as a 3-wide non-destructive row; destructive Clear Everything + Evict stay in row 2
|
||||
|
||||
### Fixed
|
||||
- Grid blanked out after splitter drag or tile/float toggle until the next scroll — `ThumbnailGrid.resizeEvent` now re-runs `_recycle_offscreen` against the new geometry so thumbs whose pixmap was evicted by a column-count shift get refreshed into view. **Behavior change:** no more blank grid after resize
|
||||
- Status bar overwrote the per-post info set by `_on_post_selected` with `"N results — Loaded"` the moment the image finished downloading, hiding tag counts / post ID until the user re-clicked; `on_image_done` now preserves the incoming `info` string
|
||||
- `category_fetcher._do_ensure` no longer permanently flips `_batch_api_works` to False when a transient network error drops a tag-API request mid-call; the unprobed path now routes through `_probe_batch_api`, which distinguishes clean 200-with-zero-matches (structurally broken, flip) from timeout/HTTP-error (transient, retry next call)
|
||||
- Bookmark→library save and bookmark Save As now plumb the active site's `CategoryFetcher` through to the filename template, so `%artist%`/`%character%` tokens render correctly instead of silently dropping out when saving a post that wasn't previewed first
|
||||
- Info panel no longer silently drops tags that failed to land in a cached category — any tag from `post.tag_list` not rendered under a known category section now appears in an "Other" bucket, so partial cache coverage can't make individual tags invisible
|
||||
- `BooruClient._request` retries now cover `httpx.RemoteProtocolError` and `httpx.ReadError` in addition to the existing timeout/connect/network set — an overloaded booru that drops the TCP connection mid-response no longer fails the whole search on the first try
|
||||
- VRAM retained when no video is playing — `stop()` now frees the GL render context (textures + FBOs) instead of just dropping the hwdec surface pool. Context is recreated lazily on next `play_file()` via `ensure_gl_init()` (~5ms, invisible behind network fetch)
|
||||
|
||||
### Refactored
|
||||
- `category_fetcher` batch tag-API params are now built by a shared `_build_tag_api_params` helper instead of duplicated across `fetch_via_tag_api` and `_probe_batch_api`
|
||||
- `detect.detect_site_type` — removed the leftover `if True:` indent marker; no behavior change
|
||||
- `core.http.make_client` — single constructor for the three `httpx.AsyncClient` instances (cache download pool, API pool, detect probe). Each call site still keeps its own singleton and connection pool; only the construction is shared
|
||||
- Silent `except: pass` sites in `popout/window`, `video_player`, and `window_state` now carry one-line comments naming the absorbed failure and the graceful fallback (or were downgraded to `log.debug(..., exc_info=True)`). No behavior change
|
||||
- Popout docstrings purged of in-flight-refactor commit markers (`skeleton`, `14a`, `14b`, `future commit`) that referred to now-landed state-machine extraction; load-bearing commit 14b reference kept in `_dispatch_and_apply` as it still protects against reintroducing the bug
|
||||
- `core/cache.py` tempfile cleanup: `BaseException` catch now documents why it's intentionally broader than `Exception`
|
||||
- `api/e621` and `api/moebooru` JSON parse guards narrowed from bare `except` to `ValueError`
|
||||
- `gui/media/video_player.py` — `import time` hoisted to module top
|
||||
- `gui/post_actions.is_in_library` — dead `try/except` stripped
|
||||
|
||||
### Removed
|
||||
- Unused `Favorite` alias in `core/db.py` — callers migrated to `Bookmark` in 0.2.5, nothing referenced the fallback anymore
|
||||
|
||||
## v0.2.7
|
||||
|
||||
### Fixed
|
||||
- Popout always reopened as floating even when tiled at close — Hyprland tiled state is now persisted and restored via `settiled` on reopen
|
||||
- Video stutter on network streams — `cache_pause_initial` was blocking first frame, reverted cache_pause changes and kept larger demuxer buffer
|
||||
- Rubber band selection state getting stuck across interrupted drags
|
||||
- LIKE wildcards in `search_library_meta` not being escaped
|
||||
- Copy File to Clipboard broken in preview pane and popout; added Copy Image URL action
|
||||
- Thumbnail cleanup and Post ID sort broken for templated filenames in library
|
||||
- Save/unsave bookmark UX — no flash on toggle, correct dot indicators
|
||||
- Autocomplete broken for multi-tag queries
|
||||
- Search not resetting to page 1 on new query
|
||||
- Fade animation cleanup crashing `FlowLayout.clear`
|
||||
- Privacy toggle not preserving video pause state
|
||||
- Bookmarks grid not refreshing on unsave
|
||||
- `_cached_path` not set for streaming videos
|
||||
- Standard icon column showing in QMessageBox dialogs
|
||||
- Popout aspect lock for bookmarks now reads actual image dimensions instead of guessing
|
||||
- GPU resource leak on Mesa/Intel drivers — `mpv_render_context_free` now runs with the owning GL context current (NVIDIA tolerated the bug, other drivers did not)
|
||||
- Popout teardown `AttributeError` when `centralWidget()` or `QApplication.instance()` returned `None` during init/shutdown race
|
||||
- Category fetcher rejects XML responses containing `<!DOCTYPE` or `<!ENTITY` before parsing, blocking XXE and billion-laughs payloads from user-configured sites
|
||||
- VRAM not released on popout close — `video_player` now drops the hwdec surface pool on stop and popout runs explicit mpv cleanup before teardown
|
||||
- Popout open animation was being suppressed by the `no_anim` aspect-lock workaround — first fit after open now lets Hyprland's `windowsIn`/`popin` play; subsequent navigation fits still suppress anim to avoid resize flicker
|
||||
- Thumbnail grid blanking out after Hyprland tiled resize until a scroll/click — viewport is now force-updated at the end of `ThumbnailGrid.resizeEvent` so the Qt Wayland buffer stays in sync with the new geometry
|
||||
- Library video thumbnails captured from a black opening frame — mpv now seeks to 10% before the first frame decode so title cards, fade-ins, and codec warmup no longer produce a black thumbnail (delete `~/.cache/booru-viewer/thumbnails/library/` to regenerate existing entries)
|
||||
|
||||
### Changed
|
||||
- Uncached videos now download via httpx in parallel with mpv streaming — file is cached immediately for copy/paste without waiting for playback to finish
|
||||
- Library video thumbnails use mpv instead of ffmpeg — drops the ffmpeg dependency entirely
|
||||
- Save/Unsave from Library mutually exclusive in context menus, preview pane, and popout
|
||||
- S key guard consistent with B/F behavior
|
||||
- Tag count limits removed from info panel
|
||||
- Ctrl+S and Ctrl+D menu shortcuts removed (conflict-prone)
|
||||
- Thumbnail fade-in shortened from 200ms to 80ms
|
||||
- Default demuxer buffer reduced to 50MiB; streaming URLs still get 150MiB
|
||||
- Minimum width set on thumbnail grid
|
||||
- Popout overlay hover zone enlarged
|
||||
- Settings dialog gets an Apply button; thumbnail size and flip layout apply live
|
||||
- Tab selection preserved on view switch
|
||||
- Scroll delta accumulated for volume control and zoom (smoother with hi-res scroll wheels)
|
||||
- Force Fusion widget style when no `custom.qss` is present
|
||||
- Dark Fusion palette applied as fallback when no system Qt theme file (`Trolltech.conf`) is detected; KDE/GNOME users keep their own palette
|
||||
- **Behavior change:** popout re-fits window to current content's aspect and resets zoom when leaving a tiled layout to a different-aspect image or video; previously restored the old floating geometry with the wrong aspect lock
|
||||
|
||||
### Performance
|
||||
- Thumbnails re-decoded from disk on size change instead of holding full pixmaps in memory
|
||||
- Off-screen thumbnail pixmaps recycled (decoded on demand from cached path)
|
||||
- Lookup sets cached across infinite scroll appends; invalidated on bookmark/save
|
||||
- `auto_evict_cache` throttled to once per 30s
|
||||
- Stale prefetch spirals cancelled on new click
|
||||
- Single-pass directory walk in cache eviction functions
|
||||
- GTK dialog platform detection cached instead of recreating Database per call
|
||||
|
||||
### Removed
|
||||
- Dead code: `core/images.py`
|
||||
- `TODO.md`
|
||||
- Unused imports across `main_window`, `grid`, `settings`, `dialogs`, `sites`, `search_controller`, `video_player`, `info_panel`
|
||||
- Dead `mid` variable in `grid.paintEvent`, dead `get_connection_log` import in `settings._build_network_tab`
|
||||
|
||||
## v0.2.6
|
||||
|
||||
### Security: 2026-04-10 audit remediation
|
||||
|
||||
Closes 12 of the 16 findings from the read-only audit at `docs/SECURITY_AUDIT.md`. Two High, four Medium, four Low, and two Informational findings fixed; the four skipped Informational items are documented at the bottom. Each fix is its own commit on the `security/audit-2026-04-10` branch with an `Audit-Ref:` trailer.
|
||||
|
||||
- **#1 SSRF (High)**: every httpx client now installs an event hook that resolves the target host and rejects loopback, RFC1918, link-local (including the 169.254.169.254 cloud-metadata endpoint), CGNAT, unique-local v6, and multicast. Hook fires on every redirect hop, not just the initial request. **Behavior change:** user-configured boorus pointing at private/loopback addresses now fail with `blocked request target ...` instead of being probed. Test Connection on a local booru will be rejected.
|
||||
- **#2 mpv (High)**: the embedded mpv instance is constructed with `ytdl=no`, `load_scripts=no`, and `demuxer_lavf_o=protocol_whitelist=file,http,https,tls,tcp`, plus `input_conf=/dev/null` on POSIX. Closes the yt-dlp delegation surface (CVE-prone extractors invoked on attacker-supplied URLs) and the `concat:`/`subfile:` local-file-read gadget via ffmpeg's lavf demuxer. **Behavior change:** any `file_url` whose host is only handled by yt-dlp (youtube.com, reddit.com, ...) no longer plays. Boorus do not legitimately serve such URLs, so in practice this only affects hostile responses.
|
||||
- **#3 Credential logging (Medium)**: `login`, `api_key`, `user_id`, and `password_hash` are now stripped from URLs and params before any logging path emits them. Single redaction helper in `core/api/_safety.py`, called from the booru-base request hook and from each per-client `log.debug` line.
|
||||
- **#4 DB + data dir permissions (Medium)**: on POSIX, `~/.local/share/booru-viewer/` is now `0o700` and `booru.db` (plus the `-wal`/`-shm` sidecars) is `0o600`. **Behavior change:** existing installs are tightened on next launch. Windows is unchanged — NTFS ACLs handle this separately.
|
||||
- **#5 Lock leak (Medium)**: the per-URL coalesce lock table is capped at 4096 entries with LRU eviction. Eviction skips currently-held locks so a coroutine mid-`async with` can't be ripped out from under itself.
|
||||
- **#6 HTML injection (Medium)**: `post.source` is escaped before insertion into the info-panel rich text. Non-http(s) sources (including `javascript:` and `data:`) render as plain escaped text without an `<a>` tag, so they can't become click targets.
|
||||
- **#7 Windows reserved names (Low)**: `render_filename_template` now prefixes filenames whose stem matches a reserved Windows device name (`CON`, `PRN`, `AUX`, `NUL`, `COM1-9`, `LPT1-9`) with `_`, regardless of host platform. Cross-OS library copies stay safe.
|
||||
- **#8 PIL bomb cap (Low)**: `Image.MAX_IMAGE_PIXELS=256M` moved from `core/cache.py` (where it was a side-effect of import order) to `core/__init__.py`, so any `booru_viewer.core.*` import installs the cap first.
|
||||
- **#9 Dependency bounds (Low)**: upper bounds added to runtime deps in `pyproject.toml` (`httpx<1.0`, `Pillow<12.0`, `PySide6<7.0`, `python-mpv<2.0`). Lock-file generation deferred — see `TODO.md`.
|
||||
- **#10 Early content validation (Low)**: `_do_download` now accumulates the first 16 bytes of the response and validates magic bytes before committing to writing the rest. A hostile server omitting Content-Type previously could burn up to `MAX_DOWNLOAD_BYTES` (500MB) of bandwidth before the post-download check rejected.
|
||||
- **#14 Category fetcher body cap (Informational)**: HTML body the regex walks over in `CategoryFetcher.fetch_post` is truncated at 2MB. Defense in depth — the regex is linear-bounded but a multi-MB hostile body still pegs CPU.
|
||||
- **#16 Logging hook gap (Informational)**: e621 and detect_site_type clients now install the `_log_request` hook so their requests appear in the connection log alongside the base client. Absorbed into the #1 wiring commits since both files were already being touched.
|
||||
|
||||
**Skipped (Wontfix), with reason:**
|
||||
- **#11 64-bit hash truncation**: not exploitable in practice (audit's own words). Fix would change every cache path and require a migration.
|
||||
- **#12 Referer leak through CDN redirects**: intentional — booru CDNs gate downloads on Referer matching. Documented; not fixed.
|
||||
- **#13 hyprctl batch joining**: user is trusted in the threat model and Hyprland controls the field. Informational only.
|
||||
- **#15 dead code in `core/images.py`**: code quality, not security. Out of scope under the no-refactor constraint. Logged in `TODO.md`.
|
||||
|
||||
## v0.2.5
|
||||
|
||||
Full UI overhaul (icon buttons, compact top bar, responsive video controls), popout resize-pivot anchor, layout flip, and the main_window.py controller decomposition.
|
||||
|
||||
### Refactor: main_window.py controller decomposition
|
||||
|
||||
`main_window.py` went from a 3,318-line god-class to a 1,164-line coordinator plus 7 controller modules. Every other subsystem in the codebase had already been decomposed (popout state machine, library save, category fetcher) — BooruApp was the last monolith. 11 commits, pure refactor, no behavior change. Design doc at `docs/MAIN_WINDOW_REFACTOR.md`.
|
||||
|
||||
- New `gui/window_state.py` (293 lines) — geometry persistence, Hyprland IPC, splitter savers.
|
||||
- New `gui/privacy.py` (66 lines) — privacy overlay toggle + popout coordination.
|
||||
- New `gui/search_controller.py` (572 lines) — search orchestration, infinite scroll, backfill, blacklist filtering, tag building, autocomplete, thumbnail fetching.
|
||||
- New `gui/media_controller.py` (273 lines) — image/video loading, prefetch, download progress, video streaming fast-path, cache eviction.
|
||||
- New `gui/popout_controller.py` (204 lines) — popout lifecycle (open/close), state sync, geometry persistence, navigation delegation.
|
||||
- New `gui/post_actions.py` (561 lines) — bookmarks, save/library, batch download, unsave, bulk ops, blacklist actions from popout.
|
||||
- New `gui/context_menus.py` (246 lines) — single-post and multi-select context menu building + dispatch.
|
||||
- Controller-pattern: each takes `app: BooruApp` via constructor, accesses app internals as trusted collaborator via `self._app`. No mixins, no ABC, no dependency injection — just plain classes with one reference each. `TYPE_CHECKING` import for `BooruApp` avoids circular imports at runtime.
|
||||
- Cleaned up 14 dead imports from `main_window.py`.
|
||||
- The `_fullscreen_window` reference (52 sites across the codebase) was fully consolidated into `PopoutController.window`. No file outside `popout_controller.py` touches `_fullscreen_window` directly anymore.
|
||||
|
||||
### New: Phase 2 test suite (64 tests for extracted pure functions)
|
||||
|
||||
Each controller extraction also pulled decision-making code out into standalone module-level functions that take plain data in and return plain data out. Controllers call those functions; tests import them directly. Same structural forcing function as the popout state machine tests — the test files fail to collect if anyone adds a Qt import to a tested module.
|
||||
|
||||
- `tests/gui/test_search_controller.py` (24 tests): `build_search_tags` rating/score/media filter mapping per API type, `filter_posts` blacklist/dedup/seen-ids interaction, `should_backfill` termination conditions.
|
||||
- `tests/gui/test_window_state.py` (16 tests): `parse_geometry` / `format_geometry` round-trip, `parse_splitter_sizes` validation edge cases, `build_hyprctl_restore_cmds` for every floating/tiled permutation including the no_anim priming path.
|
||||
- `tests/gui/test_media_controller.py` (9 tests): `compute_prefetch_order` for Nearby (cardinals) and Aggressive (ring expansion) modes, including bounds, cap, and dedup invariants.
|
||||
- `tests/gui/test_post_actions.py` (10 tests): `is_batch_message` progress-pattern detection, `is_in_library` path-containment check.
|
||||
- `tests/gui/test_popout_controller.py` (3 tests): `build_video_sync_dict` shape.
|
||||
- Total suite: **186 tests** (57 core + 65 popout state machine + 64 new controller pure functions), ~0.3s runtime, all import-pure.
|
||||
- PySide6 imports in controller modules were made lazy (inside method bodies) so the Phase 2 tests can collect on CI, which only installs `httpx`, `Pillow`, and `pytest`.
|
||||
|
||||
### UI overhaul: icon buttons and responsive layout
|
||||
|
||||
Toolbar and video controls moved from fixed-width text buttons to 24x24 icon buttons. Preview toolbar uses Unicode symbols (☆/★ bookmark, ↓/✕ save, ⊘ blacklist tag, ⊗ blacklist post, ⧉ popout) — both the embedded preview and the popout toolbar share the same object names (`#_tb_bookmark`, `#_tb_save`, `#_tb_bl_tag`, `#_tb_bl_post`, `#_tb_popout`) so one QSS rule styles both. Video controls (play/pause, mute, loop, autoplay) render via QPainter using the palette's `buttonText` color so they match any theme automatically, with `1×` as bold text for the Once loop state.
|
||||
|
||||
- Responsive video controls bar: hides volume slider below 320px, duration label below 240px, current time label below 200px. Play/pause/seek/mute/loop always visible.
|
||||
- Compact top bar: combos use `AdjustToContents`, 3px spacing, top/nav bars wrapped in `#_top_bar` / `#_nav_bar` named containers for theme targeting.
|
||||
- Main window minimum size dropped from 900x600 to 740x400 — the hard floor was blocking Hyprland's keyboard resize mode on narrow floating windows.
|
||||
- Preview pane minimum width dropped from 380 to 200.
|
||||
- Info panel title + details use `QSizePolicy.Ignored` horizontally so long source URLs wrap within the splitter instead of pushing it wider.
|
||||
|
||||
### New: popout anchor setting (resize pivot)
|
||||
|
||||
Combo in Settings > General. Controls which point of the popout window stays fixed across navigations as the aspect ratio changes: `Center` (default, pins window center), or one of the four corners (pins that corner, window grows/shrinks from the opposite corner). The user can still drag the window anywhere — the anchor only controls the resize direction, not the screen position. Works on all platforms; on Hyprland the hyprctl dispatch path is used, elsewhere Qt's `setGeometry` fallback handles the same math.
|
||||
|
||||
- `Viewport.center_x`/`center_y` repurposed as anchor point coordinates — in center mode it's the window center, in corner modes it's the pinned corner. New `anchor_point()` helper in `viewport.py` extracts the right point from a window rect based on mode.
|
||||
- `_compute_window_rect` branches on anchor: center mode keeps the existing symmetric math, corner modes derive position from the anchor point + the new size.
|
||||
- Hyprland monitor reserved-area handling: reads `reserved` from `hyprctl monitors -j` so window positioning respects Waybar's exclusive zone (Qt's `screen.availableGeometry()` doesn't see layer-shell reservations on Wayland).
|
||||
|
||||
### New: layout flip setting
|
||||
|
||||
Checkbox in Settings > General (restart required). Swaps the main splitter — preview+info panel on the left, grid on the right. Useful for left-handed workflows or multi-monitor setups where you want the preview closer to your other reference windows.
|
||||
|
||||
### New: thumbnail fade-in animation
|
||||
|
||||
Thumbnails animate from 0 to 1 opacity over 200ms (OutCubic easing) as they load. Uses a `QPropertyAnimation` on a `thumbOpacity` Qt Property applied in `paintEvent`. The animation is stored on the widget instance to prevent Python garbage collection before the Qt event loop runs it.
|
||||
|
||||
### New: B / F / S keyboard shortcuts
|
||||
|
||||
- `B` or `F` — toggle bookmark on the selected post (works in main grid and popout).
|
||||
- `S` — toggle save to library (Unfiled). If already saved, unsaves. Works in main grid and popout.
|
||||
- The popout gained a new `toggle_save_requested` signal that routes to a shared `PostActionsController.toggle_save_from_preview` so both paths use the same toggle logic.
|
||||
|
||||
### UX: grid click behavior
|
||||
|
||||
- Clicking empty grid space (blue area around thumbnails, cell padding outside the pixmap, or the 2px gaps between cells) deselects everything. Cell padding clicks work via a direct parent-walk from `ThumbnailWidget.mousePressEvent` to the grid — Qt event propagation through `QScrollArea` swallows events too aggressively to rely on.
|
||||
- Rubber band drag selection now works from any empty space — not just the 2px gaps. 30px manhattan threshold gates activation so single clicks on padding just deselect without flashing a zero-size rubber band.
|
||||
- Hover highlight only appears when the cursor is actually over the pixmap, not the cell padding. Uses the same `_hit_pixmap` hit-test as clicks. Cursor swaps between pointing-hand (over pixmap) and arrow (over padding) via `mouseMoveEvent` tracking.
|
||||
- Clicking an already-showing post no longer restarts the video (fixes the click-to-drag case where the drag-start click was restarting mpv).
|
||||
- Escape clears the grid selection.
|
||||
- Stuck forbidden cursor after cancelled drag-and-drop is reset on mouse release. Stuck hover states on Wayland fast-exits are force-cleared in `ThumbnailGrid.leaveEvent`.
|
||||
|
||||
### Themes
|
||||
|
||||
All 12 bundled QSS themes were trimmed and regenerated:
|
||||
|
||||
- Removed 12 dead selector groups that the app never instantiates: `QRadioButton`, `QToolButton`, `QToolBar`, `QDockWidget`, `QTreeView`/`QTreeWidget`, `QTableView`/`QTableWidget`, `QHeaderView`, `QDoubleSpinBox`, `QPlainTextEdit`, `QFrame`.
|
||||
- Popout overlay buttons now use `font-size: 15px; font-weight: bold` so the icon symbols read well against the translucent-black overlay.
|
||||
- `themes/README.md` documents the new `#_tb_*` toolbar button object names and the popout overlay styling. Removed the old Nerd Font remapping note — QSS can't change button text, so that claim was incorrect.
|
||||
|
||||
## v0.2.4
|
||||
|
||||
Library filename templates, tag category fetching for all backends, and a popout video streaming overhaul. 50+ commits since v0.2.3.
|
||||
|
||||
### New: library filename templates
|
||||
|
||||
Save files with custom names instead of bare post IDs. Templates use `%id%`, `%artist%`, `%character%`, `%copyright%`, `%general%`, `%meta%`, `%species%`, `%md5%`, `%rating%`, `%score%`, `%ext%` tokens. Set in Settings > Paths.
|
||||
|
||||
- New `core/library_save.py` module with a single `save_post_file` entry point. All eight save sites (Save to Library, Save As, Bulk Save, Batch Download, and their bookmarks-tab equivalents) route through it.
|
||||
- DB-backed `library_meta.filename` column tracks the rendered name per post. Non-breaking migration for existing databases.
|
||||
- Sequential collision suffixes (`_1`, `_2`, `_3`) when multiple posts render to the same filename (e.g. same artist).
|
||||
- Same-post idempotency via `get_library_post_id_by_filename` lookup. Re-saving a post that already exists under a different template returns the existing path.
|
||||
- `find_library_files` and `delete_from_library` updated to match templated filenames alongside legacy digit-stem files.
|
||||
- `is_post_in_library` / `get_saved_post_ids` DB helpers replace filesystem walks for saved-dot indicators. Format-agnostic.
|
||||
- `reconcile_library_meta` cleans up orphan meta rows on startup.
|
||||
- Saved-dot indicators fixed across all tabs for templated filenames.
|
||||
- Library tab single-delete and multi-delete now clean up `library_meta` rows (was leaking orphan rows for templated files).
|
||||
- Save As dialog default filename comes from the rendered template instead of the old hardcoded `post_` prefix.
|
||||
- Batch downloads into library folders now register `library_meta` (was silently skipping it).
|
||||
- Bookmark-to-library copies now register `library_meta` (was invisible to Library tag search).
|
||||
- Cross-folder re-save is now copy, not move (the atomic rename was a workaround for not having a DB-backed filename column).
|
||||
|
||||
### New: tag category fetching
|
||||
|
||||
Tag categories (Artist, Character, Copyright, General, Meta, Species) now work across all four backends, not just Danbooru and e621.
|
||||
|
||||
- New `CategoryFetcher` module with two strategies: batch tag API (Gelbooru proper with auth) and per-post HTML scrape (Rule34, Safebooru.org, Moebooru sites).
|
||||
- DB-backed `tag_types` cache table. Tags are fetched once per site and cached across sessions. `clear_tag_cache` in Settings wipes it.
|
||||
- Batch API probe result persisted per site. First session probes once; subsequent sessions skip the probe.
|
||||
- Background prefetch for Gelbooru batch API path only. search() fires `prefetch_batch` in the background when `_batch_api_works` is True, so the cache is warm before the user clicks.
|
||||
- Danbooru and e621 `get_post` now populates `tag_categories` inline (latent bug: was returning empty categories on re-fetch).
|
||||
- `categories_updated` signal re-renders the info panel when categories arrive asynchronously.
|
||||
- `_categories_pending` flag on the info panel suppresses the flat-tag fallback flash when a fetch is in progress. Tags area stays empty until categories arrive and render in one pass.
|
||||
- HTML parser two-pass rewrite: Pass 1 finds tag-type elements by class, Pass 2 extracts tag names from `tags=NAME` URL parameters in search links. Works on Rule34, Safebooru.org, and Moebooru.
|
||||
- `save_post_file` ensures categories before template render so `%artist%` / `%character%` tokens resolve on Gelbooru-style sites.
|
||||
- On-demand fetch model for Rule34 / Safebooru.org / Moebooru: ~200ms HTML scrape on first click, instant from cache on re-click.
|
||||
|
||||
### Improved: popout video streaming
|
||||
|
||||
Click-to-first-frame latency on uncached video posts with the popout open is roughly halved. Single HTTP connection per video instead of two.
|
||||
|
||||
- **Stream-record.** mpv's `stream-record` per-file option tees the network stream to a `.part` temp file as it plays. On clean EOF the `.part` is promoted to the real cache path. The parallel httpx download that used to race with mpv for the same bytes is eliminated. Seeks during playback invalidate the recording (mpv may skip byte ranges); the `.part` is discarded on seek, stop, popout close, or rapid click.
|
||||
- **Redundant stops removed.** `_on_video_stream` no longer stops the embedded preview's mpv when the popout is the visible target (was wasting ~50-100ms of synchronous `command('stop')` time). `_apply_load_video` no longer calls `stop()` before `play_file` (`loadfile("replace")` subsumes it).
|
||||
- **Stack switch reordered.** `_apply_load_video` now switches to the video surface before calling `play_file`, so mpv's first frame lands on a visible widget instead of a cleared image viewer.
|
||||
- **mpv network tuning.** `cache_pause=no` (stutter over pause for short clips), 50 MiB demuxer buffer cap, 20s read-ahead, 10s network timeout (down from ~60s).
|
||||
- **Cache eviction safety.** `evict_oldest` skips `.part` files so eviction doesn't delete a temp file mpv is actively writing to.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- **Popout close preserves video position.** `closeEvent` now snapshots `position_ms` before dispatching `CloseRequested` (whose `StopMedia` effect destroys mpv's `time_pos`). The embedded preview resumes at the correct position instead of restarting from 0.
|
||||
- **Library popout aspect lock for images.** Library items' Post objects were constructed without width/height, so the popout got 0/0 and `_fit_to_content` returned early without setting `keep_aspect_ratio`. Now reads actual pixel dimensions via `QPixmap` before constructing the Post.
|
||||
|
||||
### Other
|
||||
|
||||
- README updated, unused Windows screenshots dropped from the repo.
|
||||
- Tightened thumbnail spacing in the grid from 8px to 2px.
|
||||
- Max thumbnail size at 200px.
|
||||
|
||||
## v0.2.3
|
||||
|
||||
A refactor + cleanup release. The two largest source files (`gui/app.py` 3608 lines + `gui/preview.py` 2273 lines) are gone, replaced by a module-per-concern layout. The popout viewer's internal state was rebuilt as an explicit state machine with the historical race bugs locked out structurally instead of by suppression windows. The slider drag-back race that no one had named is finally fixed. A handful of latent bugs got caught and resolved on the way through.
|
||||
|
||||
### Structural refactor: gui/app.py + gui/preview.py split
|
||||
|
||||
The two largest source files were doing too much. `gui/app.py` was 3608 lines mixing async dispatch, signal wiring, tab switching, popout coordination, splitter persistence, context menus, bulk actions, batch download, fullscreen, privacy, and a dozen other concerns. `gui/preview.py` was 2273 lines holding the embedded preview, the popout, the image viewer, the video player, an OpenGL surface, and a click-to-seek slider. Both files had reached the point where almost every commit cited "the staging surface doesn't split cleanly" as the reason for bundling unrelated fixes.
|
||||
|
||||
This release pays that cost down with a structural carve into 12 module-per-concern files plus 2 oversize-by-design god-class files. 14 commits, every commit byte-identical except for relative-import depth corrections, app runnable at every commit boundary.
|
||||
|
||||
- **`gui/app.py` (3608 lines) gone.** Carved into:
|
||||
- `app_runtime.py`: `run()`, `_apply_windows_dark_mode()`, `_load_user_qss()` (`@palette` preprocessor), `_BASE_POPOUT_OVERLAY_QSS`. The QApplication setup, custom QSS load, icon resolution, BooruApp instantiation, and exec loop.
|
||||
- `main_window.py`: `BooruApp(QMainWindow)`, ~3200 lines. The class is one indivisible unit because every method shares instance attributes with every other method. Splitting it across files would have required either inheritance, composition, or method-as-attribute injection, and none of those were worth introducing for a refactor that was supposed to be a pure structural move with no logic changes.
|
||||
- `info_panel.py`: `InfoPanel(QWidget)` toggleable info panel.
|
||||
- `log_handler.py`: `LogHandler(logging.Handler, QObject)` Qt-aware logger adapter.
|
||||
- `async_signals.py`: `AsyncSignals(QObject)` signal hub for async worker results.
|
||||
- `search_state.py`: `SearchState` dataclass.
|
||||
- **`gui/preview.py` (2273 lines) gone.** Carved into:
|
||||
- `preview_pane.py`: `ImagePreview(QWidget)` embedded preview pane.
|
||||
- `popout/window.py`: `FullscreenPreview(QMainWindow)` popout. Initially a single 1136-line file; further carved by the popout state machine refactor below.
|
||||
- `media/constants.py`: `VIDEO_EXTENSIONS`, `_is_video()`.
|
||||
- `media/image_viewer.py`: `ImageViewer(QWidget)` zoom/pan image viewer.
|
||||
- `media/mpv_gl.py`: `_MpvGLWidget` + `_MpvOpenGLSurface`.
|
||||
- `media/video_player.py`: `VideoPlayer(QWidget)` + `_ClickSeekSlider`.
|
||||
- `popout/viewport.py`: `Viewport(NamedTuple)` + `_DRIFT_TOLERANCE`.
|
||||
- **Re-export shim pattern.** Each move added a `from .new_location import MovedClass # re-export for refactor compat` line at the bottom of the old file so existing imports kept resolving the same class object during the migration. The final cleanup commit updated the importer call sites to canonical paths and deleted the now-empty `app.py` and `preview.py`.
|
||||
|
||||
### Bug fixes surfaced by the refactor
|
||||
|
||||
The refactor's "manually verify after every commit" rule exposed 10 latent bugs that had been lurking in the original god-files. Every one of these is a preexisting issue, not something the refactor caused.
|
||||
|
||||
- **Browse multi-select reshape.** Split library and bookmark actions into four distinct entries (Save All / Unsave All / Bookmark All / Remove All Bookmarks), each shown only when the selection actually contains posts the action would affect. The original combined action did both library and bookmark operations under a misleading bookmark-only label, with no way to bulk-unsave without also stripping bookmarks. The reshape resolves the actual need.
|
||||
- **Infinite scroll page_size clamp.** One-line fix at `_on_reached_bottom`'s `search_append.emit` call site (`collected` becomes `collected[:limit]`) to mirror the non-infinite path's slice in `_do_search`. The backfill loop's `>=` break condition allowed the last full batch to push collected past the configured page size.
|
||||
- **Batch download: incremental saved-dot updates and browse-tab-only gating.** Two-part fix. (1) Stash the chosen destination, light saved-dots incrementally as each file lands when the destination is inside `saved_dir()`. (2) Disable the Batch Download menu and Ctrl+D shortcut on the Bookmarks and Library tabs, where it didn't make sense.
|
||||
- **F11 round-trip preserves zoom and position.** Two preservation bugs. (1) `ImageViewer.resizeEvent` no longer clobbers the user's explicit zoom and pan on F11 enter/exit; it uses `event.oldSize()` to detect whether the user was at fit-to-view at the previous size and only re-fits in that case. (2) The popout's F11 enter writes the current Hyprland window state directly into its viewport tracking so F11 exit lands at the actual pre-fullscreen position regardless of how the user got there (drag, drag+nav, drag+F11). The previous drift detection only fired during a fit and missed the "drag then F11 with no nav between" sequence.
|
||||
- **Remove O keybind for Open in Default App.** Five-line block deleted from the main keypress handler. Right-click menu actions stay; only the keyboard shortcut is gone.
|
||||
- **Privacy screen resumes video on un-hide.** `_toggle_privacy` now calls `resume()` on the active video player on the privacy-off branch, mirroring the existing `pause()` calls on the privacy-on branch. The popout's privacy overlay also moved from "hide the popout window" to "raise an in-place black overlay over the popout's central widget" because Wayland's hide → show round-trip drops window position when the compositor unmaps and remaps; an in-place overlay sidesteps the issue.
|
||||
- **VideoPlayer mute state preservation.** When the popout opens, the embedded preview's mute state was synced into the popout's `VideoPlayer` before the popout's mpv instance was created (mpv is wired lazily on first `set_media`). The sync silently disappeared because the `is_muted` setter only forwarded to mpv if mpv existed. Now there's a `_pending_mute` field that the setter writes to unconditionally; `_ensure_mpv` replays it into the freshly-created mpv. Same pattern as the existing volume-from-slider replay.
|
||||
- **Search count + end-of-results instrumentation.** `_do_search` and `_on_reached_bottom` now log per-filter drop counts (`bl_tags`, `bl_posts`, `dedup`), `api_returned`, `kept`, and the `at_end` decision at DEBUG level. Distinguishes "API ran out of posts" from "client-side filters trimmed the page" for the next reproduction. This is instrumentation, not a fix; the underlying intermittent end-of-results bug is still under investigation.
|
||||
|
||||
### Popout state machine refactor
|
||||
|
||||
In the past two weeks, five popout race fixes had landed (`baa910a`, `5a44593`, `7d19555`, `fda3b10`, `31d02d3`), each correct in isolation but fitting the same pattern: a perf round shifted timing, a latent race surfaced, a defensive layer was added. The pattern was emergent from the popout's signal-and-callback architecture, not from any one specific bug. Every defensive layer added a timestamp-based suppression window that the next race fix would have to navigate around.
|
||||
|
||||
This release rebuilds the popout's internal state as an explicit state machine. The 1136-line `FullscreenPreview` god-class became a thin Qt adapter on top of a pure-Python state machine, with the historical race fixes enforced structurally instead of by suppression windows. 16 commits.
|
||||
|
||||
The state machine has 6 states (`AwaitingContent`, `DisplayingImage`, `LoadingVideo`, `PlayingVideo`, `SeekingVideo`, `Closing`), 17 events, and 14 effects. The pure-Python core lives in `popout/state.py` and `popout/effects.py` and imports nothing from PySide6, mpv, or httpx. The Qt-side adapter in `popout/window.py` translates Qt events into state machine events and applies the returned effects to widgets; it never makes decisions about what to do.
|
||||
|
||||
The race fixes that were timestamp windows in the previous code are now structural transitions:
|
||||
|
||||
- **EOF race.** `VideoEofReached` is only legal in `PlayingVideo`. In every other state (most importantly `LoadingVideo`, where the stale-eof race lived), the event is dropped at the dispatch boundary without changing state or emitting effects. Replaces the 250ms `_eof_ignore_until` timestamp window that the previous code used to suppress stale eof events from a previous video's stop.
|
||||
- **Double-load race.** `NavigateRequested` from a media-bearing state transitions to `AwaitingContent` once. A second `NavigateRequested` while still in `AwaitingContent` re-emits the navigate signal but does not re-stop or re-load. The state machine never produces two `LoadVideo` / `LoadImage` effects for the same navigation cycle, regardless of how many `NavigateRequested` events the eventFilter dispatches.
|
||||
- **Persistent viewport.** The viewport (center + long_side) is a state machine field, only mutated by user-action events (`WindowMoved`, `WindowResized`, or `HyprlandDriftDetected`). Never overwritten by reading the previous fit's output. Replaces the per-nav drift accumulation that the previous "recompute viewport from current state" shortcut produced.
|
||||
- **F11 round-trip.** Entering fullscreen snapshots the current viewport into a separate `pre_fullscreen_viewport` field. Exiting restores from the snapshot. The pre-fullscreen viewport is the captured value at the moment of entering, regardless of how the user got there.
|
||||
- **Seek slider pin.** `SeekingVideo` state holds the user's click target. The slider rendering reads from the state machine: while in `SeekingVideo`, the displayed value is the click target; otherwise it's mpv's actual `time_pos`. `SeekCompleted` (from mpv's `playback-restart` event) transitions back to `PlayingVideo`. No timestamp window.
|
||||
- **Pending mute.** The mute / volume / loop_mode values are state machine fields. `MuteToggleRequested` flips the field regardless of which state the machine is in. The `PlayingVideo` entry handler emits `[ApplyMute, ApplyVolume, ApplyLoopMode]` so the persistent values land in the freshly-loaded video on every load cycle.
|
||||
|
||||
The Qt adapter's interface to `main_window.py` was also cleaned up. Previously `main_window.py` reached into `_fullscreen_window._video.X`, `_fullscreen_window._stack.currentIndex()`, `_fullscreen_window._bookmark_btn.setVisible(...)`, and similar private-attribute access at ~25 sites. Those are gone. Nine new public methods on `FullscreenPreview` replace them: `is_video_active`, `set_toolbar_visibility`, `sync_video_state`, `get_video_state`, `seek_video_to`, `connect_media_ready_once`, `pause_media`, `force_mpv_pause`, `stop_media`. Existing methods (`set_media`, `update_state`, `set_post_tags`, `privacy_hide`, `privacy_show`) are preserved unchanged.
|
||||
|
||||
A new debug environment variable `BOORU_VIEWER_STRICT_STATE=1` raises an `InvalidTransition` exception on illegal (state, event) pairs in the state machine. Default release mode drops + logs at debug.
|
||||
|
||||
### Slider drag-back race fixed
|
||||
|
||||
The slider's `_seek` method used `mpv.seek(pos / 1000.0, 'absolute')` (keyframe-only seek). On videos with sparse keyframes (typical 1-5s GOP), mpv lands on the nearest keyframe at-or-before the click position, which is up to 5 seconds behind where the user actually clicked. The 500ms pin window from the earlier fix sweep papered over this for half a second, but afterwards the slider visibly dragged back to mpv's keyframe-rounded position and crawled forward.
|
||||
|
||||
- **`'absolute' → 'absolute+exact'`** in `VideoPlayer._seek`. Aligns the slider with `seek_to_ms` and `_seek_relative`, which were already using exact seek. mpv decodes from the previous keyframe forward to the EXACT target position before reporting it via `time_pos`. Costs 30-100ms more per seek but lands at the exact click position. No more drag-back. Affects both the embedded preview and the popout because they share the `VideoPlayer` class.
|
||||
- **Legacy 500ms pin window removed.** Now redundant after the exact-seek fix. The supporting fields (`_seek_target_ms`, `_seek_pending_until`, `_seek_pin_window_secs`) are gone, `_seek` is one line, `_poll`'s slider write is unconditional after the `isSliderDown()` check.
|
||||
|
||||
### Grid layout fix
|
||||
|
||||
The grid was collapsing by a column when switching to a post in some scenarios. Two compounding issues.
|
||||
|
||||
- **The flow layout's wrap loop was vulnerable to per-cell width drift.** It walked each thumb summing `widget.width() + THUMB_SPACING` and wrapped on `x + item_w > self.width()`. If `THUMB_SIZE` was changed at runtime via Settings, existing thumbs kept their old `setFixedSize` value while new ones from infinite-scroll backfill got the new value. Mixed widths break a width-summing wrap loop.
|
||||
- **The `columns` property had an off-by-one** at column boundaries because it omitted the leading margin from `w // (THUMB_SIZE + THUMB_SPACING)`. A row that fits N thumbs needs `THUMB_SPACING + N * step` pixels, not `N * step`. The visible symptom was that keyboard Up/Down navigation step was off-by-one in the boundary range.
|
||||
- **Fix.** The flow layout now computes column count once via `(width - THUMB_SPACING) // step` and positions thumbs by `(col, row)` index, with no per-widget `widget.width()` reads. The `columns` property uses the EXACT same formula so keyboard nav matches the visual layout at every window width. Affects all three tabs (Browse / Bookmarks / Library) since they all use the same `ThumbnailGrid`.
|
||||
|
||||
### Other fixes
|
||||
|
||||
These two landed right after v0.2.2 was tagged but before the structural refactor started.
|
||||
|
||||
- **Popout video load performance.** mpv URL streaming for uncached videos via a new `video_stream` signal that hands the remote URL to mpv directly instead of waiting for the cache download to finish. mpv fast-load options `vd_lavc_fast` and `vd_lavc_skiploopfilter=nonkey`. GL pre-warm at popout open via a `showEvent` calling `ensure_gl_init` so the first video click doesn't pay for context creation. Identical-rect skip in `_fit_to_content` so back-to-back same-aspect navigation doesn't redundantly dispatch hyprctl. Plus three race-defense layers: pause-on-activate at the top of `_on_post_activated`, the 250ms stale-eof suppression window in VideoPlayer that the state machine refactor later subsumed, and removed redundant `_update_fullscreen` calls from `_navigate_fullscreen` and `_on_video_end_next` that were re-loading the previous post's path with a stale value.
|
||||
- **Double-activation race fix in `_navigate_preview`.** Removed a redundant `_on_post_activated` call from all five view types (browse, bookmarks normal, bookmarks wrap-edge, library normal, library wrap-edge). `_select(idx)` already chains through `post_selected` which already calls `_on_post_activated`, so calling it explicitly again was a duplicate that fired the activation handler twice per keyboard nav.
|
||||
|
||||
## v0.2.2
|
||||
|
||||
A hardening + decoupling release. Bookmark folders and library folders are no longer the same thing under the hood, the `core/` layers get a defensive hardening pass, the async/DB layers get a real concurrency refactor, and the README finally articulates what this project is.
|
||||
|
||||
### Bookmarks ↔ Library decoupling
|
||||
|
||||
- **Bookmark folders and library folders are now independent namespaces.** Used to share identity through `_db.get_folders()` — the same string was both a row in `favorite_folders` and a directory under `saved_dir`. The cross-bleed produced a duplicate-on-move bug and made "Save to Library" silently re-file the bookmark. Now they're two stores: bookmark folders are DB-backed labels for organizing your bookmark list, library folders are real subdirectories of `saved/` for organizing files on disk.
|
||||
- **`library_folders()`** in `core.config` is the new source of truth for every Save-to-Library menu — reads filesystem subdirs of `saved_dir` directly.
|
||||
- **`find_library_files(post_id)`** is the new "is this saved?" / delete primitive — walks the library shallowly by post id.
|
||||
- **Move-aware Save to Library.** If the post is already in another library folder, atomic `Path.rename()` into the destination instead of re-copying from cache. Also fixes the duplicate-on-move bug.
|
||||
- **Library tab right-click: Move to Folder submenu** for both single and multi-select, using `Path.rename` for atomic moves.
|
||||
- **Bookmarks tab: − Folder button** next to + Folder for deleting the selected bookmark folder. DB-only, library filesystem untouched.
|
||||
- **Browse tab right-click: "Bookmark as" submenu** when a post is not yet bookmarked (Unfiled / your bookmark folders / + New); flat "Remove Bookmark" when already bookmarked.
|
||||
- **Embedded preview Bookmark button** got the same submenu shape via a new `bookmark_to_folder` signal + `set_bookmark_folders_callback`.
|
||||
- **Popout Bookmark and Save buttons** both got the submenu treatment; works in both Browse and Bookmarks tab modes.
|
||||
- **Popout in library mode** keeps the Save button visible as Unsave; the rest of the toolbar (Bookmark / BL Tag / BL Post) is hidden since they don't apply.
|
||||
- **Popout state drift fixed.** `_update_fullscreen_state` now mirrors the embedded preview's `_is_bookmarked` / `_is_saved` instead of re-querying DB+filesystem, eliminating a state race during async bookmark adds.
|
||||
- **"Unsorted" renamed to "Unfiled"** everywhere user-facing. Library Unfiled and bookmarks Unfiled now share one label.
|
||||
- `favorite_folders` table preserved for backward compatibility — no migration required.
|
||||
|
||||
### Concurrency refactor
|
||||
|
||||
The earlier worker pattern of `threading.Thread + asyncio.run` harbored a real loop-affinity bug. The first throwaway loop a worker constructed would bind the shared httpx clients, and the next call from the persistent loop would fail with "Event loop is closed". This release routes everything through one loop and adds the locking and cleanup that should have been there from the start.
|
||||
|
||||
- **`core/concurrency.py`** is a new module: `set_app_loop()` / `get_app_loop()` / `run_on_app_loop()`. Every async piece of work in the GUI now schedules through one persistent loop, registered at startup by `BooruApp`.
|
||||
- **`gui/sites.py` SiteDialog** Detect and Test buttons now route through `run_on_app_loop` instead of spawning a daemon thread. Results marshal back via Qt Signals with `QueuedConnection`. The dialog tracks in-flight futures and cancels them on close so a mid-detect dialog dismissal doesn't poke a destroyed QObject.
|
||||
- **`gui/bookmarks.py` thumbnail loader** got the same swap. The existing `thumb_ready` signal already marshaled correctly.
|
||||
- **Lazy-init lock on shared httpx clients.** `BooruClient._shared_client`, `E621Client._e621_client`, and `cache._shared_client` all use a fast-path / locked-slow-path lazy init. Concurrent first-callers can no longer both build a client and leak one.
|
||||
- **`E621Client` UA-change leftover tracking.** When the User-Agent changes (api_user edit) and a new client is built, the old one is stashed in `_e621_to_close` and drained at shutdown instead of leaking.
|
||||
- **`aclose_shared` on shutdown.** `BooruApp.closeEvent` now runs an `_close_all` coroutine via `run_coroutine_threadsafe(...).result(timeout=5)` before stopping the loop. Connection pools, keepalive sockets, and TLS state release cleanly instead of being abandoned.
|
||||
- **`Database._write_lock` (RLock) + new `_write()` context manager.** Every write method now serializes through one lock so the asyncio thread and the Qt main thread can't interleave multi-statement writes. RLock so a writing method can call another writing method on the same thread without self-deadlocking. Reads stay lock-free under WAL.
|
||||
|
||||
### Defensive hardening
|
||||
|
||||
- **DB transactions.** `delete_site`, `add_search_history`, `remove_folder`, `rename_folder`, and `_migrate` now wrap their multi-statement bodies in `with self.conn:` so a crash mid-method can't leave orphan rows.
|
||||
- **`add_bookmark` lastrowid fix.** When `INSERT OR IGNORE` collides on `(site_id, post_id)`, `lastrowid` is stale; the method now re-`SELECT`s the existing id. Was returning `Bookmark(id=0)` silently, which then no-op'd `update_bookmark_cache_path` on the next bookmark.
|
||||
- **LIKE wildcard escape.** `get_bookmarks` LIKE clauses now `ESCAPE '\\'` so user search literals stop acting as SQL wildcards (`cat_ear` no longer matches `catbear`).
|
||||
- **Path traversal guard on folder names.** New `_validate_folder_name` rejects `..`, path separators, and leading `.`/`~` at write time. `saved_folder_dir()` resolves the candidate and refuses anything that doesn't `relative_to` the saved-images base.
|
||||
- **Download size cap and streaming.** `download_image` enforces a 500 MB hard cap against the advertised Content-Length and the running total inside the chunk loop (servers can lie). Payloads ≥ 50 MB stream to a tempfile and atomic `os.replace` instead of buffering in RAM.
|
||||
- **Per-URL coalesce lock.** `defaultdict[str, asyncio.Lock]` keyed by URL hash so concurrent callers downloading the same URL don't race `write_bytes`.
|
||||
- **`Image.MAX_IMAGE_PIXELS = 256M`** with `DecompressionBombError` handling in both PIL converters.
|
||||
- **Ugoira zip-bomb caps.** Frame count and cumulative uncompressed size checked from `ZipInfo` headers before any decompression.
|
||||
- **`_convert_animated_to_gif` failure cache.** Writes a `.convfailed` sentinel sibling on failure to break the re-decode-every-paint loop for malformed animated PNGs/WebPs.
|
||||
- **`_is_valid_media` distinguishes IO errors from "definitely invalid".** Returns `True` (don't delete) on `OSError` so a transient EBUSY/permissions hiccup no longer triggers a delete + re-download loop.
|
||||
- **Hostname suffix matching for Referer.** Was using substring `in` matching, which meant `imgblahgelbooru.attacker.com` falsely mapped to `gelbooru.com`. Now uses proper suffix check.
|
||||
- **`_request` retries on `httpx.NetworkError` and `httpx.ConnectError`** in addition to `TimeoutException`. A single DNS hiccup or RST no longer blows up the whole search.
|
||||
- **`test_connection` no longer echoes the response body** in error strings. It was a body-leak gadget when used via `detect_site_type`'s redirect-following client.
|
||||
- **Exception logging across `detect`, `search`, and `autocomplete`** in every API client. Previously every failure was a silent `return []`; now every swallowed exception logs at WARNING with type, message, and (where relevant) the response body prefix.
|
||||
- **`main_gui.py`** `file_dialog_platform` DB probe failure now prints to stderr instead of vanishing.
|
||||
- **Folder name validation surfaced as `QMessageBox.warning`** in `gui/bookmarks.py` and `gui/app.py` instead of crashing when a user types something the validator rejects.
|
||||
|
||||
### Popout overlay fix
|
||||
|
||||
- **`WA_StyledBackground` set on `_slideshow_toolbar` and `_slideshow_controls`.** Plain `QWidget` parents silently ignore QSS `background:` declarations without this attribute, which is why the popout overlay strip was rendering fully transparent (buttons styled, but the bar behind them showing the letterbox color).
|
||||
- **Base popout overlay style baked into the QSS loader.** `_BASE_POPOUT_OVERLAY_QSS` is prepended before the user's `custom.qss` so themes that don't define overlay rules still get a usable translucent black bar with white text. Bundled themes still override on the same selectors.
|
||||
|
||||
### Popout aspect-ratio handling
|
||||
|
||||
The popout viewer's aspect handling had been patch-thrashing for ~20 commits since 0.2.0. A cold-context audit mapped 13 distinct failure modes still live in the code; this release closes the four highest-impact ones.
|
||||
|
||||
- **Width-anchor ratchet broken.** The previous `_fit_to_content` was width-anchored: `start_w = self.width()` read the current window width and derived height from aspect, with a back-derive if height exceeded the cap. Width was the only stable reference, and because portrait content has aspect < 1 and the height cap (90% of screen) was tighter than the width cap (100%), every portrait visit ran the back-derive and permanently shrunk the window. Going P→L→P→L→P on a 1080p screen produced a visibly smaller landscape on each loop.
|
||||
- **New `Viewport(center_x, center_y, long_side)` model.** Three numbers, no aspect. Aspect is recomputed from content on every nav. The new `_compute_window_rect(viewport, content_aspect, screen)` is a pure static method: symmetric across portrait/landscape (`long_side` becomes width for landscape and height for portrait), proportional clamp shrinks both edges by the same factor when either would exceed its 0.90 ceiling, no asymmetric clamp constants, no back-derive step.
|
||||
- **Viewport derived per-call from existing state.** No persistent field, no `moveEvent`/`resizeEvent` hooks needed for the basic ratchet fix. Three priority sources: pending one-shots (first fit after open or F11 exit) → current Hyprland window position+size → current Qt geometry. The Hyprland-current source captures whatever the user has dragged the popout to, so the next nav respects manual resizes.
|
||||
- **First-fit aspect-lock race fixed.** `_fit_to_content` used to call `_is_hypr_floating` which returned `None` for both "not Hyprland" and "Hyprland but the window isn't visible to hyprctl yet". The latter happens on the very first popout open because the `wm:openWindow` event hasn't been processed when `set_media` fires. The method then fell through to a plain Qt resize and skipped the `keep_aspect_ratio` setprop, so the first image always opened unlocked and only subsequent navigations got the right shape. Now inlines the env-var check, distinguishes the two `None` cases, and retries on Hyprland with a 40ms backoff (capped at 5 attempts / 200ms total) when the window isn't registered yet.
|
||||
- **Non-Hyprland top-left drift fixed.** The Qt fallback branch used to call `self.resize(w, h)`, which anchors top-left and lets bottom-right drift. The popout center walked toward the upper-left of the screen across navigations on Qt-driven WMs. Now uses `self.setGeometry(QRect(x, y, w, h))` with the computed top-left so the center stays put.
|
||||
|
||||
### Image fill in popout and embedded preview
|
||||
|
||||
- **`ImageViewer._fit_to_view` no longer caps zoom at native pixel size.** Used `min(scale_w, scale_h, 1.0)` so a smaller image in a larger window centered with letterbox space around it. The `1.0` cap is gone — images scale up to fill the available view, matching how the video player fills its widget. Combined with the popout's `keep_aspect_ratio`, the window matches the image's aspect AND the image fills it cleanly. Tiled popouts with mismatched aspect still letterbox (intentional — the layout owns the window shape).
|
||||
|
||||
### Main app flash and popout resize speed
|
||||
|
||||
- **Suppress dl_progress widget when the popout is open.** The download progress bar at the bottom of the right splitter was unconditionally `show()`'d on every grid click, including when the popout was open and the right splitter had been collapsed to give the grid full width. The show/hide pulse forced a layout pass on the right splitter that briefly compressed the main grid before the download finished and `hide()` fired. Visible flash on every click in the main app, even when clicking the same post that was already loaded (because `download_image` still runs against the cache). Three callsites now skip the widget entirely when the popout is visible. The status bar still updates with `Loading #X...` so the user has feedback in the main window.
|
||||
- **Cache `_hyprctl_get_window` across one fit call.** `_fit_to_content` used to call `hyprctl clients -j` three times per popout navigation: once at the top for the floating check, once inside `_derive_viewport_for_fit` for the position/size read, and once inside `_hyprctl_resize_and_move` for the address lookup. Each call is a ~3ms `subprocess.run` that blocks the Qt event loop, totalling ~9ms of UI freeze per nav. The two helpers now accept an optional `win=None` parameter; `_fit_to_content` fetches the window dict once and threads it down. Per-fit subprocess count drops from 3 to 1 (~6ms saved per navigation), making rapid clicking and aspect-flip transitions feel snappier.
|
||||
- **Show download progress on the active thumbnail when the embedded preview is hidden.** After the dl_progress suppression above landed, the user lost all visible download feedback in the main app whenever the popout was open. `_on_post_activated` now decides per call whether to use the dl_progress widget at the bottom of the right splitter or fall back to drawing the download progress on the active thumbnail in the main grid via the existing prefetch-progress paint path (`set_prefetch_progress(0.0..1.0)` to fill, `set_prefetch_progress(-1)` to clear). The decision is captured at function entry as `preview_hidden = not (self._preview.isVisible() and self._preview.width() > 0)` and closed over by the `_progress` callback and the `_load` coroutine, so the indicator that starts on a download stays on the same target even if the user opens or closes the popout mid-download. Generalizes to any reason the preview is hidden, not just popout-open: a user who has dragged the main splitter to collapse the preview gets the thumbnail indicator now too.
|
||||
|
||||
### Popout overlay stays hidden across navigation
|
||||
|
||||
- **Stop auto-showing the popout overlay on every `set_media`.** `FullscreenPreview.set_media` ended with an unconditional `self._show_overlay()` call, which meant the floating toolbar and video controls bar popped back into view on every left/right/hjkl navigation between posts. Visually noisy and not what the overlay is for — it's supposed to be a hover-triggered surface, not a per-post popup. Removed the call. The overlay is still shown by `__init__` default state (`_ui_visible = True`, so the user sees it for ~2 seconds on first popout open and the auto-hide timer hides it after that), by `eventFilter` mouse-move-into-top/bottom-edge-zone (the intended hover trigger, unchanged), by volume scroll on the video stack (unchanged), and by `Ctrl+H` toggle (unchanged). After this, the only way the overlay appears mid-session is hover or `Ctrl+H` — navigation through posts no longer flashes it back into view.
|
||||
|
||||
### Discord screen-share audio capture
|
||||
|
||||
- **`ao=pulse` in the mpv constructor.** mpv defaults to `ao=pipewire` (native PipeWire audio output) on Linux. Discord's screen-share-with-audio capture on Linux only enumerates clients connected via the libpulse API; native PipeWire clients are invisible to it. Visible symptom: video plays locally fine but audio is silently dropped from any Discord screen share. Firefox works because Firefox uses libpulse to talk to PipeWire's pulseaudio compat layer. Setting `ao="pulse,wasapi,"` in the MPV constructor (comma-separated priority list, mpv tries each in order) routes mpv through the same pulseaudio compat layer Firefox uses. `pulse` works on Linux; `wasapi` is the Windows fallback; trailing empty falls through to mpv's compiled-in default. No platform branch needed — mpv silently skips audio outputs that aren't available. Verified by inspection: with the fix, mpv's sink-input has `module-stream-restore.id = "sink-input-by-application-name:booru-viewer"` (the pulse-protocol form, identical to Firefox) instead of `"sink-input-by-application-id:booru-viewer"` (the native-pipewire form). References: [mpv #11100](https://github.com/mpv-player/mpv/issues/11100), [edisionnano/Screenshare-with-audio-on-Discord-with-Linux](https://github.com/edisionnano/Screenshare-with-audio-on-Discord-with-Linux).
|
||||
- **`audio_client_name="booru-viewer"` in the mpv constructor.** mpv now registers in pulseaudio/pipewire introspection as `booru-viewer` instead of the default "mpv Media Player". Sets `application.name`, `application.id`, `application.icon_name`, `node.name`, and `device.description` to `booru-viewer` so capture tools group mpv's audio under the same identity as the Qt application.
|
||||
|
||||
### Docs
|
||||
|
||||
- **README repositioning.** New "Why booru-viewer" section between Screenshots and Features that names ahoviewer, Grabber, and Hydrus, lays out the labor axis (who does the filing) and the desktop axis (Hyprland/Wayland targeting), and explains the bookmark/library two-tier model with the browser-bookmark analogy.
|
||||
- **New tagline** that does positioning instead of category description.
|
||||
- **Bookmarks and Library Features sections split** to remove the previous intertwining; each now describes its own folder concept clearly.
|
||||
- **Backup recipe** in Data Locations explaining the `saved/` + `booru.db` split and the recovery path.
|
||||
- **Theming section** notes that each bundled theme ships in `*-rounded.qss` and `*-square.qss` variants.
|
||||
|
||||
### Fixes & polish
|
||||
|
||||
- **Drop the unused "Size: WxH" line from the InfoPanel** — bookmarks and library never had width/height plumbed and the field just showed 0×0.
|
||||
- **Tighter combo and button padding across all 12 bundled themes.** `QPushButton` padding 2px 8px → 2px 6px, `QComboBox` padding 2px 6px → 2px 4px, `QComboBox::drop-down` width 18px → 14px. Saves 8px non-text width per combo and 4px per button.
|
||||
- **Library sort combo: new "Post ID" entry** with a numeric stem sort that handles non-digit stems gracefully. Fits in 75px instead of needing 90px after the padding tightening.
|
||||
- **Score and page spinboxes 50px → 40px** in the top toolbar to recover horizontal space. The internal range (0–99999) is unchanged; values >9999 will visually clip at the right edge but the stored value is preserved.
|
||||
|
||||
## v0.2.1
|
||||
|
||||
A theme + persistence + ricer-friendliness release. The whole stylesheet system was rebuilt around a runtime preprocessor with `@palette` / `${name}` vars, every bundled theme was rewritten end-to-end, and 12 theme variants ship instead of 6. Lots of UI state now survives a restart, and Hyprland ricers get an explicit opt-out for the in-code window management.
|
||||
|
||||
This release does not ship a fresh Windows installer — the previous v0.2.0 installer remains the latest installable binary. Run from source to get 0.2.1, or wait for the next release.
|
||||
|
||||
### Theming System
|
||||
|
||||
- **`@palette` / `${name}` preprocessor** — themes start with a `/* @palette */` header block listing color slots; the body uses `${name}` placeholders that the app substitutes at load time. Edit the 17-slot palette block at the top of any theme to recolor the entire app — no hunting through hex literals.
|
||||
- **All 6 bundled themes rewritten** with comprehensive Fusion-style QSS covering every widget the app uses, every state (hover, focus, disabled, checked), every control variant
|
||||
- **Two corner-radius variants per theme** — `*-rounded.qss` (4px radius, default Fusion-style look) and `*-square.qss` (every border-radius stripped except radio buttons, which stay circular)
|
||||
- **Native Fusion sizing** — themed widgets shrunk to match Qt+Fusion defaults, toolbar row height is now ~23px instead of 30px, matching what `no-custom.qss` renders
|
||||
- **Bundled themes** — catppuccin-mocha, nord, gruvbox, solarized-dark, tokyo-night, everforest. 12 files total (6 themes × 2 variants)
|
||||
|
||||
### QSS-Targetable Surfaces
|
||||
|
||||
Many things hardcoded in Python paint code can now be overridden from a `custom.qss` without touching the source:
|
||||
|
||||
- **InfoPanel tag category colors** — `qproperty-tagArtistColor`, `tagCharacterColor`, `tagCopyrightColor`, `tagSpeciesColor`, `tagMetaColor`, `tagLoreColor`
|
||||
- **ThumbnailWidget selection paint** — `qproperty-selectionColor`, `multiSelectColor`, `hoverColor`, `idleColor` (in addition to existing `savedColor` and `bookmarkedColor`)
|
||||
- **VideoPlayer letterbox color** — `qproperty-letterboxColor`. mpv paints the area around the video frame in this color instead of hardcoded black. Defaults to `QPalette.Window` so KDE color schemes, qt6ct, Windows dark/light mode, and any system Qt theme automatically produce a matching letterbox
|
||||
- **Popout overlay bars** — translucent background for the floating top toolbar and bottom controls bar via the `overlay_bg` palette slot
|
||||
- **Library count label states** — `QLabel[libraryCountState="..."]` attribute selector distinguishes "N files" / "no items match" / "directory unreachable" with QSS-controlled colors instead of inline red
|
||||
|
||||
### Hyprland Integration
|
||||
|
||||
- **Two opt-out env vars** for users with their own windowrules:
|
||||
- `BOORU_VIEWER_NO_HYPR_RULES=1` — disables every in-code hyprctl dispatch except the popout's keep_aspect_ratio lock
|
||||
- `BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1` — independently disables the popout's aspect ratio enforcement
|
||||
- **Popout overlays themed** — top toolbar and bottom controls bar now look themed instead of hardcoded translucent black, respect the `@palette` `overlay_bg` slot
|
||||
- **Popout video letterbox tracks the theme's bg color** via the new `qproperty-letterboxColor`
|
||||
- **Wayland app_id** set via `setDesktopFileName("booru-viewer")` so compositors can target windows by class — `windowrule = float, class:^(booru-viewer)$` — instead of by the volatile window title
|
||||
|
||||
### State Persistence
|
||||
|
||||
- **Main window** — geometry, floating mode, tiled mode (Hyprland)
|
||||
- **Splitter sizes** — main splitter (grid vs preview), right splitter (preview vs dl_progress vs info panel)
|
||||
- **Info panel visibility**
|
||||
- **Cache spinbox** — auto-derives the settings dialog's minimum height (no more clipping when dragging the settings dialog small)
|
||||
- **Popout window** position, dimensions, and F11 fullscreen state restored via Hyprland floating cache prime
|
||||
|
||||
### UX
|
||||
|
||||
- **Live debounced search** in bookmarks and library tabs — type to filter, press Enter to commit immediately. 150ms debounce on bookmarks (cheap SQLite), 250ms on library (filesystem scan)
|
||||
- **Search button removed** from bookmarks toolbar (live search + Enter)
|
||||
- **Score field +/- buttons removed** from main search bar — type the value directly
|
||||
- **Embedded preview video controls** moved out of the overlay style and into the panel layout, sitting under the media instead of floating on top of it. Popout still uses the floating overlay
|
||||
- **Next-mode loop wraps** to the start of the bookmarks/library list after the last item instead of stopping
|
||||
- **Splitter handle margins** — 4px breathing margin on either side so toolbar buttons don't sit flush against the splitter line
|
||||
|
||||
### Performance
|
||||
|
||||
- **Page-load thumbnails** pre-fetch bookmarks + cache state into set lookups instead of N synchronous SQLite queries per page
|
||||
- **Animated PNG/WebP conversion** off-loaded to a worker thread via `asyncio.to_thread` so it doesn't block the asyncio event loop during downloads
|
||||
|
||||
### Fixes
|
||||
|
||||
- **Open in Browser/Default App** on the bookmarks tab now opens the bookmark's actual source post (was opening unrelated cached files)
|
||||
- **Cache settings spinboxes** can no longer be vertically clipped at the dialog's minimum size; spinboxes use Python-side `setMinimumHeight()` to propagate floors up the layout chain
|
||||
- **Settings dialog** uses side-by-side `+`/`-` buttons instead of QSpinBox's default vertical arrows for clearer interaction
|
||||
- **Bookmarks tab BL Tag** refreshes correctly when navigating bookmarked posts (was caching stale tags from the first selection)
|
||||
- **Popout F11 → windowed** restores its previous windowed position and dimensions
|
||||
- **Popout flicker on F11** transitions eliminated via `no_anim` setprop + deferred fit + dedupe of mpv `video-params` events
|
||||
- **Bookmark + saved indicator dots** in the thumbnail grid: bookmark star on left, saved dot on right, both vertically aligned in a fixed-size box
|
||||
- **Selection border** on thumbnail cells redrawn pen-aware: square geometry (no rounded corner artifacts), even line width on all sides, no off-by-one anti-aliasing seams
|
||||
- **Toolbar buttons in narrow slots** no longer clip text (Bookmark/Unbookmark, Save/Unsave, BL Tag, BL Post, Popout, + Folder, Refresh) — all bumped to fit "Unbookmark" comfortably under the bundled themes' button padding
|
||||
- **Toolbar rows** on bookmarks/library/preview panels now sit at a uniform 23px height matching the inputs/combos in the same row
|
||||
- **Score and Page spinbox heights** forced to 23px via `setFixedHeight` to work around QSpinBox reserving vertical space for arrow buttons even when `setButtonSymbols(NoButtons)` is set
|
||||
- **Library Open in Default App** uses the actual file path instead of routing through `cached_path_for` (which would return a hash path that doesn't exist for library files)
|
||||
|
||||
### Cleanup
|
||||
|
||||
- Deleted unused `booru_viewer/gui/theme.py` (222 lines of legacy stylesheet template that was never imported)
|
||||
- Deleted `GREEN`/`DARK_GREEN`/`DIM_GREEN`/`BG`/`BG_LIGHT` etc constants from `booru_viewer/core/config.py` (only `theme.py` used them)
|
||||
- Removed dead missing-indicator code (`set_missing`, `_missing_color`, `missingColor` Qt Property, the unreachable `if not filepath.exists()` branch in `library.refresh`)
|
||||
- Removed dead score `+`/`-` buttons code path
|
||||
|
||||
## v0.2.0
|
||||
|
||||
### New: mpv video backend
|
||||
|
||||
- Replaced Qt Multimedia (QMediaPlayer/QVideoWidget) with embedded mpv via `python-mpv`
|
||||
- OpenGL render API (`MpvRenderContext`) for Wayland-native compositing — no XWayland needed
|
||||
- Proper hardware-accelerated decoding (`hwdec=auto`)
|
||||
@ -13,6 +528,7 @@
|
||||
- Windows: bundle `mpv-2.dll` in PyInstaller build
|
||||
|
||||
### New: popout viewer (renamed from slideshow)
|
||||
|
||||
- Renamed "Slideshow" to "Popout" throughout UI
|
||||
- Toolbar and video controls float over media with translucent background (`rgba(0,0,0,160)`)
|
||||
- Auto-hide after 2 seconds of inactivity, reappear on mouse move
|
||||
@ -26,6 +542,7 @@
|
||||
- Default site setting in Settings > General
|
||||
|
||||
### New: preview toolbar
|
||||
|
||||
- Action bar above the preview panel: Bookmark, Save, BL Tag, BL Post, Popout
|
||||
- Appears when a post is active, hidden when preview is cleared
|
||||
- Save button opens folder picker menu (Unsorted / existing folders / + New Folder)
|
||||
@ -37,14 +554,17 @@
|
||||
- "Unsave from Library" only appears in context menu when post is saved
|
||||
|
||||
### New: media type filter
|
||||
|
||||
- Replaced "Animated" checkbox with dropdown: All / Animated / Video / GIF / Audio
|
||||
- Each option appends the corresponding booru tag to the search query
|
||||
|
||||
### New: thumbnail cache limits
|
||||
|
||||
- Added "Max thumbnail cache" setting (default 500 MB)
|
||||
- Auto-evicts oldest thumbnails when limit is reached
|
||||
|
||||
### Improved: state synchronization
|
||||
|
||||
- Saving/unsaving updates grid thumbnail dots instantly (browse, bookmarks, library)
|
||||
- Unbookmarking refreshes the bookmarks tab immediately
|
||||
- Saving from browse/bookmarks refreshes the library tab when async save completes
|
||||
@ -54,31 +574,27 @@
|
||||
- Bookmark state updates after async bookmark completes (not before)
|
||||
|
||||
### Improved: infinite scroll
|
||||
|
||||
- Fixed missing posts when media type filters reduce results per page
|
||||
- Local dedup set (`seen`) prevents cross-page duplicates within backfill without polluting `shown_post_ids`
|
||||
- Page counter only advances when results are returned, not when filtering empties them
|
||||
- Backfill loop increased to 10 max pages with 300ms delay between API calls (first call instant)
|
||||
|
||||
### Improved: pagination
|
||||
|
||||
- Status bar shows "(end)" when search returns fewer results than page size
|
||||
- Prev/Next buttons hide when at page boundaries instead of just disabling
|
||||
- Source URLs clickable in info panel, truncated at 60 chars for display
|
||||
|
||||
### Changed: scroll tilt navigation
|
||||
- Scroll tilt left/right now navigates between posts everywhere — grid, embedded preview, and popout — mirroring the L/R keys
|
||||
- Grid: moves selection one cell, falls through to `nav_before_start` / `nav_past_end` at the edges
|
||||
- Preview/popout: emits the existing `navigate` signal (±1)
|
||||
- Vertical scroll still adjusts video volume on the video stack; tilt and vertical can no longer interfere
|
||||
- Fixed: tilting over the image preview no longer zooms the image out (latent bug — `angleDelta().y() == 0` on pure tilt fell into the zoom-out branch)
|
||||
- `page_forward` / `page_back` grid signals removed (only consumer was the old tilt handler)
|
||||
|
||||
### Improved: video controls
|
||||
|
||||
- Seek step changed from 5s to ~3s for `,` and `.` keys
|
||||
- `,` and `.` seek keys now work in the main preview panel, not just popout
|
||||
- Translucent overlay style on video controls in both preview and popout
|
||||
- Volume slider fixed at 60px to not compete with seek slider at small sizes
|
||||
|
||||
### New: API retry logic
|
||||
|
||||
- Single retry with backoff on HTTP 429 (rate limit) and 503 (service unavailable)
|
||||
- Retries on request timeout
|
||||
- Respects `Retry-After` header (capped at 5s)
|
||||
@ -86,10 +602,185 @@
|
||||
- Downloads are not retried (large payloads, separate client)
|
||||
|
||||
### Refactor: SearchState dataclass
|
||||
|
||||
- Consolidated 8 scattered search state attributes into a single `SearchState` dataclass
|
||||
- Eliminated all defensive `getattr`/`hasattr` patterns (8 instances)
|
||||
- State resets cleanly on new search — no stale infinite scroll data
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Added `python-mpv>=1.0`
|
||||
- Removed dependency on `PySide6.QtMultimedia` and `PySide6.QtMultimediaWidgets`
|
||||
|
||||
## v0.1.9
|
||||
|
||||
### New Features
|
||||
|
||||
- **Animated filter** — checkbox to only show animated/video posts (server-side `animated` tag)
|
||||
- **Start from page** — page number field in top bar, jump to any page on search
|
||||
- **Post date** — creation date shown in the info line
|
||||
- **Prefetch modes** — Off / Nearby (4 cardinals) / Aggressive (3 row radius)
|
||||
- **Animated PNG/WebP** — auto-converted to GIF for Qt playback
|
||||
|
||||
### Improvements
|
||||
|
||||
- Thumbnail selection/hover box hugs the actual image content
|
||||
- Video controls locked to bottom of preview panel
|
||||
- Score filter uses +/- buttons instead of spinbox arrows
|
||||
- Cache eviction triggers after infinite scroll page drain
|
||||
- Combobox dropdown styling fixed on Windows dark mode
|
||||
- Saved thumbnail size applied on startup
|
||||
|
||||
### Fixes
|
||||
|
||||
- Infinite scroll no longer stops early from false exhaustion
|
||||
- Infinite scroll triggers when viewport isn't full (initial load, splitter resize, window resize)
|
||||
- Shared HTTP clients reset on startup (prevents stale event loop errors)
|
||||
- Non-JSON API responses handled gracefully instead of crashing
|
||||
|
||||
## v0.1.8
|
||||
|
||||
### Windows Installer
|
||||
|
||||
- **Inno Setup installer** — proper Windows installer with Start Menu shortcut, optional desktop icon, and uninstaller
|
||||
- **`--onedir` build** — instant startup, no temp extraction (was `--onefile`)
|
||||
- **`optimize=2`** — stripped docstrings/asserts for smaller, faster bytecode
|
||||
- **No UPX** — trades disk space for faster launch (no decompression overhead)
|
||||
- **`noarchive`** — loose .pyc files, no zip decompression at startup
|
||||
|
||||
### Performance
|
||||
|
||||
- **Shared HTTP client for API calls** — single TLS handshake for all Danbooru/Gelbooru/Moebooru requests
|
||||
- **E621 shared client** — separate pooled client (custom User-Agent required)
|
||||
- **Site detection reuses shared client** — no extra TLS for auto-detect
|
||||
- **Priority downloads** — clicking a post pauses prefetch, downloads at full speed, resumes after
|
||||
- **Referer header per-request** — fixes Gelbooru CDN returning HTML captcha pages
|
||||
|
||||
### Infinite Scroll
|
||||
|
||||
- **Auto-fill viewport** — if first page doesn't fill the screen, auto-loads more
|
||||
- **Auto-load after drain** — checks if still at bottom after staggered append finishes
|
||||
- **Content-aware trigger** — fires when scrollbar max is 0 (no scroll needed)
|
||||
|
||||
### Library
|
||||
|
||||
- **Tag categories stored** — saved as JSON in both library_meta and bookmarks DB
|
||||
- **Categorized tags in info panel** — Library and Bookmarks show Artist/Character/Copyright etc.
|
||||
- **Tag search in Library** — search box filters by stored tags
|
||||
- **Browse thumbnail copied on save** — Library tab shows thumbnails instantly
|
||||
- **Unsave from Library** in bookmarks right-click menu
|
||||
|
||||
### Bugfixes
|
||||
|
||||
- **Clear preview on new search**
|
||||
- **Fixed diagonal grid navigation** — viewport width used for column count
|
||||
- **Fixed Gelbooru CDN** — Referer header passed per-request with shared client
|
||||
- **Crash guards** — pop(0) on empty queue, bounds checks in API clients
|
||||
- **Page cache capped** — 10 pages max in pagination mode
|
||||
- **Missing DB migrations** — tag_categories column added to existing tables
|
||||
- **Tag click switches to Browse** — clears preview and searches clicked tag
|
||||
|
||||
## v0.1.7
|
||||
|
||||
### Infinite Scroll
|
||||
|
||||
- **New mode** — toggle in Settings > General, applies live
|
||||
- Auto-loads more posts when scrolling to bottom
|
||||
- **Staggered loading** — posts appear one at a time as thumbnails arrive
|
||||
- **Stops at end** — gracefully handles API exhaustion
|
||||
- Arrow keys at bottom don't break the grid
|
||||
- Loading locked during drain to prevent multi-page burst
|
||||
- Triggered one row from bottom for seamless experience
|
||||
|
||||
### Page Cache & Deduplication
|
||||
|
||||
- Page results cached in memory — prev/next loads instantly
|
||||
- Backfilled posts don't repeat on subsequent pages
|
||||
- Page label updates on cached loads
|
||||
|
||||
### Prefetch
|
||||
|
||||
- **Ring expansion** — prefetches in all 8 directions (including diagonals)
|
||||
- **Auto-start on search** — begins from top of page immediately
|
||||
- **Re-centers on click** — restarts spiral from clicked post
|
||||
- **Triggers on infinite scroll** — new appended posts prefetch automatically
|
||||
|
||||
### Clipboard
|
||||
|
||||
- **Copy File to Clipboard** — works in grid, preview, bookmarks, and library
|
||||
- **Ctrl+C shortcut** — global shortcut via QShortcut
|
||||
- **QMimeData** — uses same mechanism as drag-and-drop for universal compatibility
|
||||
- Sets both file URL (for file managers) and image data (for Discord/image apps)
|
||||
- Videos copy as file URIs
|
||||
|
||||
### Slideshow
|
||||
|
||||
- **Blacklist Tag button** — opens categorized tag menu
|
||||
- **Blacklist Post button** — blacklists current post
|
||||
|
||||
### Blacklist
|
||||
|
||||
- **In-place removal** — blacklisting removes matching posts from grid without re-searching
|
||||
- Preserves infinite scroll state
|
||||
- Only clears preview when the blacklisted post is the one being viewed
|
||||
|
||||
### UI Polish
|
||||
|
||||
- **QProxyStyle dark arrows** — spinbox/combobox arrows visible on all dark QSS themes
|
||||
- **Diagonal nav fix** — column count reads viewport width correctly
|
||||
- **Status bar** — shows result count with action confirmations
|
||||
- **Live settings** — infinite scroll, library dir, thumbnail size apply without restart
|
||||
|
||||
### Stability
|
||||
|
||||
- All silent exceptions logged
|
||||
- Missing defaults added for fresh installs
|
||||
- Git history cleaned
|
||||
|
||||
## v0.1.6
|
||||
|
||||
### Infinite Scroll
|
||||
|
||||
- **New mode** — toggle in Settings > General: "Infinite scroll (replaces page buttons)"
|
||||
- Hides prev/next buttons, auto-loads more posts when scrolling to bottom
|
||||
- Posts appended to grid, deduped, blacklist filtered
|
||||
- Stops gracefully when API runs out of results (shows "end")
|
||||
- Arrow keys at bottom don't nuke the grid — page turn disabled in infinite scroll
|
||||
- Applies live — no restart needed
|
||||
|
||||
### Page Cache & Deduplication
|
||||
|
||||
- **Page results cached** — prev/next loads instantly from memory within a search session
|
||||
- **Post deduplication** — backfilled posts don't repeat on subsequent pages
|
||||
- **Page label updates** on cached page loads
|
||||
|
||||
### Prefetch
|
||||
|
||||
- **Ring expansion** — prefetches in all 8 directions (up, down, left, right, diagonals)
|
||||
- **Auto-start on search** — begins prefetching from top of page immediately
|
||||
- **Re-centers on click** — clicking a post restarts the spiral from that position
|
||||
- **Triggers on infinite scroll** — new appended posts start prefetching automatically
|
||||
|
||||
### Slideshow
|
||||
|
||||
- **Blacklist Tag button** — opens categorized tag menu in slideshow toolbar
|
||||
- **Blacklist Post button** — blacklists current post from slideshow toolbar
|
||||
- **Blacklisting clears slideshow** — both preview and slideshow cleared when previewed post is blacklisted
|
||||
|
||||
### Copy to Clipboard
|
||||
|
||||
- **Ctrl+C** — copies preview image to clipboard (falls back to cached file)
|
||||
- **Right-click grid** — "Copy Image to Clipboard" option
|
||||
- **Right-click preview** — "Copy Image to Clipboard" always available
|
||||
|
||||
### Live Settings
|
||||
|
||||
- **Most settings apply instantly** — infinite scroll, library directory, thumbnail size, rating, score
|
||||
- Removed "restart required" labels
|
||||
|
||||
### Bugfixes
|
||||
|
||||
- **Blacklisting doesn't clear unrelated preview** — only clears when the previewed post matches
|
||||
- **Backfill confirmed working** — debug logging added
|
||||
- **Status bar keeps result count** — shows "N results — Loaded" instead of just "Loaded"
|
||||
- **Fixed README code block formatting** and added ffmpeg back to Linux deps
|
||||
|
||||
109
HYPRLAND.md
Normal file
109
HYPRLAND.md
Normal file
@ -0,0 +1,109 @@
|
||||
# Hyprland integration
|
||||
|
||||
I daily-drive booru-viewer on Hyprland and I've baked in my own opinions
|
||||
on how the app should behave there. By default, a handful of `hyprctl`
|
||||
dispatches run at runtime to:
|
||||
|
||||
- Restore the main window's last floating mode + dimensions on launch
|
||||
- Restore the popout's position and keep it anchored to its configured
|
||||
anchor point (center or any corner) as its content resizes during
|
||||
navigation, and suppress F11 / fullscreen-transition flicker
|
||||
- "Prime" Hyprland's per-window floating cache at startup so a mid-session
|
||||
toggle to floating uses your saved dimensions
|
||||
- Lock the popout's aspect ratio to its content so you can't accidentally
|
||||
stretch mpv playback by dragging the popout corner
|
||||
|
||||
## Opting out
|
||||
|
||||
If you're a ricer with your own `windowrule`s targeting
|
||||
`class:^(booru-viewer)$` and you'd rather the app keep its hands off your
|
||||
setup, there are two independent opt-out env vars:
|
||||
|
||||
- **`BOORU_VIEWER_NO_HYPR_RULES=1`** — disables every in-code hyprctl
|
||||
dispatch *except* the popout's `keep_aspect_ratio` lock. Use this if
|
||||
you want app-side window management out of the way but you still want
|
||||
the popout to size itself to its content.
|
||||
- **`BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1`** — independently disables
|
||||
the popout's aspect ratio enforcement. Useful if you want to drag the
|
||||
popout to whatever shape you like (square, panoramic, monitor-aspect,
|
||||
whatever) and accept that mpv playback will letterbox or stretch to
|
||||
match.
|
||||
|
||||
For the full hands-off experience, set both:
|
||||
|
||||
```ini
|
||||
[Desktop Entry]
|
||||
Name=booru-viewer
|
||||
Exec=env BOORU_VIEWER_NO_HYPR_RULES=1 BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1 /path/to/booru-viewer/.venv/bin/booru-viewer
|
||||
Icon=/path/to/booru-viewer/icon.png
|
||||
Type=Application
|
||||
Categories=Graphics;
|
||||
```
|
||||
|
||||
Or for one-off launches from a shell:
|
||||
|
||||
```bash
|
||||
BOORU_VIEWER_NO_HYPR_RULES=1 booru-viewer
|
||||
```
|
||||
|
||||
## Writing your own rules
|
||||
|
||||
If you're running with `BOORU_VIEWER_NO_HYPR_RULES=1` (or layering rules
|
||||
on top of the defaults), here's the reference.
|
||||
|
||||
### Window identity
|
||||
|
||||
- Main window — class `booru-viewer`
|
||||
- Popout — class `booru-viewer`, title `booru-viewer — Popout`
|
||||
|
||||
> ⚠ The popout title uses an em dash (`—`, U+2014), not a hyphen. A rule
|
||||
> like `match:title = ^booru-viewer - Popout$` will silently match
|
||||
> nothing. Either paste the em dash verbatim or match the tail:
|
||||
> `match:title = Popout$`.
|
||||
|
||||
### Example rules
|
||||
|
||||
```ini
|
||||
# Float the popout with aspect-locked resize and no animation flicker
|
||||
windowrule {
|
||||
match:class = ^(booru-viewer)$
|
||||
match:title = Popout$
|
||||
float = yes
|
||||
keep_aspect_ratio = on
|
||||
no_anim = on
|
||||
}
|
||||
|
||||
# Per-window scroll factor if your global is too aggressive
|
||||
windowrule {
|
||||
match:class = ^(booru-viewer)$
|
||||
match:title = Popout$
|
||||
scroll_mouse = 0.65
|
||||
}
|
||||
```
|
||||
|
||||
### What the env vars actually disable
|
||||
|
||||
`BOORU_VIEWER_NO_HYPR_RULES=1` suppresses the in-code calls to:
|
||||
|
||||
- `dispatch resizeactive` / `moveactive` batches that restore saved
|
||||
popout geometry
|
||||
- `dispatch togglefloating` on the main window at launch
|
||||
- `dispatch setprop address:<addr> no_anim 1` applied during popout
|
||||
transitions (skipped on the first fit after open so Hyprland's
|
||||
`windowsIn` / `popin` animation can play — subsequent navigation
|
||||
fits still suppress anim to avoid resize flicker)
|
||||
- The startup "prime" sequence that warms Hyprland's per-window
|
||||
floating cache
|
||||
|
||||
`BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1` suppresses only
|
||||
`dispatch setprop address:<addr> keep_aspect_ratio 1` on the popout.
|
||||
Everything else still runs.
|
||||
|
||||
Read-only queries (`hyprctl clients -j`, `hyprctl monitors -j`) always
|
||||
run regardless — the app needs them to know where it is.
|
||||
|
||||
### Hyprland requirements
|
||||
|
||||
The `keep_aspect_ratio` windowrule and `dispatch setprop
|
||||
keep_aspect_ratio` both require a recent Hyprland. On older builds the
|
||||
aspect lock is silently a no-op.
|
||||
50
KEYBINDS.md
Normal file
50
KEYBINDS.md
Normal file
@ -0,0 +1,50 @@
|
||||
# Keybinds
|
||||
|
||||
## Grid
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| Arrow keys / `h`/`j`/`k`/`l` | Navigate grid |
|
||||
| `Ctrl+A` | Select all |
|
||||
| `Ctrl+Click` / `Shift+Click` | Multi-select |
|
||||
| `Home` / `End` | Jump to first / last |
|
||||
| Scroll tilt left / right | Previous / next thumbnail (one cell) |
|
||||
| `Ctrl+C` | Copy file to clipboard |
|
||||
| Right click | Context menu |
|
||||
|
||||
## Preview
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| Scroll wheel | Zoom (image) / volume (video) |
|
||||
| Scroll tilt left / right | Previous / next post |
|
||||
| Middle click / `0` | Reset view |
|
||||
| Arrow keys / `h`/`j`/`k`/`l` | Navigate posts |
|
||||
| `,` / `.` | Seek 3s back / forward (video) |
|
||||
| `Space` | Play / pause (video, hover to activate) |
|
||||
| Right click | Context menu (bookmark, save, popout) |
|
||||
|
||||
## Popout
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| Arrow keys / `h`/`j`/`k`/`l` | Navigate posts |
|
||||
| Scroll tilt left / right | Previous / next post |
|
||||
| `,` / `.` | Seek 3s (video) |
|
||||
| `Space` | Play / pause (video) |
|
||||
| Scroll wheel | Volume up / down (video) |
|
||||
| `B` / `F` | Toggle bookmark on selected post |
|
||||
| `S` | Toggle save to library (Unfiled) |
|
||||
| `F11` | Toggle fullscreen / windowed |
|
||||
| `Ctrl+H` | Hide / show UI |
|
||||
| `Ctrl+P` | Privacy screen |
|
||||
| `Escape` / `Q` | Close popout |
|
||||
|
||||
## Global
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| `B` / `F` | Toggle bookmark on selected post |
|
||||
| `S` | Toggle save to library (Unfiled) |
|
||||
| `Ctrl+P` | Privacy screen |
|
||||
| `F11` | Toggle fullscreen |
|
||||
238
README.md
238
README.md
@ -1,118 +1,60 @@
|
||||
# booru-viewer
|
||||
A Qt6 booru client for people who keep what they save and rice what they run. Browse, search, and archive Danbooru, e621, Gelbooru, and Moebooru on Linux and Windows. Fully themeable.
|
||||
|
||||
Local desktop app for browsing, searching, and saving images from booru-style imageboards.
|
||||
|
||||
Qt6 GUI, cross-platform (Linux + Windows), fully themeable.
|
||||
|
||||
If you find this useful, consider buying me a coffee:
|
||||
|
||||
[](https://ko-fi.com/paxmoe)
|
||||
|
||||
## Screenshots
|
||||
|
||||
**Windows 11 — Light Theme**
|
||||
|
||||
<picture><img src="screenshots/windows11-light.png" alt="Windows 11 — Light Theme" width="700"></picture>
|
||||
|
||||
**Windows 11 — Dark Theme (auto-detected)**
|
||||
|
||||
<picture><img src="screenshots/windows11-dark.png" alt="Windows 11 — Dark Theme" width="700"></picture>
|
||||
|
||||
**Windows 10 — Light Theme**
|
||||
|
||||
<picture><img src="screenshots/windows.png" alt="Windows 10 — Light Theme" width="700"></picture>
|
||||
|
||||
**Windows 10 — Dark Theme (auto-detected)**
|
||||
|
||||
<picture><img src="screenshots/windows-dark.png" alt="Windows 10 — Dark Theme" width="700"></picture>
|
||||
|
||||
**Linux — Styled via system Qt6 theme**
|
||||
|
||||
<picture><img src="screenshots/linux.png" alt="Linux — System Qt6 theme" width="700"></picture>
|
||||
<img src="screenshots/linux.png" alt="Linux — System Qt6 theme" width="700">
|
||||
|
||||
Supports custom styling via `custom.qss` — see [Theming](#theming).
|
||||
|
||||
## Features
|
||||
|
||||
### Browsing
|
||||
- Supports **Danbooru, Gelbooru, Moebooru, and e621**
|
||||
- Auto-detect site API type — just paste the URL
|
||||
- Tag search with autocomplete, history dropdown, and saved searches
|
||||
- Rating and score filtering (server-side `score:>=N`)
|
||||
- **Media type filter** — dropdown: All / Animated / Video / GIF / Audio
|
||||
- Blacklisted tags and posts (client-side filtering with backfill)
|
||||
- Thumbnail grid with keyboard navigation
|
||||
- **Infinite scroll** — optional, auto-loads more posts at bottom
|
||||
- **Start from page** — jump to any page number on search
|
||||
- **Page cache** — prev/next loads from memory, no duplicates
|
||||
- **Copy File to Clipboard** — Ctrl+C, works for images and videos
|
||||
booru-viewer has three tabs that map to three commitment levels: **Browse** for live search against booru APIs, **Bookmarks** for posts you've starred for later, and **Library** for files you've actually saved to disk.
|
||||
|
||||
### Preview
|
||||
- Image viewer with zoom (scroll wheel), pan (drag), and reset (middle click)
|
||||
- GIF animation, Pixiv ugoira auto-conversion (zip to animated GIF)
|
||||
- Animated PNG/WebP auto-conversion to GIF
|
||||
- Video playback via mpv (MP4, WebM, MKV) with play/pause, seek, volume, mute, and seamless looping
|
||||
- Info panel with post details, date, clickable tags, and filetype
|
||||
- **Preview toolbar** — Bookmark, Save, BL Tag, BL Post, and Popout buttons above the preview panel
|
||||
**Browsing** — Danbooru, e621, Gelbooru, and Moebooru. Tag search with autocomplete, rating/score/media-type filters, blacklist with backfill, infinite scroll, page cache, keyboard grid navigation, multi-select with bulk actions, drag thumbnails out as files.
|
||||
|
||||
### Popout Viewer
|
||||
- Right-click preview → "Popout" or click the Popout button in the preview toolbar
|
||||
- Arrow keys / `h`/`j`/`k`/`l` navigate posts (including during video playback)
|
||||
- `,` / `.` seek 3 seconds in videos, `Space` toggles play/pause
|
||||
- Floating overlay UI — toolbar and video controls auto-hide after 2 seconds, reappear on mouse move
|
||||
- `F11` toggles fullscreen/windowed, `Ctrl+H` hides all UI, `Ctrl+P` privacy screen
|
||||
- Window auto-sizes to content aspect ratio; state persisted across sessions
|
||||
- Hyprland: `keep_aspect_ratio` prop locks window to content proportions
|
||||
- Bidirectional sync — clicking posts in the main grid updates the popout
|
||||
- Video position and player state synced between preview and popout
|
||||
**Preview** — Image zoom/pan, GIF/APNG/WebP animation, video via mpv (stream from CDN, seamless loop, seek, volume), ugoira auto-conversion, color-coded tag categories in info panel.
|
||||
|
||||
### Bookmarks & Library
|
||||
- Bookmark posts, organize into folders
|
||||
- Three-tab layout: Browse, Bookmarks, and Library
|
||||
- Save to library (unsorted or per-folder), drag-and-drop thumbnails as files
|
||||
- Multi-select (Ctrl/Shift+Click, Ctrl+A) with bulk actions
|
||||
- Bulk context menus in both Browse and Bookmarks tabs
|
||||
- Unsave from Library available in grid, preview, and popout (only shown when post is saved)
|
||||
- Import/export bookmarks as JSON
|
||||
**Popout** — Dedicated viewer window. Arrow/vim keys navigate posts during video. Auto-hiding overlay UI. F11 fullscreen, Ctrl+H hide UI, Ctrl+P privacy screen. Syncs bidirectionally with main grid.
|
||||
|
||||
### Library
|
||||
- Dedicated tab for browsing saved files on disk
|
||||
- Folder sidebar with configurable library directory
|
||||
- Sort by date, name, or size
|
||||
- Video thumbnail generation (ffmpeg if available, placeholder fallback)
|
||||
- Unreachable directory detection
|
||||
**Bookmarks** — Star posts for later. Folder organization, tag search, bulk save/remove, JSON import/export.
|
||||
|
||||
### Search
|
||||
- Inline history dropdown inside the search bar
|
||||
- Saved searches with management dialog
|
||||
- Click empty search bar to open history
|
||||
- Session cache mode clears history on exit (keeps saved searches)
|
||||
**Library** — Save to disk with metadata indexing. Customizable filename templates (`%id%`, `%artist%`, `%md5%`, etc). Folder organization, tag search, sort by date/name/size.
|
||||
|
||||
**Search** — Inline history dropdown, saved searches, session cache mode.
|
||||
|
||||
## Install
|
||||
|
||||
### Windows
|
||||
|
||||
Download `booru-viewer-setup.exe` from [Releases](https://git.pax.moe/pax/booru-viewer/releases) and run the installer. It installs to AppData with Start Menu and optional desktop shortcuts. To update, just run the new installer over the old one — your data in `%APPDATA%\booru-viewer\` is preserved.
|
||||
Download `booru-viewer-setup.exe` from Releases and run the installer. It installs to AppData with Start Menu and optional desktop shortcuts. To update, just run the new installer over the old one. Your data in `%APPDATA%\booru-viewer\` is preserved.
|
||||
|
||||
Github: [/pxlwh/booru-viewer/releases](https://github.com/pxlwh/booru-viewer/releases)
|
||||
|
||||
Gitea: [/pax/booru-viewer/releases](https://git.pax.moe/pax/booru-viewer/releases)
|
||||
|
||||
Windows 10 dark mode is automatically detected and applied.
|
||||
|
||||
### Linux
|
||||
|
||||
Requires Python 3.11+ and pip. Most distros ship Python but you may need to install pip and the Qt6 system libraries.
|
||||
|
||||
**Arch / CachyOS:**
|
||||
**Arch / CachyOS / Manjaro** — install from the AUR:
|
||||
```sh
|
||||
sudo pacman -S python python-pip qt6-base mpv ffmpeg
|
||||
yay -S booru-viewer-git
|
||||
# or: paru -S booru-viewer-git
|
||||
```
|
||||
|
||||
**Ubuntu / Debian (24.04+):**
|
||||
The AUR package tracks the Gitea `main` branch, so `yay -Syu` pulls the latest commit. Desktop entry and icon are installed automatically.
|
||||
|
||||
AUR: [/packages/booru-viewer-git](https://aur.archlinux.org/packages/booru-viewer-git)
|
||||
|
||||
**Other distros** — build from source. Requires Python 3.11+ and Qt6 system libraries.
|
||||
|
||||
Ubuntu / Debian (24.04+):
|
||||
```sh
|
||||
sudo apt install python3 python3-pip python3-venv mpv libmpv-dev ffmpeg
|
||||
sudo apt install python3 python3-pip python3-venv mpv libmpv-dev
|
||||
```
|
||||
|
||||
**Fedora:**
|
||||
Fedora:
|
||||
```sh
|
||||
sudo dnf install python3 python3-pip qt6-qtbase mpv mpv-libs-devel ffmpeg
|
||||
sudo dnf install python3 python3-pip qt6-qtbase mpv mpv-libs-devel
|
||||
```
|
||||
|
||||
Then clone and install:
|
||||
@ -122,17 +64,10 @@ cd booru-viewer
|
||||
python3 -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -e .
|
||||
```
|
||||
|
||||
Run it:
|
||||
```sh
|
||||
booru-viewer
|
||||
```
|
||||
|
||||
Or without installing: `python3 -m booru_viewer.main_gui`
|
||||
|
||||
|
||||
**Desktop entry:** To add booru-viewer to your app launcher, create `~/.local/share/applications/booru-viewer.desktop`:
|
||||
To add a launcher entry, create `~/.local/share/applications/booru-viewer.desktop`:
|
||||
```ini
|
||||
[Desktop Entry]
|
||||
Name=booru-viewer
|
||||
@ -144,47 +79,11 @@ Categories=Graphics;
|
||||
|
||||
### Hyprland integration
|
||||
|
||||
I daily-drive booru-viewer on Hyprland and I've baked in my own opinions on
|
||||
how the app should behave there. By default, a handful of `hyprctl` dispatches
|
||||
run at runtime to:
|
||||
|
||||
- Restore the main window's last floating mode + dimensions on launch
|
||||
- Restore the popout's position, center-pin it around its content during
|
||||
navigation, and suppress F11 / fullscreen-transition flicker
|
||||
- "Prime" Hyprland's per-window floating cache at startup so a mid-session
|
||||
toggle to floating uses your saved dimensions
|
||||
- Lock the popout's aspect ratio to its content so you can't accidentally
|
||||
stretch mpv playback by dragging the popout corner
|
||||
|
||||
If you're a ricer with your own `windowrule`s targeting `class:^(booru-viewer)$`
|
||||
and you'd rather the app keep its hands off your setup, there are two
|
||||
independent opt-out env vars:
|
||||
|
||||
- **`BOORU_VIEWER_NO_HYPR_RULES=1`** — disables every in-code hyprctl dispatch
|
||||
*except* the popout's `keep_aspect_ratio` lock. Use this if you want app-side
|
||||
window management out of the way but you still want the popout to size itself
|
||||
to its content.
|
||||
- **`BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1`** — independently disables the popout's
|
||||
aspect ratio enforcement. Useful if you want to drag the popout to whatever
|
||||
shape you like (square, panoramic, monitor-aspect, whatever) and accept that
|
||||
mpv playback will letterbox or stretch to match.
|
||||
|
||||
For the full hands-off experience, set both:
|
||||
|
||||
```ini
|
||||
[Desktop Entry]
|
||||
Name=booru-viewer
|
||||
Exec=env BOORU_VIEWER_NO_HYPR_RULES=1 BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK=1 /path/to/booru-viewer/.venv/bin/booru-viewer
|
||||
Icon=/path/to/booru-viewer/icon.png
|
||||
Type=Application
|
||||
Categories=Graphics;
|
||||
```
|
||||
|
||||
Or for one-off launches from a shell:
|
||||
|
||||
```bash
|
||||
BOORU_VIEWER_NO_HYPR_RULES=1 booru-viewer
|
||||
```
|
||||
booru-viewer ships with built-in Hyprland window management (popout
|
||||
geometry restore, aspect ratio lock, animation suppression, etc.) that
|
||||
can be fully or partially opted out of via env vars. See
|
||||
[HYPRLAND.md](HYPRLAND.md) for the full details, opt-out flags, and
|
||||
example `windowrule` reference.
|
||||
|
||||
### Dependencies
|
||||
|
||||
@ -193,54 +92,11 @@ BOORU_VIEWER_NO_HYPR_RULES=1 booru-viewer
|
||||
- httpx
|
||||
- Pillow
|
||||
- python-mpv
|
||||
- mpv (system package on Linux, bundled DLL on Windows)
|
||||
- mpv
|
||||
|
||||
## Keybinds
|
||||
|
||||
### Grid
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| Arrow keys / `h`/`j`/`k`/`l` | Navigate grid |
|
||||
| `Ctrl+A` | Select all |
|
||||
| `Ctrl+Click` / `Shift+Click` | Multi-select |
|
||||
| `Home` / `End` | Jump to first / last |
|
||||
| Scroll tilt left / right | Previous / next thumbnail (one cell) |
|
||||
| `Ctrl+C` | Copy file to clipboard |
|
||||
| Right click | Context menu |
|
||||
|
||||
### Preview
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| Scroll wheel | Zoom (image) / volume (video) |
|
||||
| Scroll tilt left / right | Previous / next post |
|
||||
| Middle click / `0` | Reset view |
|
||||
| Arrow keys / `h`/`j`/`k`/`l` | Navigate posts |
|
||||
| `,` / `.` | Seek 3s back / forward (video) |
|
||||
| `Space` | Play / pause (video, hover to activate) |
|
||||
| Right click | Context menu (bookmark, save, popout) |
|
||||
|
||||
### Popout
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| Arrow keys / `h`/`j`/`k`/`l` | Navigate posts |
|
||||
| Scroll tilt left / right | Previous / next post |
|
||||
| `,` / `.` | Seek 3s (video) |
|
||||
| `Space` | Play / pause (video) |
|
||||
| Scroll wheel | Volume up / down (video) |
|
||||
| `F11` | Toggle fullscreen / windowed |
|
||||
| `Ctrl+H` | Hide / show UI |
|
||||
| `Ctrl+P` | Privacy screen |
|
||||
| `Escape` / `Q` | Close popout |
|
||||
|
||||
### Global
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| `Ctrl+P` | Privacy screen |
|
||||
| `F11` | Toggle fullscreen |
|
||||
See [KEYBINDS.md](KEYBINDS.md) for the full list.
|
||||
|
||||
## Adding Sites
|
||||
|
||||
@ -266,20 +122,14 @@ The app uses your OS native theme by default. To customize, copy a `.qss` file f
|
||||
|
||||
A template is also available in Settings > Theme > Create from Template.
|
||||
|
||||
### Included Themes
|
||||
|
||||
<picture><img src="screenshots/themes/nord.png" alt="Nord" width="400"></picture> <picture><img src="screenshots/themes/catppuccin-mocha.png" alt="Catppuccin Mocha" width="400"></picture>
|
||||
|
||||
<picture><img src="screenshots/themes/gruvbox.png" alt="Gruvbox" width="400"></picture> <picture><img src="screenshots/themes/solarized-dark.png" alt="Solarized Dark" width="400"></picture>
|
||||
|
||||
<picture><img src="screenshots/themes/tokyo-night.png" alt="Tokyo Night" width="400"></picture> <picture><img src="screenshots/themes/everforest.png" alt="Everforest" width="400"></picture>
|
||||
Six themes included, each in rounded and square variants. See [`themes/`](themes/) for screenshots and the full QSS reference.
|
||||
|
||||
## Settings
|
||||
|
||||
- **General** — page size, thumbnail size, default site, default rating/score, prefetch mode (Off / Nearby / Aggressive), infinite scroll, popout monitor, file dialog platform
|
||||
- **General** — page size, thumbnail size (100-200px), default site, default rating/score, prefetch mode (Off / Nearby / Aggressive), infinite scroll, unbookmark on save, search history, flip layout, popout monitor, popout anchor (resize pivot), file dialog platform
|
||||
- **Cache** — max cache size, max thumbnail cache, auto-evict, clear cache on exit (session-only mode)
|
||||
- **Blacklist** — tag blacklist with toggle, post URL blacklist
|
||||
- **Paths** — data directory, cache, database, configurable library directory
|
||||
- **Paths** — data directory, cache, database, configurable library directory, library filename template
|
||||
- **Theme** — custom.qss editor, template generator, CSS guide
|
||||
- **Network** — connection log showing all hosts contacted this session
|
||||
|
||||
@ -292,11 +142,15 @@ A template is also available in Settings > Theme > Create from Template.
|
||||
| Library | `~/.local/share/booru-viewer/saved/` | `%APPDATA%\booru-viewer\saved\` |
|
||||
| Theme | `~/.local/share/booru-viewer/custom.qss` | `%APPDATA%\booru-viewer\custom.qss` |
|
||||
|
||||
## Privacy
|
||||
To back up everything: copy `saved/` for the files themselves and `booru.db` for bookmarks, folders, and tag metadata. The two are independent — restoring one without the other still works. The `saved/` folder is browsable on its own in any file manager, and the database can be re-populated from the booru sites for any post IDs you still have on disk.
|
||||
|
||||
booru-viewer makes **no connections** except to the booru sites you configure. There is no telemetry, analytics, update checking, or phoning home. All data stays local on your machine.
|
||||
**Privacy:** No telemetry, analytics, or update checks. Only connects to booru sites you configure. Verify in Settings > Network.
|
||||
|
||||
Every outgoing request is logged in Settings > Network so you can verify this yourself — you will only see requests to the booru API endpoints and CDNs you chose to connect to.
|
||||
## Support
|
||||
|
||||
If you find this useful, consider buying me a coffee:
|
||||
|
||||
[](https://ko-fi.com/paxmoe)
|
||||
|
||||
## License
|
||||
|
||||
|
||||
@ -0,0 +1,18 @@
|
||||
"""booru_viewer.core package — pure-Python data + I/O layer (no Qt).
|
||||
|
||||
Side effect on import: install the project-wide PIL decompression-bomb
|
||||
cap. PIL's default warns silently above ~89M pixels; we want a hard
|
||||
fail above 256M pixels so DecompressionBombError can be caught and
|
||||
treated as a download failure.
|
||||
|
||||
Setting it here (rather than as a side effect of importing
|
||||
``core.cache``) means any code path that touches PIL via any
|
||||
``booru_viewer.core.*`` submodule gets the cap installed first,
|
||||
regardless of submodule import order. Audit finding #8.
|
||||
"""
|
||||
|
||||
from PIL import Image as _PILImage
|
||||
|
||||
_PILImage.MAX_IMAGE_PIXELS = 256 * 1024 * 1024
|
||||
|
||||
del _PILImage
|
||||
150
booru_viewer/core/api/_safety.py
Normal file
150
booru_viewer/core/api/_safety.py
Normal file
@ -0,0 +1,150 @@
|
||||
"""Network-safety helpers for httpx clients.
|
||||
|
||||
Keeps SSRF guards and secret redaction in one place so every httpx
|
||||
client in the project can share a single implementation. All helpers
|
||||
here are pure stdlib + httpx; no Qt, no project-side imports.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import ipaddress
|
||||
import socket
|
||||
from typing import Any, Mapping
|
||||
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit
|
||||
|
||||
import httpx
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SSRF guard — finding #1
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_BLOCKED_V4 = [
|
||||
ipaddress.ip_network("0.0.0.0/8"), # this-network
|
||||
ipaddress.ip_network("10.0.0.0/8"), # RFC1918
|
||||
ipaddress.ip_network("100.64.0.0/10"), # CGNAT
|
||||
ipaddress.ip_network("127.0.0.0/8"), # loopback
|
||||
ipaddress.ip_network("169.254.0.0/16"), # link-local (incl. 169.254.169.254 metadata)
|
||||
ipaddress.ip_network("172.16.0.0/12"), # RFC1918
|
||||
ipaddress.ip_network("192.0.0.0/24"), # IETF protocol assignments
|
||||
ipaddress.ip_network("192.168.0.0/16"), # RFC1918
|
||||
ipaddress.ip_network("198.18.0.0/15"), # benchmark
|
||||
ipaddress.ip_network("224.0.0.0/4"), # multicast
|
||||
ipaddress.ip_network("240.0.0.0/4"), # reserved
|
||||
]
|
||||
|
||||
_BLOCKED_V6 = [
|
||||
ipaddress.ip_network("::1/128"), # loopback
|
||||
ipaddress.ip_network("::/128"), # unspecified
|
||||
ipaddress.ip_network("::ffff:0:0/96"), # IPv4-mapped (covers v4 via v6)
|
||||
ipaddress.ip_network("64:ff9b::/96"), # well-known NAT64
|
||||
ipaddress.ip_network("fc00::/7"), # unique local
|
||||
ipaddress.ip_network("fe80::/10"), # link-local
|
||||
ipaddress.ip_network("ff00::/8"), # multicast
|
||||
]
|
||||
|
||||
|
||||
def _is_blocked_ip(ip: ipaddress._BaseAddress) -> bool:
|
||||
nets = _BLOCKED_V4 if isinstance(ip, ipaddress.IPv4Address) else _BLOCKED_V6
|
||||
return any(ip in net for net in nets)
|
||||
|
||||
|
||||
def check_public_host(host: str) -> None:
|
||||
"""Raise httpx.RequestError if ``host`` is (or resolves to) a non-public IP.
|
||||
|
||||
Blocks loopback, RFC1918, link-local (including the 169.254.169.254
|
||||
cloud-metadata endpoint), unique-local v6, and similar. Used by both
|
||||
the initial request and every redirect hop — see
|
||||
``validate_public_request`` for the async wrapper.
|
||||
"""
|
||||
if not host:
|
||||
return
|
||||
try:
|
||||
ip = ipaddress.ip_address(host)
|
||||
except ValueError:
|
||||
ip = None
|
||||
if ip is not None:
|
||||
if _is_blocked_ip(ip):
|
||||
raise httpx.RequestError(f"blocked address: {host}")
|
||||
return
|
||||
try:
|
||||
infos = socket.getaddrinfo(host, None)
|
||||
except socket.gaierror as e:
|
||||
raise httpx.RequestError(f"DNS resolution failed for {host}: {e}")
|
||||
seen: set[str] = set()
|
||||
for info in infos:
|
||||
addr = info[4][0]
|
||||
if addr in seen:
|
||||
continue
|
||||
seen.add(addr)
|
||||
try:
|
||||
resolved = ipaddress.ip_address(addr.split("%", 1)[0])
|
||||
except ValueError:
|
||||
continue
|
||||
if _is_blocked_ip(resolved):
|
||||
raise httpx.RequestError(
|
||||
f"blocked request target {host} -> {addr}"
|
||||
)
|
||||
|
||||
|
||||
async def validate_public_request(request: httpx.Request) -> None:
|
||||
"""httpx request event hook — rejects private/metadata targets.
|
||||
|
||||
Fires on every hop including redirects. The initial request to a
|
||||
user-configured booru base_url is also validated; this intentionally
|
||||
blocks users from pointing the app at ``http://localhost/`` or an
|
||||
RFC1918 address (behavior change from v0.2.5).
|
||||
|
||||
Limitation: TOCTOU / DNS rebinding. We resolve the host here, but
|
||||
the kernel will re-resolve when the TCP connection actually opens,
|
||||
and a rebinder that returns a public IP on first query and a
|
||||
private IP on the second can bypass this hook. The project's threat
|
||||
model is a *malicious booru returning a 3xx to a private address* —
|
||||
not an active rebinder controlling the DNS recursor — so this check
|
||||
is the intended defense line. If the threat model ever widens, the
|
||||
follow-up is a custom httpx transport that validates post-connect.
|
||||
"""
|
||||
host = request.url.host
|
||||
if not host:
|
||||
return
|
||||
await asyncio.to_thread(check_public_host, host)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Credential redaction — finding #3
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Case-sensitive; matches the literal param names every booru client
|
||||
# uses today (verified via grep across danbooru/e621/gelbooru/moebooru).
|
||||
SECRET_KEYS: frozenset[str] = frozenset({
|
||||
"login",
|
||||
"api_key",
|
||||
"user_id",
|
||||
"password_hash",
|
||||
})
|
||||
|
||||
|
||||
def redact_url(url: str) -> str:
|
||||
"""Replace secret query params with ``***`` in a URL string.
|
||||
|
||||
Preserves ordering and non-secret params. Empty-query URLs pass
|
||||
through unchanged.
|
||||
"""
|
||||
parts = urlsplit(url)
|
||||
if not parts.query:
|
||||
return url
|
||||
pairs = parse_qsl(parts.query, keep_blank_values=True)
|
||||
redacted = [(k, "***" if k in SECRET_KEYS else v) for k, v in pairs]
|
||||
return urlunsplit((
|
||||
parts.scheme,
|
||||
parts.netloc,
|
||||
parts.path,
|
||||
urlencode(redacted),
|
||||
parts.fragment,
|
||||
))
|
||||
|
||||
|
||||
def redact_params(params: Mapping[str, Any]) -> dict[str, Any]:
|
||||
"""Return a copy of ``params`` with secret keys replaced by ``***``."""
|
||||
return {k: ("***" if k in SECRET_KEYS else v) for k, v in params.items()}
|
||||
@ -4,13 +4,15 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import threading
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
import httpx
|
||||
|
||||
from ..config import USER_AGENT, DEFAULT_PAGE_SIZE
|
||||
from ..config import DEFAULT_PAGE_SIZE
|
||||
from ..cache import log_connection
|
||||
from ._safety import redact_url
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
@ -62,8 +64,18 @@ class BooruClient(ABC):
|
||||
|
||||
api_type: str = ""
|
||||
|
||||
# Shared client across all BooruClient instances for connection reuse
|
||||
# Shared httpx client across all BooruClient instances for connection
|
||||
# reuse. Lazily created on first access; the threading.Lock guards the
|
||||
# check-and-set so concurrent first-callers can't both build a client
|
||||
# and leak one. The lock is per-class, lives for the process lifetime.
|
||||
#
|
||||
# Loop affinity: by convention every async call goes through
|
||||
# `core.concurrency.run_on_app_loop`, which schedules on the persistent
|
||||
# event loop in `gui/app.py`. The first lazy init therefore binds the
|
||||
# client to that loop, and every subsequent use is on the same loop.
|
||||
# This is the contract that PR2 enforces — see core/concurrency.py.
|
||||
_shared_client: httpx.AsyncClient | None = None
|
||||
_shared_client_lock: threading.Lock = threading.Lock()
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -74,29 +86,55 @@ class BooruClient(ABC):
|
||||
self.base_url = base_url.rstrip("/")
|
||||
self.api_key = api_key
|
||||
self.api_user = api_user
|
||||
# Set externally by client_for_type when db + site_id are
|
||||
# available. Gelbooru-shape and Moebooru clients use it to
|
||||
# populate post.tag_categories via HTML scrape / batch API.
|
||||
# Danbooru and e621 leave it None (inline categorization).
|
||||
self.category_fetcher = None # CategoryFetcher | None
|
||||
|
||||
@property
|
||||
def client(self) -> httpx.AsyncClient:
|
||||
if BooruClient._shared_client is None or BooruClient._shared_client.is_closed:
|
||||
BooruClient._shared_client = httpx.AsyncClient(
|
||||
headers={"User-Agent": USER_AGENT},
|
||||
follow_redirects=True,
|
||||
timeout=20.0,
|
||||
event_hooks={"request": [self._log_request]},
|
||||
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||
)
|
||||
return BooruClient._shared_client
|
||||
# Fast path: client exists and is open. No lock needed for the read.
|
||||
c = BooruClient._shared_client
|
||||
if c is not None and not c.is_closed:
|
||||
return c
|
||||
# Slow path: build it. Lock so two coroutines on the same loop don't
|
||||
# both construct + leak.
|
||||
from ..http import make_client
|
||||
with BooruClient._shared_client_lock:
|
||||
c = BooruClient._shared_client
|
||||
if c is None or c.is_closed:
|
||||
c = make_client(extra_request_hooks=[self._log_request])
|
||||
BooruClient._shared_client = c
|
||||
return c
|
||||
|
||||
@classmethod
|
||||
async def aclose_shared(cls) -> None:
|
||||
"""Cleanly aclose the shared client. Safe to call from any coroutine
|
||||
running on the loop the client is bound to. No-op if not initialized."""
|
||||
with cls._shared_client_lock:
|
||||
c = cls._shared_client
|
||||
cls._shared_client = None
|
||||
if c is not None and not c.is_closed:
|
||||
try:
|
||||
await c.aclose()
|
||||
except Exception as e:
|
||||
log.warning("BooruClient shared aclose failed: %s", e)
|
||||
|
||||
@staticmethod
|
||||
async def _log_request(request: httpx.Request) -> None:
|
||||
log_connection(str(request.url))
|
||||
# Redact api_key / login / user_id / password_hash from the
|
||||
# URL before it ever crosses the function boundary — the
|
||||
# rendered URL would otherwise land in tracebacks, debug logs,
|
||||
# or in-app connection-log views as plaintext.
|
||||
log_connection(redact_url(str(request.url)))
|
||||
|
||||
_RETRYABLE_STATUS = frozenset({429, 503})
|
||||
|
||||
async def _request(
|
||||
self, method: str, url: str, *, params: dict | None = None
|
||||
) -> httpx.Response:
|
||||
"""Issue an HTTP request with a single retry on 429/503/timeout."""
|
||||
"""Issue an HTTP request with a single retry on 429/503/timeout/network error."""
|
||||
for attempt in range(2):
|
||||
try:
|
||||
resp = await self.client.request(method, url, params=params)
|
||||
@ -114,15 +152,29 @@ class BooruClient(ABC):
|
||||
wait = 2.0
|
||||
log.info(f"Retrying {url} after {resp.status_code} (wait {wait}s)")
|
||||
await asyncio.sleep(wait)
|
||||
except httpx.TimeoutException:
|
||||
except (
|
||||
httpx.TimeoutException,
|
||||
httpx.ConnectError,
|
||||
httpx.NetworkError,
|
||||
httpx.RemoteProtocolError,
|
||||
httpx.ReadError,
|
||||
) as e:
|
||||
# Retry on transient DNS/TCP/timeout failures plus
|
||||
# mid-response drops — RemoteProtocolError and ReadError
|
||||
# are common when an overloaded booru closes the TCP
|
||||
# connection between headers and body. Without them a
|
||||
# single dropped response blows up the whole search.
|
||||
if attempt == 1:
|
||||
raise
|
||||
log.info(f"Retrying {url} after timeout")
|
||||
log.info(f"Retrying {url} after {type(e).__name__}: {e}")
|
||||
await asyncio.sleep(1.0)
|
||||
return resp # unreachable in practice, satisfies type checker
|
||||
|
||||
async def close(self) -> None:
|
||||
pass # shared client stays open
|
||||
# Per-instance close is a no-op — the shared pool is owned by the
|
||||
# class. Use `await BooruClient.aclose_shared()` from app shutdown
|
||||
# to actually release the connection pool.
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def search(
|
||||
@ -138,12 +190,41 @@ class BooruClient(ABC):
|
||||
"""Tag autocomplete. Override in subclasses that support it."""
|
||||
return []
|
||||
|
||||
def _post_view_url(self, post: Post) -> str | None:
|
||||
"""Return the URL for a post's HTML detail page, or None.
|
||||
|
||||
Override in subclasses whose booru exposes tag categories in
|
||||
the post-view HTML via ``class="tag-type-X"`` markup.
|
||||
CategoryFetcher.fetch_post uses this to scrape categories.
|
||||
Returning None means "no HTML scrape path" — the default for
|
||||
Danbooru and e621 which categorize inline via JSON.
|
||||
"""
|
||||
return None
|
||||
|
||||
def _tag_api_url(self) -> str | None:
|
||||
"""Return the base URL for the batch tag DAPI, or None.
|
||||
|
||||
Override in Gelbooru-shaped subclasses to enable the fast
|
||||
path in CategoryFetcher.fetch_via_tag_api. The fetcher
|
||||
appends ``?page=dapi&s=tag&q=index&...`` query params.
|
||||
Returning None disables the fast path; the fetcher falls
|
||||
back to per-post HTML scrape.
|
||||
"""
|
||||
return None
|
||||
|
||||
async def test_connection(self) -> tuple[bool, str]:
|
||||
"""Test connection. Returns (success, detail_message)."""
|
||||
"""Test connection. Returns (success, detail_message).
|
||||
|
||||
Deliberately does NOT echo the response body in the error string —
|
||||
when used from `detect_site_type` (which follows redirects), echoing
|
||||
the body of an arbitrary HTTP response back into UI text becomes a
|
||||
body-leak gadget if the URL ever points anywhere unexpected.
|
||||
"""
|
||||
try:
|
||||
posts = await self.search(limit=1)
|
||||
return True, f"OK — got {len(posts)} post(s)"
|
||||
except httpx.HTTPStatusError as e:
|
||||
return False, f"HTTP {e.response.status_code}: {e.response.text[:200]}"
|
||||
reason = e.response.reason_phrase or ""
|
||||
return False, f"HTTP {e.response.status_code} {reason}".strip()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
651
booru_viewer/core/api/category_fetcher.py
Normal file
651
booru_viewer/core/api/category_fetcher.py
Normal file
@ -0,0 +1,651 @@
|
||||
"""Per-post HTML scrape + per-tag cache for boorus that don't return
|
||||
tag categories inline (Gelbooru-shape, Moebooru).
|
||||
|
||||
Optionally accelerated by a batch-tag-API fast path when the attached
|
||||
BooruClient declares a ``_tag_api_url`` AND has credentials. The fast
|
||||
path fetches up to 500 tag types per request via the booru's tag DAPI,
|
||||
avoiding per-post HTML scraping entirely on sites that support it.
|
||||
|
||||
The per-post HTML scrape path is the correctness baseline — it works on
|
||||
every Gelbooru fork and every Moebooru deployment regardless of auth or
|
||||
API quirks. The batch API is an optimization that short-circuits it
|
||||
when possible.
|
||||
|
||||
Architectural note: Moebooru's ``/tag.json?limit=0`` returns the entire
|
||||
tag database in one request. A future "download tag database" feature
|
||||
can pre-populate ``tag_types`` via that endpoint, after which
|
||||
``try_compose_from_cache`` succeeds for every post without any per-post
|
||||
HTTP. The cache-compose fast path already supports this — no
|
||||
CategoryFetcher changes needed, just a new "populate cache from dump"
|
||||
entry point.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .base import BooruClient, Post
|
||||
from ..db import Database
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTML parser for the universal `class="tag-type-X"` convention
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Two-pass approach:
|
||||
# 1. Find each tag-type element and its full inner content.
|
||||
# 2. Within the content, extract the tag name from the `tags=NAME`
|
||||
# URL parameter in the search link.
|
||||
#
|
||||
# This handles the cross-site variation cleanly:
|
||||
# - Gelbooru proper: only has `?` wiki links (no `tags=` param) →
|
||||
# returns 0 results, which is fine because Gelbooru uses the
|
||||
# batch tag API instead of HTML scraping.
|
||||
# - Rule34 / Safebooru.org: two <a> links per tag — `?` wiki link
|
||||
# + `<a href="...tags=TAGNAME">display name</a>`. We extract from
|
||||
# the URL, not the display text.
|
||||
# - yande.re / Konachan (Moebooru): same two-link pattern, but the
|
||||
# URL is `/post?tags=TAGNAME` instead of `page=post&s=list&tags=`.
|
||||
#
|
||||
# The `tags=` extraction gives us the canonical underscore form
|
||||
# directly from the URL, no display-text normalization needed.
|
||||
_TAG_ELEMENT_RE = re.compile(
|
||||
r'class="[^"]*tag-type-([a-z]+)[^"]*"[^>]*>' # class containing tag-type-NAME
|
||||
r'(.*?)' # inner content (lazy)
|
||||
r'</(?:li|span|td|div)>', # closing tag
|
||||
re.DOTALL,
|
||||
)
|
||||
_TAG_NAME_RE = re.compile(r'tags=([^&"<>\s]+)')
|
||||
|
||||
# HTML class name -> Capitalized label (matches danbooru.py / e621.py)
|
||||
_LABEL_MAP: dict[str, str] = {
|
||||
"general": "General",
|
||||
"artist": "Artist",
|
||||
"character": "Character",
|
||||
"copyright": "Copyright",
|
||||
"metadata": "Meta",
|
||||
"meta": "Meta",
|
||||
"species": "Species",
|
||||
"circle": "Circle",
|
||||
"style": "Style",
|
||||
}
|
||||
|
||||
# Sentinel cap on the HTML body the regex walks over. A real
|
||||
# Gelbooru/Moebooru post page is ~30-150KB; capping at 2MB gives
|
||||
# any legit page comfortable headroom while preventing a hostile
|
||||
# server from feeding the regex hundreds of MB and pegging CPU.
|
||||
# Audit finding #14.
|
||||
_FETCH_POST_HTML_CAP = 2 * 1024 * 1024
|
||||
|
||||
# Gelbooru tag DAPI integer code -> Capitalized label (for fetch_via_tag_api)
|
||||
_GELBOORU_TYPE_MAP: dict[int, str] = {
|
||||
0: "General",
|
||||
1: "Artist",
|
||||
3: "Copyright",
|
||||
4: "Character",
|
||||
5: "Meta",
|
||||
# 2 = Deprecated — intentionally omitted
|
||||
}
|
||||
|
||||
# Canonical display order for category-grouped tags. Matches the
|
||||
# insertion order danbooru.py and e621.py produce for their inline
|
||||
# categorization, so the info panel renders consistently across all
|
||||
# booru types.
|
||||
_CATEGORY_ORDER = [
|
||||
"Artist", "Character", "Copyright", "Species",
|
||||
"General", "Meta", "Lore",
|
||||
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CategoryFetcher
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class CategoryFetcher:
|
||||
"""Fetch and cache tag categories for boorus without inline data.
|
||||
|
||||
Three entry points share one cache:
|
||||
|
||||
* ``try_compose_from_cache`` — instant, no HTTP.
|
||||
* ``fetch_via_tag_api`` — batch fast path for Gelbooru proper.
|
||||
* ``fetch_post`` — per-post HTML scrape, universal fallback.
|
||||
|
||||
``ensure_categories`` and ``prefetch_batch`` are the public
|
||||
dispatch methods that route through these.
|
||||
"""
|
||||
|
||||
_PREFETCH_CONCURRENCY = 3 # safebooru.org soft-limits at >3
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
client: "BooruClient",
|
||||
db: "Database",
|
||||
site_id: int,
|
||||
) -> None:
|
||||
self._client = client
|
||||
self._db = db
|
||||
self._site_id = site_id
|
||||
self._sem = asyncio.Semaphore(self._PREFETCH_CONCURRENCY)
|
||||
self._inflight: dict[int, asyncio.Task] = {}
|
||||
|
||||
# Probe state for the batch tag API. Persisted to DB so
|
||||
# the probe runs at most ONCE per site, ever. Rule34's
|
||||
# broken batch API is detected on the first session; every
|
||||
# subsequent session skips the probe and goes straight to
|
||||
# HTML prefetch (saving ~0.6s of wasted probe time).
|
||||
#
|
||||
# None — not yet probed, OR last probe hit a transient
|
||||
# error. Next prefetch_batch retries the probe.
|
||||
# True — probe succeeded (Gelbooru proper). Permanent.
|
||||
# False — clean 200 + zero matching names (Rule34).
|
||||
# Permanent. Per-post HTML from now on.
|
||||
self._batch_api_works = self._load_probe_result()
|
||||
|
||||
# ----- probe result persistence -----
|
||||
|
||||
_PROBE_KEY = "__batch_api_probe__" # sentinel name in tag_types
|
||||
|
||||
def _load_probe_result(self) -> bool | None:
|
||||
"""Read the persisted probe result from the DB, or None."""
|
||||
row = self._db.get_tag_labels(self._site_id, [self._PROBE_KEY])
|
||||
val = row.get(self._PROBE_KEY)
|
||||
if val == "true":
|
||||
return True
|
||||
elif val == "false":
|
||||
return False
|
||||
return None
|
||||
|
||||
def _save_probe_result(self, result: bool) -> None:
|
||||
"""Persist the probe result so future sessions skip the probe."""
|
||||
self._db.set_tag_labels(self._site_id, {self._PROBE_KEY: "true" if result else "false"})
|
||||
|
||||
# ----- cache compose (instant, no HTTP) -----
|
||||
|
||||
def try_compose_from_cache(self, post: "Post") -> bool:
|
||||
"""Build ``post.tag_categories`` from cached labels.
|
||||
|
||||
ALWAYS populates ``post.tag_categories`` with whatever tags
|
||||
ARE cached, even if some are missing — so the info panel can
|
||||
render partial categories immediately while a fetch is
|
||||
in-flight.
|
||||
|
||||
Returns True only when **every** unique tag in the post has
|
||||
a cached label (100% coverage = no fetch needed). Returns
|
||||
False when any tags are missing, signaling the caller that a
|
||||
fetch should follow to fill the gaps.
|
||||
|
||||
This distinction is critical for ``ensure_categories``:
|
||||
partial compose populates the post for display, but the
|
||||
dispatcher continues to the fetch path because False was
|
||||
returned. Without the 100%-or-False rule, a single cached
|
||||
tag would make ``ensure_categories`` skip the fetch and
|
||||
leave the post at 1/N coverage forever.
|
||||
"""
|
||||
tags = post.tag_list
|
||||
if not tags:
|
||||
return True
|
||||
cached = self._db.get_tag_labels(self._site_id, tags)
|
||||
if not cached:
|
||||
return False
|
||||
cats: dict[str, list[str]] = {}
|
||||
for tag in tags:
|
||||
label = cached.get(tag)
|
||||
if label:
|
||||
cats.setdefault(label, []).append(tag)
|
||||
if cats:
|
||||
post.tag_categories = _canonical_order(cats)
|
||||
return len(cached) >= len(set(tags))
|
||||
|
||||
# ----- batch tag API fast path -----
|
||||
|
||||
def _batch_api_available(self) -> bool:
|
||||
"""True when the attached client declares a tag API endpoint
|
||||
AND has credentials configured."""
|
||||
return (
|
||||
self._client._tag_api_url() is not None
|
||||
and bool(self._client.api_key)
|
||||
and bool(self._client.api_user)
|
||||
)
|
||||
|
||||
def _build_tag_api_params(self, chunk: list[str]) -> dict:
|
||||
"""Params dict for a tag-DAPI batch request.
|
||||
|
||||
The ``lstrip("&")`` and ``startswith("api_key=")`` guards
|
||||
accommodate users who paste their credentials with a leading
|
||||
``&`` or as ``api_key=VALUE`` — either form gets normalised
|
||||
to a clean name→value mapping.
|
||||
"""
|
||||
params: dict = {
|
||||
"page": "dapi",
|
||||
"s": "tag",
|
||||
"q": "index",
|
||||
"json": "1",
|
||||
"names": " ".join(chunk),
|
||||
"limit": len(chunk),
|
||||
}
|
||||
if self._client.api_key and self._client.api_user:
|
||||
key = self._client.api_key.strip().lstrip("&")
|
||||
user = self._client.api_user.strip().lstrip("&")
|
||||
if key and not key.startswith("api_key="):
|
||||
params["api_key"] = key
|
||||
if user and not user.startswith("user_id="):
|
||||
params["user_id"] = user
|
||||
return params
|
||||
|
||||
async def fetch_via_tag_api(self, posts: list["Post"]) -> int:
    """Batch-fetch tag types via the booru's tag DAPI.

    Collects every unique uncached tag name across ``posts``,
    chunks into 500-name batches, GETs the tag DAPI for each
    chunk, writes the results to the cache, then runs
    ``try_compose_from_cache`` on every post.

    Per-chunk HTTP failures are logged and swallowed (``continue``)
    so one bad batch doesn't discard the others — callers must NOT
    infer API health from this method's result (see ``_do_ensure``
    for why that inference once poisoned the probe flag).

    Returns the count of newly-cached tags.
    """
    # Collect unique uncached tag names
    all_tags: set[str] = set()
    for p in posts:
        all_tags.update(p.tag_list)
    if not all_tags:
        return 0
    cached = self._db.get_tag_labels(self._site_id, list(all_tags))
    missing = [t for t in all_tags if t not in cached]
    if not missing:
        # Cache already covers everything — just compose each post.
        for p in posts:
            self.try_compose_from_cache(p)
        return 0

    tag_api_url = self._client._tag_api_url()
    if tag_api_url is None:
        return 0

    new_labels: dict[str, str] = {}
    # 500 names per request keeps the GET URL within practical limits.
    BATCH = 500
    for i in range(0, len(missing), BATCH):
        chunk = missing[i:i + BATCH]
        params = self._build_tag_api_params(chunk)
        try:
            resp = await self._client._request("GET", tag_api_url, params=params)
            resp.raise_for_status()
        except Exception as e:
            log.warning("Batch tag API failed (%d names): %s: %s",
                        len(chunk), type(e).__name__, e)
            continue
        for name, type_int in _parse_tag_response(resp):
            label = _GELBOORU_TYPE_MAP.get(type_int)
            if label:
                new_labels[name] = label

    if new_labels:
        self._db.set_tag_labels(self._site_id, new_labels)
    # Compose from the now-warm cache
    for p in posts:
        self.try_compose_from_cache(p)
    return len(new_labels)
|
||||
|
||||
# ----- per-post HTML scrape (universal fallback) -----
|
||||
|
||||
async def fetch_post(self, post: "Post") -> bool:
    """Scrape the post-view HTML page for categorized tags.

    Works on every Gelbooru fork and every Moebooru deployment.
    Does NOT require auth. Returns True on success.

    Side effects on success: ``post.tag_categories`` is replaced with
    the scraped grouping, and scraped labels are written to the
    per-site cache so later cache composes can reuse them.
    """
    url = self._client._post_view_url(post)
    if url is None:
        return False
    # Bound concurrent page scrapes; one semaphore slot per request.
    async with self._sem:
        try:
            resp = await self._client._request("GET", url)
            resp.raise_for_status()
        except Exception as e:
            log.warning("Category HTML fetch for #%d failed: %s: %s",
                        post.id, type(e).__name__, e)
            return False
        # Cap the HTML the regex walks over (audit #14). Truncation
        # vs. full read: the body is already buffered by httpx, so
        # this doesn't prevent a memory hit — but it does cap the
        # CPU spent in _TAG_ELEMENT_RE.finditer for a hostile server
        # returning hundreds of MB of HTML.
        cats, labels = _parse_post_html(resp.text[:_FETCH_POST_HTML_CAP])
        if not cats:
            return False
        post.tag_categories = _canonical_order(cats)
        if labels:
            self._db.set_tag_labels(self._site_id, labels)
        return True
|
||||
|
||||
# ----- dispatch: ensure (single post) -----
|
||||
|
||||
async def ensure_categories(self, post: "Post") -> None:
    """Guarantee ``post.tag_categories`` is fully populated.

    Dispatch:
      1. Cache compose with 100% coverage → return.
      2. Batch tag API (if available + probe passed) → return.
      3. Per-post HTML scrape → return.

    Does NOT short-circuit on non-empty ``post.tag_categories``
    because partial cache composes can leave the post at e.g.
    5/40 coverage. Only the 100%-coverage return from
    ``try_compose_from_cache`` is trusted as "done."

    Coalesces concurrent calls for the same ``post.id``.
    """
    if self.try_compose_from_cache(post):
        return

    # Coalesce: if there's an in-flight fetch for this post, await it
    # instead of launching a duplicate.
    existing = self._inflight.get(post.id)
    if existing is not None and not existing.done():
        await existing
        return

    task = asyncio.create_task(self._do_ensure(post))
    self._inflight[post.id] = task
    try:
        await task
    finally:
        # Always drop the registry entry — even when the task raised —
        # so a failed fetch doesn't block later attempts for this post.
        self._inflight.pop(post.id, None)
|
||||
|
||||
async def _do_ensure(self, post: "Post") -> None:
    """Inner dispatch for ensure_categories.

    Dispatch:
      - ``_batch_api_works is True``: call ``fetch_via_tag_api``
        directly. If it populates categories we're done; a
        transient failure leaves them empty and we fall through
        to the HTML scrape.
      - ``_batch_api_works is None``: route through
        ``_probe_batch_api``, which only flips the flag to
        True/False on a clean HTTP response. Transient errors
        leave it ``None`` so the next call retries the probe.
        Previously this path called ``fetch_via_tag_api`` and
        inferred the result from empty ``tag_categories`` — but
        ``fetch_via_tag_api`` swallows per-chunk failures with
        ``continue``, so a mid-call network drop poisoned
        ``_batch_api_works = False`` for the site permanently.
      - ``_batch_api_works is False`` or unavailable: straight
        to HTML scrape.
    """
    if self._batch_api_works is True and self._batch_api_available():
        try:
            await self.fetch_via_tag_api([post])
        except Exception as e:
            log.debug("Batch API ensure failed (transient): %s", e)
        # Populated categories means the batch path did its job.
        if post.tag_categories:
            return
    elif self._batch_api_works is None and self._batch_api_available():
        try:
            result = await self._probe_batch_api([post])
        except Exception as e:
            log.info("Batch API probe error (will retry next call): %s: %s",
                     type(e).__name__, e)
            result = None
        if result is True:
            # Probe succeeded — results cached and post composed.
            return
        # result is False (broken API) or None (transient) — fall through
    # HTML scrape fallback (works on Rule34/Safebooru.org/Moebooru,
    # returns empty on Gelbooru proper which is fine because the
    # batch path above covers Gelbooru)
    await self.fetch_post(post)
|
||||
|
||||
# ----- dispatch: prefetch (batch, fire-and-forget) -----
|
||||
|
||||
async def prefetch_batch(self, posts: list["Post"]) -> None:
    """Background prefetch for a page of search results.

    ONE fetch path per invocation — no mixing batch API + HTML
    scrape in the same call.

    Dispatch (exactly one branch executes per call):

    a. ``_batch_api_works is True``
       → ``fetch_via_tag_api`` for all uncached posts.

    b. ``_batch_api_works is None`` AND capability check passes
       → ``_probe_batch_api`` as the probe.
         - HTTP 200 + >=1 requested name matched
           → ``_batch_api_works = True``. Done.
         - HTTP 200 + 0 requested names matched
           → ``_batch_api_works = False``. Stop.
           Do NOT fall through to HTML in this call.
         - HTTP error / timeout / parse exception
           → ``_batch_api_works`` stays None. Stop.
           Next call retries the probe.

    c. ``_batch_api_works is False``, OR no ``_tag_api_url``,
       OR no auth
       → per-post ``ensure_categories`` for each uncached post,
       bounded by ``Semaphore(_PREFETCH_CONCURRENCY)``.
    """
    # Step 1: cache-compose everything we can
    uncached: list["Post"] = []
    for p in posts:
        if p.tag_categories:
            continue
        if not self.try_compose_from_cache(p):
            uncached.append(p)
    if not uncached:
        return

    # Step 2: route decision
    if self._batch_api_works is True and self._batch_api_available():
        # Branch (a): batch API known to work
        try:
            await self.fetch_via_tag_api(uncached)
        except Exception as e:
            log.warning("Batch prefetch failed: %s: %s", type(e).__name__, e)
        return

    if self._batch_api_works is None and self._batch_api_available():
        # Branch (b): probe
        try:
            result = await self._probe_batch_api(uncached)
        except Exception as e:
            # Transient error → leave _batch_api_works = None, stop
            log.info("Batch API probe error (will retry next search): %s: %s",
                     type(e).__name__, e)
            return
        if result is True:
            # Probe succeeded — results already cached, posts composed
            return
        elif result is False:
            # Probe failed cleanly — stop, don't fall through to HTML
            return
        else:
            # result is None — transient, stop, retry next call
            return

    # Branch (c): per-post HTML scrape. ensure_categories handles its
    # own coalescing; gather with return_exceptions so one bad post
    # doesn't cancel the rest of the page.
    tasks = []
    for p in uncached:
        if not p.tag_categories:
            tasks.append(asyncio.create_task(self.ensure_categories(p)))
    if tasks:
        await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
async def _probe_batch_api(self, posts: list["Post"]) -> bool | None:
    """Probe whether the batch tag API works on this site.

    Returns:
        True  — probe succeeded, _batch_api_works set to True,
                results already cached.
        False — clean HTTP 200 with 0 matching names,
                _batch_api_works set to False.
        None  — transient error, _batch_api_works stays None.
    """
    # Collect a sample of uncached tag names for the probe
    all_tags: set[str] = set()
    for p in posts:
        all_tags.update(p.tag_list)
    cached = self._db.get_tag_labels(self._site_id, list(all_tags))
    missing = [t for t in all_tags if t not in cached]
    if not missing:
        # Everything is already cached, so there is nothing to send and
        # no real probe happens. NOTE(review): this optimistically marks
        # the batch API as working (and persists it) even though the
        # cached labels could have come from HTML scrapes — confirm this
        # is intended before relying on the persisted True.
        if self._batch_api_works is None:
            self._batch_api_works = True
            self._save_probe_result(True)
        for p in posts:
            self.try_compose_from_cache(p)
        return True

    tag_api_url = self._client._tag_api_url()
    if tag_api_url is None:
        return None

    # Send one batch request (first 500 missing names is enough to probe)
    chunk = missing[:500]
    params = self._build_tag_api_params(chunk)

    try:
        resp = await self._client._request("GET", tag_api_url, params=params)
    except Exception:
        # Network/timeout error → transient, leave None
        return None

    if resp.status_code != 200:
        # Non-200 → transient, leave None
        return None

    try:
        entries = list(_parse_tag_response(resp))
    except Exception:
        # Parse error → transient, leave None
        return None

    # Check if ANY of the returned names match what we asked for
    asked = set(chunk)
    matched: dict[str, str] = {}
    for name, type_int in entries:
        label = _GELBOORU_TYPE_MAP.get(type_int)
        if label:
            matched[name] = label

    got_any = any(n in asked for n in matched)

    if got_any:
        self._batch_api_works = True
        self._save_probe_result(True)
        if matched:
            self._db.set_tag_labels(self._site_id, matched)
        # Fetch any remaining missing tags via the batch path
        await self.fetch_via_tag_api(posts)
        return True
    else:
        # Clean 200 but zero matching names → structurally broken
        self._batch_api_works = False
        self._save_probe_result(False)
        return False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Parsers (module-level, stateless)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _parse_post_html(html: str) -> tuple[dict[str, list[str]], dict[str, str]]:
    """Extract tag categories from a Gelbooru-shape / Moebooru post-view page.

    Returns ``(categories_dict, labels_dict)`` where:
      - ``categories_dict`` is ``{label: [tag_names]}`` ready for
        ``post.tag_categories``.
      - ``labels_dict`` is ``{tag_name: label}`` ready for
        ``db.set_tag_labels``.

    Uses a two-pass approach: find each ``tag-type-X`` element, then
    extract the tag name from the ``tags=NAME`` URL parameter inside
    the element's links. This avoids the `?` wiki-link ambiguity
    (Gelbooru-forks have a ``?`` link before the actual tag link).
    Returns empty on Gelbooru proper (whose post page only has ``?``
    links with no ``tags=`` parameter); that's fine because Gelbooru
    uses the batch tag API instead.
    """
    from urllib.parse import unquote

    cats: dict[str, list[str]] = {}
    labels: dict[str, str] = {}
    for m in _TAG_ELEMENT_RE.finditer(html):
        # Group 1: the tag-type CSS class; group 2: the element body.
        type_class = m.group(1).lower()
        content = m.group(2)
        label = _LABEL_MAP.get(type_class)
        if not label:
            continue
        tag_match = _TAG_NAME_RE.search(content)
        if not tag_match:
            continue
        # Tag names arrive URL-encoded inside the tags= parameter;
        # normalise to lowercase to match cached-label keys.
        tag_name = unquote(tag_match.group(1)).strip().lower()
        if not tag_name:
            continue
        cats.setdefault(label, []).append(tag_name)
        labels[tag_name] = label
    return cats, labels
|
||||
|
||||
|
||||
def _parse_tag_response(resp) -> list[tuple[str, int]]:
|
||||
"""Parse a Gelbooru-shaped tag DAPI response, JSON or XML.
|
||||
|
||||
Gelbooru proper honors ``json=1`` and returns JSON. Rule34 and
|
||||
Safebooru.org return XML even with ``json=1``. We sniff the
|
||||
body's first non-whitespace char to choose a parser.
|
||||
|
||||
Returns ``[(name, type_int), ...]``.
|
||||
"""
|
||||
body = resp.text.lstrip()
|
||||
if not body:
|
||||
return []
|
||||
out: list[tuple[str, int]] = []
|
||||
if body.startswith("<"):
|
||||
if "<!DOCTYPE" in body or "<!ENTITY" in body:
|
||||
log.warning("XML response contains DOCTYPE/ENTITY, skipping")
|
||||
return []
|
||||
try:
|
||||
root = ET.fromstring(body)
|
||||
except ET.ParseError as e:
|
||||
log.warning("Tag XML parse failed: %s", e)
|
||||
return []
|
||||
for tag in root.iter("tag"):
|
||||
name = tag.get("name")
|
||||
type_val = tag.get("type")
|
||||
if name and type_val is not None:
|
||||
try:
|
||||
out.append((name, int(type_val)))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
data = resp.json()
|
||||
except Exception as e:
|
||||
log.warning("Tag JSON parse failed: %s", e)
|
||||
return []
|
||||
if isinstance(data, dict):
|
||||
data = data.get("tag", [])
|
||||
if not isinstance(data, list):
|
||||
return []
|
||||
for entry in data:
|
||||
name = entry.get("name")
|
||||
type_val = entry.get("type")
|
||||
if name and type_val is not None:
|
||||
try:
|
||||
out.append((name, int(type_val)))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
return out
|
||||
|
||||
|
||||
def _canonical_order(cats: dict[str, list[str]]) -> dict[str, list[str]]:
    """Reorder category keys to Artist > Character > Copyright > ... > Meta.

    Labels present in ``_CATEGORY_ORDER`` come first, in that
    sequence; any remaining labels follow in their original
    insertion order.
    """
    ordered = {label: cats[label] for label in _CATEGORY_ORDER if label in cats}
    for label, names in cats.items():
        ordered.setdefault(label, names)
    return ordered
|
||||
@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
import logging
|
||||
|
||||
from ..config import DEFAULT_PAGE_SIZE
|
||||
from ._safety import redact_params
|
||||
from .base import BooruClient, Post, _parse_date
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
@ -23,7 +24,7 @@ class DanbooruClient(BooruClient):
|
||||
|
||||
url = f"{self.base_url}/posts.json"
|
||||
log.info(f"GET {url}")
|
||||
log.debug(f" params: {params}")
|
||||
log.debug(f" params: {redact_params(params)}")
|
||||
resp = await self._request("GET", url, params=params)
|
||||
log.info(f" -> {resp.status_code}")
|
||||
if resp.status_code != 200:
|
||||
@ -31,7 +32,9 @@ class DanbooruClient(BooruClient):
|
||||
resp.raise_for_status()
|
||||
try:
|
||||
data = resp.json()
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
log.warning("Danbooru search JSON parse failed: %s: %s — body: %s",
|
||||
type(e).__name__, e, resp.text[:200])
|
||||
return []
|
||||
|
||||
# Some Danbooru forks wrap in {"posts": [...]}
|
||||
@ -87,6 +90,7 @@ class DanbooruClient(BooruClient):
|
||||
width=item.get("image_width", 0),
|
||||
height=item.get("image_height", 0),
|
||||
created_at=_parse_date(item.get("created_at")),
|
||||
tag_categories=self._extract_tag_categories(item),
|
||||
)
|
||||
|
||||
async def autocomplete(self, query: str, limit: int = 10) -> list[str]:
|
||||
@ -97,7 +101,9 @@ class DanbooruClient(BooruClient):
|
||||
)
|
||||
resp.raise_for_status()
|
||||
return [item.get("value", item.get("label", "")) for item in resp.json()]
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
log.warning("Danbooru autocomplete failed for %r: %s: %s",
|
||||
query, type(e).__name__, e)
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
|
||||
@ -2,15 +2,17 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
import logging
|
||||
|
||||
from ..config import USER_AGENT
|
||||
from ..http import make_client
|
||||
from .danbooru import DanbooruClient
|
||||
from .gelbooru import GelbooruClient
|
||||
from .moebooru import MoebooruClient
|
||||
from .e621 import E621Client
|
||||
from .base import BooruClient
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
async def detect_site_type(
|
||||
url: str,
|
||||
@ -24,84 +26,83 @@ async def detect_site_type(
|
||||
url = url.rstrip("/")
|
||||
|
||||
from .base import BooruClient as _BC
|
||||
# Reuse shared client for site detection
|
||||
# Reuse shared client for site detection. Event hooks mirror
|
||||
# BooruClient.client so detection requests get the same SSRF
|
||||
# validation and connection logging as regular API calls.
|
||||
if _BC._shared_client is None or _BC._shared_client.is_closed:
|
||||
_BC._shared_client = httpx.AsyncClient(
|
||||
headers={"User-Agent": USER_AGENT},
|
||||
follow_redirects=True,
|
||||
timeout=20.0,
|
||||
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||
)
|
||||
_BC._shared_client = make_client(extra_request_hooks=[_BC._log_request])
|
||||
client = _BC._shared_client
|
||||
if True: # keep indent level
|
||||
# Try Danbooru / e621 first — /posts.json is a definitive endpoint
|
||||
try:
|
||||
params: dict = {"limit": 1}
|
||||
if api_key and api_user:
|
||||
params["login"] = api_user
|
||||
params["api_key"] = api_key
|
||||
resp = await client.get(f"{url}/posts.json", params=params)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
if isinstance(data, dict) and "posts" in data:
|
||||
# e621/e926 wraps in {"posts": [...]}, with nested file/tags dicts
|
||||
posts = data["posts"]
|
||||
if isinstance(posts, list) and posts:
|
||||
p = posts[0]
|
||||
if isinstance(p.get("file"), dict) and isinstance(p.get("tags"), dict):
|
||||
return "e621"
|
||||
return "danbooru"
|
||||
elif isinstance(data, list) and data:
|
||||
# Danbooru returns a flat list of post objects
|
||||
if isinstance(data[0], dict) and any(
|
||||
k in data[0] for k in ("tag_string", "image_width", "large_file_url")
|
||||
):
|
||||
return "danbooru"
|
||||
elif resp.status_code in (401, 403):
|
||||
if "e621" in url or "e926" in url:
|
||||
return "e621"
|
||||
# Try Danbooru / e621 first — /posts.json is a definitive endpoint
|
||||
try:
|
||||
params: dict = {"limit": 1}
|
||||
if api_key and api_user:
|
||||
params["login"] = api_user
|
||||
params["api_key"] = api_key
|
||||
resp = await client.get(f"{url}/posts.json", params=params)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
if isinstance(data, dict) and "posts" in data:
|
||||
# e621/e926 wraps in {"posts": [...]}, with nested file/tags dicts
|
||||
posts = data["posts"]
|
||||
if isinstance(posts, list) and posts:
|
||||
p = posts[0]
|
||||
if isinstance(p.get("file"), dict) and isinstance(p.get("tags"), dict):
|
||||
return "e621"
|
||||
return "danbooru"
|
||||
except Exception:
|
||||
pass
|
||||
elif isinstance(data, list) and data:
|
||||
# Danbooru returns a flat list of post objects
|
||||
if isinstance(data[0], dict) and any(
|
||||
k in data[0] for k in ("tag_string", "image_width", "large_file_url")
|
||||
):
|
||||
return "danbooru"
|
||||
elif resp.status_code in (401, 403):
|
||||
if "e621" in url or "e926" in url:
|
||||
return "e621"
|
||||
return "danbooru"
|
||||
except Exception as e:
|
||||
log.warning("Danbooru/e621 probe failed for %s: %s: %s",
|
||||
url, type(e).__name__, e)
|
||||
|
||||
# Try Gelbooru — /index.php?page=dapi
|
||||
try:
|
||||
params = {
|
||||
"page": "dapi", "s": "post", "q": "index", "json": "1", "limit": 1,
|
||||
}
|
||||
if api_key and api_user:
|
||||
params["api_key"] = api_key
|
||||
params["user_id"] = api_user
|
||||
resp = await client.get(f"{url}/index.php", params=params)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
if isinstance(data, list) and data and isinstance(data[0], dict):
|
||||
if any(k in data[0] for k in ("file_url", "preview_url", "directory")):
|
||||
return "gelbooru"
|
||||
elif isinstance(data, dict):
|
||||
if "post" in data or "@attributes" in data:
|
||||
return "gelbooru"
|
||||
elif resp.status_code in (401, 403):
|
||||
if "gelbooru" in url or "safebooru.org" in url or "rule34" in url:
|
||||
# Try Gelbooru — /index.php?page=dapi
|
||||
try:
|
||||
params = {
|
||||
"page": "dapi", "s": "post", "q": "index", "json": "1", "limit": 1,
|
||||
}
|
||||
if api_key and api_user:
|
||||
params["api_key"] = api_key
|
||||
params["user_id"] = api_user
|
||||
resp = await client.get(f"{url}/index.php", params=params)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
if isinstance(data, list) and data and isinstance(data[0], dict):
|
||||
if any(k in data[0] for k in ("file_url", "preview_url", "directory")):
|
||||
return "gelbooru"
|
||||
except Exception:
|
||||
pass
|
||||
elif isinstance(data, dict):
|
||||
if "post" in data or "@attributes" in data:
|
||||
return "gelbooru"
|
||||
elif resp.status_code in (401, 403):
|
||||
if "gelbooru" in url or "safebooru.org" in url or "rule34" in url:
|
||||
return "gelbooru"
|
||||
except Exception as e:
|
||||
log.warning("Gelbooru probe failed for %s: %s: %s",
|
||||
url, type(e).__name__, e)
|
||||
|
||||
# Try Moebooru — /post.json (singular)
|
||||
try:
|
||||
params = {"limit": 1}
|
||||
if api_key and api_user:
|
||||
params["login"] = api_user
|
||||
params["password_hash"] = api_key
|
||||
resp = await client.get(f"{url}/post.json", params=params)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
if isinstance(data, list) or (isinstance(data, dict) and "posts" in data):
|
||||
return "moebooru"
|
||||
elif resp.status_code in (401, 403):
|
||||
# Try Moebooru — /post.json (singular)
|
||||
try:
|
||||
params = {"limit": 1}
|
||||
if api_key and api_user:
|
||||
params["login"] = api_user
|
||||
params["password_hash"] = api_key
|
||||
resp = await client.get(f"{url}/post.json", params=params)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
if isinstance(data, list) or (isinstance(data, dict) and "posts" in data):
|
||||
return "moebooru"
|
||||
except Exception:
|
||||
pass
|
||||
elif resp.status_code in (401, 403):
|
||||
return "moebooru"
|
||||
except Exception as e:
|
||||
log.warning("Moebooru probe failed for %s: %s: %s",
|
||||
url, type(e).__name__, e)
|
||||
|
||||
return None
|
||||
|
||||
@ -111,8 +112,22 @@ def client_for_type(
|
||||
base_url: str,
|
||||
api_key: str | None = None,
|
||||
api_user: str | None = None,
|
||||
db=None,
|
||||
site_id: int | None = None,
|
||||
) -> BooruClient:
|
||||
"""Return the appropriate client class for an API type string."""
|
||||
"""Return the appropriate client class for an API type string.
|
||||
|
||||
When ``db`` and ``site_id`` are passed, clients that need
|
||||
post-hoc tag categorization (Gelbooru-shape, Moebooru) get a
|
||||
``CategoryFetcher`` attached. The fetcher handles the per-tag
|
||||
cache, the batch tag API fast path (for Gelbooru proper), and
|
||||
the per-post HTML scrape fallback. Danbooru and e621 categorize
|
||||
inline and don't get a fetcher.
|
||||
|
||||
Leave ``db``/``site_id`` as None for clients outside the main
|
||||
app (Test Connection dialog, scripts) — category population
|
||||
becomes a no-op.
|
||||
"""
|
||||
clients = {
|
||||
"danbooru": DanbooruClient,
|
||||
"gelbooru": GelbooruClient,
|
||||
@ -122,4 +137,8 @@ def client_for_type(
|
||||
cls = clients.get(api_type)
|
||||
if cls is None:
|
||||
raise ValueError(f"Unknown API type: {api_type}")
|
||||
return cls(base_url, api_key=api_key, api_user=api_user)
|
||||
client = cls(base_url, api_key=api_key, api_user=api_user)
|
||||
if db is not None and site_id is not None and api_type in ("gelbooru", "moebooru"):
|
||||
from .category_fetcher import CategoryFetcher
|
||||
client.category_fetcher = CategoryFetcher(client, db, site_id)
|
||||
return client
|
||||
|
||||
@ -3,10 +3,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import threading
|
||||
|
||||
import httpx
|
||||
|
||||
from ..config import DEFAULT_PAGE_SIZE, USER_AGENT
|
||||
from ._safety import redact_params, validate_public_request
|
||||
from .base import BooruClient, Post, _parse_date
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
@ -15,23 +17,62 @@ log = logging.getLogger("booru")
|
||||
class E621Client(BooruClient):
|
||||
api_type = "e621"
|
||||
|
||||
# Same shared-singleton pattern as BooruClient, but e621 needs a custom
|
||||
# User-Agent (their TOS requires identifying the app + user). When the
|
||||
# UA changes (api_user edit) we need to rebuild — and we explicitly
|
||||
# close the old client to avoid leaking its connection pool.
|
||||
_e621_client: httpx.AsyncClient | None = None
|
||||
_e621_ua: str = ""
|
||||
_e621_lock: threading.Lock = threading.Lock()
|
||||
# Old clients pending aclose. We can't await from a sync property, so
|
||||
# we stash them here and the app's shutdown coroutine drains them.
|
||||
_e621_to_close: list[httpx.AsyncClient] = []
|
||||
|
||||
@property
|
||||
def client(self) -> httpx.AsyncClient:
|
||||
ua = USER_AGENT
|
||||
if self.api_user:
|
||||
ua = f"{USER_AGENT} (by {self.api_user} on e621)"
|
||||
if E621Client._e621_client is None or E621Client._e621_client.is_closed or E621Client._e621_ua != ua:
|
||||
E621Client._e621_ua = ua
|
||||
E621Client._e621_client = httpx.AsyncClient(
|
||||
headers={"User-Agent": ua},
|
||||
follow_redirects=True,
|
||||
timeout=20.0,
|
||||
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||
)
|
||||
return E621Client._e621_client
|
||||
# Fast path
|
||||
c = E621Client._e621_client
|
||||
if c is not None and not c.is_closed and E621Client._e621_ua == ua:
|
||||
return c
|
||||
with E621Client._e621_lock:
|
||||
c = E621Client._e621_client
|
||||
if c is None or c.is_closed or E621Client._e621_ua != ua:
|
||||
# Stash old client for shutdown cleanup if it's still open.
|
||||
if c is not None and not c.is_closed:
|
||||
E621Client._e621_to_close.append(c)
|
||||
E621Client._e621_ua = ua
|
||||
c = httpx.AsyncClient(
|
||||
headers={"User-Agent": ua},
|
||||
follow_redirects=True,
|
||||
timeout=20.0,
|
||||
event_hooks={
|
||||
"request": [
|
||||
validate_public_request,
|
||||
BooruClient._log_request,
|
||||
],
|
||||
},
|
||||
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||
)
|
||||
E621Client._e621_client = c
|
||||
return c
|
||||
|
||||
@classmethod
|
||||
async def aclose_shared(cls) -> None:
|
||||
"""Cleanly aclose the active client and any UA-change leftovers."""
|
||||
with cls._e621_lock:
|
||||
current = cls._e621_client
|
||||
cls._e621_client = None
|
||||
pending = cls._e621_to_close
|
||||
cls._e621_to_close = []
|
||||
for c in [current, *pending]:
|
||||
if c is not None and not c.is_closed:
|
||||
try:
|
||||
await c.aclose()
|
||||
except Exception as e:
|
||||
log.warning("E621Client aclose failed: %s", e)
|
||||
|
||||
async def search(
|
||||
self, tags: str = "", page: int = 1, limit: int = DEFAULT_PAGE_SIZE
|
||||
@ -43,7 +84,7 @@ class E621Client(BooruClient):
|
||||
|
||||
url = f"{self.base_url}/posts.json"
|
||||
log.info(f"GET {url}")
|
||||
log.debug(f" params: {params}")
|
||||
log.debug(f" params: {redact_params(params)}")
|
||||
resp = await self._request("GET", url, params=params)
|
||||
log.info(f" -> {resp.status_code}")
|
||||
if resp.status_code != 200:
|
||||
@ -51,7 +92,9 @@ class E621Client(BooruClient):
|
||||
resp.raise_for_status()
|
||||
try:
|
||||
data = resp.json()
|
||||
except Exception:
|
||||
except ValueError as e:
|
||||
log.warning("e621 search JSON parse failed: %s: %s — body: %s",
|
||||
type(e).__name__, e, resp.text[:200])
|
||||
return []
|
||||
|
||||
# e621 wraps posts in {"posts": [...]}
|
||||
@ -109,6 +152,7 @@ class E621Client(BooruClient):
|
||||
width=self._get_nested(item, "file", "width") or 0,
|
||||
height=self._get_nested(item, "file", "height") or 0,
|
||||
created_at=_parse_date(item.get("created_at")),
|
||||
tag_categories=self._extract_tag_categories(item),
|
||||
)
|
||||
|
||||
async def autocomplete(self, query: str, limit: int = 10) -> list[str]:
|
||||
@ -123,7 +167,9 @@ class E621Client(BooruClient):
|
||||
)
|
||||
resp.raise_for_status()
|
||||
return [item.get("name", "") for item in resp.json() if item.get("name")]
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
log.warning("e621 autocomplete failed for %r: %s: %s",
|
||||
query, type(e).__name__, e)
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
|
||||
@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
import logging
|
||||
|
||||
from ..config import DEFAULT_PAGE_SIZE
|
||||
from ._safety import redact_params
|
||||
from .base import BooruClient, Post, _parse_date
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
@ -13,6 +14,12 @@ log = logging.getLogger("booru")
|
||||
class GelbooruClient(BooruClient):
|
||||
api_type = "gelbooru"
|
||||
|
||||
def _post_view_url(self, post: Post) -> str:
|
||||
return f"{self.base_url}/index.php?page=post&s=view&id={post.id}"
|
||||
|
||||
def _tag_api_url(self) -> str:
|
||||
return f"{self.base_url}/index.php"
|
||||
|
||||
async def search(
|
||||
self, tags: str = "", page: int = 1, limit: int = DEFAULT_PAGE_SIZE
|
||||
) -> list[Post]:
|
||||
@ -37,7 +44,7 @@ class GelbooruClient(BooruClient):
|
||||
|
||||
url = f"{self.base_url}/index.php"
|
||||
log.info(f"GET {url}")
|
||||
log.debug(f" params: {params}")
|
||||
log.debug(f" params: {redact_params(params)}")
|
||||
resp = await self._request("GET", url, params=params)
|
||||
log.info(f" -> {resp.status_code}")
|
||||
if resp.status_code != 200:
|
||||
@ -75,6 +82,18 @@ class GelbooruClient(BooruClient):
|
||||
created_at=_parse_date(item.get("created_at")),
|
||||
)
|
||||
)
|
||||
# Background prefetch ONLY when the batch tag API is known to
|
||||
# work (persisted probe result = True, i.e. Gelbooru proper
|
||||
# with auth). One request covers all tags for the page, so the
|
||||
# cache is warm before the user clicks. Rule34/Safebooru.org
|
||||
# skip this (batch_api_works is False or None) — their only
|
||||
# path is per-post HTML which runs on click.
|
||||
if (
|
||||
self.category_fetcher is not None
|
||||
and self.category_fetcher._batch_api_works is True
|
||||
):
|
||||
import asyncio
|
||||
asyncio.create_task(self.category_fetcher.prefetch_batch(posts))
|
||||
return posts
|
||||
|
||||
@staticmethod
|
||||
@ -107,7 +126,7 @@ class GelbooruClient(BooruClient):
|
||||
file_url = item.get("file_url", "")
|
||||
if not file_url:
|
||||
return None
|
||||
return Post(
|
||||
post = Post(
|
||||
id=item["id"],
|
||||
file_url=file_url,
|
||||
preview_url=item.get("preview_url"),
|
||||
@ -119,6 +138,9 @@ class GelbooruClient(BooruClient):
|
||||
height=item.get("height", 0),
|
||||
created_at=_parse_date(item.get("created_at")),
|
||||
)
|
||||
if self.category_fetcher is not None:
|
||||
await self.category_fetcher.prefetch_batch([post])
|
||||
return post
|
||||
|
||||
async def autocomplete(self, query: str, limit: int = 10) -> list[str]:
|
||||
try:
|
||||
@ -139,5 +161,7 @@ class GelbooruClient(BooruClient):
|
||||
if isinstance(data, dict):
|
||||
data = data.get("tag", [])
|
||||
return [t.get("name", "") for t in data if t.get("name")]
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
log.warning("Gelbooru autocomplete failed for %r: %s: %s",
|
||||
query, type(e).__name__, e)
|
||||
return []
|
||||
|
||||
@ -13,6 +13,9 @@ log = logging.getLogger("booru")
|
||||
class MoebooruClient(BooruClient):
|
||||
api_type = "moebooru"
|
||||
|
||||
def _post_view_url(self, post: Post) -> str:
|
||||
return f"{self.base_url}/post/show/{post.id}"
|
||||
|
||||
async def search(
|
||||
self, tags: str = "", page: int = 1, limit: int = DEFAULT_PAGE_SIZE
|
||||
) -> list[Post]:
|
||||
@ -25,7 +28,9 @@ class MoebooruClient(BooruClient):
|
||||
resp.raise_for_status()
|
||||
try:
|
||||
data = resp.json()
|
||||
except Exception:
|
||||
except ValueError as e:
|
||||
log.warning("Moebooru search JSON parse failed: %s: %s — body: %s",
|
||||
type(e).__name__, e, resp.text[:200])
|
||||
return []
|
||||
if isinstance(data, dict):
|
||||
data = data.get("posts", data.get("post", []))
|
||||
@ -72,7 +77,7 @@ class MoebooruClient(BooruClient):
|
||||
file_url = item.get("file_url") or item.get("jpeg_url") or ""
|
||||
if not file_url:
|
||||
return None
|
||||
return Post(
|
||||
post = Post(
|
||||
id=item["id"],
|
||||
file_url=file_url,
|
||||
preview_url=item.get("preview_url") or item.get("actual_preview_url"),
|
||||
@ -84,6 +89,9 @@ class MoebooruClient(BooruClient):
|
||||
height=item.get("height", 0),
|
||||
created_at=_parse_date(item.get("created_at")),
|
||||
)
|
||||
if self.category_fetcher is not None:
|
||||
await self.category_fetcher.prefetch_batch([post])
|
||||
return post
|
||||
|
||||
async def autocomplete(self, query: str, limit: int = 10) -> list[str]:
|
||||
try:
|
||||
@ -93,5 +101,7 @@ class MoebooruClient(BooruClient):
|
||||
)
|
||||
resp.raise_for_status()
|
||||
return [t["name"] for t in resp.json() if "name" in t]
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
log.warning("Moebooru autocomplete failed for %r: %s: %s",
|
||||
query, type(e).__name__, e)
|
||||
return []
|
||||
|
||||
@ -4,6 +4,10 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import threading
|
||||
import zipfile
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime
|
||||
@ -13,7 +17,30 @@ from urllib.parse import urlparse
|
||||
import httpx
|
||||
from PIL import Image
|
||||
|
||||
from .config import cache_dir, thumbnails_dir, USER_AGENT
|
||||
from .config import cache_dir, thumbnails_dir
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
# Hard cap on a single download. Anything advertising larger via
|
||||
# Content-Length is rejected before allocating; the running-total guard
|
||||
# in the chunk loop catches lying servers. Generous enough for typical
|
||||
# booru uploads (long doujinshi/HD video) without leaving the door open
|
||||
# to multi-GB OOM/disk-fill from a hostile or misconfigured site.
|
||||
MAX_DOWNLOAD_BYTES = 500 * 1024 * 1024 # 500 MB
|
||||
|
||||
# Threshold above which we stream to a tempfile + atomic os.replace
|
||||
# instead of buffering. Below this, the existing path is fine and the
|
||||
# regression risk of the streaming rewrite is zero.
|
||||
STREAM_TO_DISK_THRESHOLD = 50 * 1024 * 1024 # 50 MB
|
||||
|
||||
# PIL's MAX_IMAGE_PIXELS cap is set in core/__init__.py so any
|
||||
# `booru_viewer.core.*` import installs it first — see audit #8.
|
||||
|
||||
# Defends `_convert_ugoira_to_gif` against zip bombs. A real ugoira is
|
||||
# typically <500 frames at 1080p; these caps comfortably allow legit
|
||||
# content while refusing million-frame archives.
|
||||
UGOIRA_MAX_FRAMES = 5000
|
||||
UGOIRA_MAX_UNCOMPRESSED_BYTES = 500 * 1024 * 1024 # 500 MB
|
||||
|
||||
# Track all outgoing connections: {host: [timestamp, ...]}
|
||||
_connection_log: OrderedDict[str, list[str]] = OrderedDict()
|
||||
@ -36,23 +63,44 @@ def _url_hash(url: str) -> str:
|
||||
return hashlib.sha256(url.encode()).hexdigest()[:16]
|
||||
|
||||
|
||||
# Shared httpx client for connection pooling (avoids per-request TLS handshakes)
|
||||
# Shared httpx client for connection pooling (avoids per-request TLS handshakes).
|
||||
# Lazily created on first download. Lock guards the check-and-set so concurrent
|
||||
# first-callers can't both build a client and leak one. Loop affinity is
|
||||
# guaranteed by routing all downloads through `core.concurrency.run_on_app_loop`
|
||||
# (see PR2).
|
||||
_shared_client: httpx.AsyncClient | None = None
|
||||
_shared_client_lock = threading.Lock()
|
||||
|
||||
|
||||
def _get_shared_client(referer: str = "") -> httpx.AsyncClient:
|
||||
global _shared_client
|
||||
if _shared_client is None or _shared_client.is_closed:
|
||||
_shared_client = httpx.AsyncClient(
|
||||
headers={
|
||||
"User-Agent": USER_AGENT,
|
||||
"Accept": "image/*,video/*,*/*",
|
||||
},
|
||||
follow_redirects=True,
|
||||
timeout=60.0,
|
||||
limits=httpx.Limits(max_connections=10, max_keepalive_connections=5),
|
||||
)
|
||||
return _shared_client
|
||||
c = _shared_client
|
||||
if c is not None and not c.is_closed:
|
||||
return c
|
||||
# Lazy import: core.http imports from core.api._safety, which
|
||||
# lives inside the api package that imports this module, so a
|
||||
# top-level import would circular through cache.py's load.
|
||||
from .http import make_client
|
||||
with _shared_client_lock:
|
||||
c = _shared_client
|
||||
if c is None or c.is_closed:
|
||||
c = make_client(timeout=60.0, accept="image/*,video/*,*/*")
|
||||
_shared_client = c
|
||||
return c
|
||||
|
||||
|
||||
async def aclose_shared_client() -> None:
|
||||
"""Cleanly aclose the cache module's shared download client. Safe to call
|
||||
once at app shutdown; no-op if not initialized."""
|
||||
global _shared_client
|
||||
with _shared_client_lock:
|
||||
c = _shared_client
|
||||
_shared_client = None
|
||||
if c is not None and not c.is_closed:
|
||||
try:
|
||||
await c.aclose()
|
||||
except Exception as e:
|
||||
log.warning("cache shared client aclose failed: %s", e)
|
||||
|
||||
|
||||
_IMAGE_MAGIC = {
|
||||
@ -65,22 +113,49 @@ _IMAGE_MAGIC = {
|
||||
b'PK\x03\x04': True, # ZIP (ugoira)
|
||||
}
|
||||
|
||||
# Header size used by both _looks_like_media (in-memory bytes) and the
|
||||
# in-stream early validator in _do_download. 16 bytes covers JPEG (3),
|
||||
# PNG (8), GIF (6), WebP (12), MP4/MOV (8), WebM/MKV (4), and ZIP (4)
|
||||
# magics with comfortable margin.
|
||||
_MEDIA_HEADER_MIN = 16
|
||||
|
||||
|
||||
def _looks_like_media(header: bytes) -> bool:
|
||||
"""Return True if the leading bytes match a known media magic.
|
||||
|
||||
Conservative on the empty case: an empty header is "unknown",
|
||||
not "valid", because the streaming validator (audit #10) calls us
|
||||
before any bytes have arrived means the server returned nothing
|
||||
useful. The on-disk validator wraps this with an OSError fallback
|
||||
that returns True instead — see _is_valid_media.
|
||||
"""
|
||||
if not header:
|
||||
return False
|
||||
if header.startswith(b'<') or header.startswith(b'<!'):
|
||||
return False
|
||||
for magic in _IMAGE_MAGIC:
|
||||
if header.startswith(magic):
|
||||
return True
|
||||
# Not a known magic and not HTML: treat as ok (some boorus serve
|
||||
# exotic-but-legal containers we don't enumerate above).
|
||||
return b'<html' not in header.lower() and b'<!doctype' not in header.lower()
|
||||
|
||||
|
||||
def _is_valid_media(path: Path) -> bool:
|
||||
"""Check if a file looks like actual media, not an HTML error page."""
|
||||
"""Check if a file looks like actual media, not an HTML error page.
|
||||
|
||||
On transient IO errors (file locked, EBUSY, permissions hiccup), returns
|
||||
True so the caller does NOT delete the cached file. The previous behavior
|
||||
treated IO errors as "invalid", causing a delete + re-download loop on
|
||||
every access while the underlying issue persisted.
|
||||
"""
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
header = f.read(16)
|
||||
if not header or header.startswith(b'<') or header.startswith(b'<!'):
|
||||
return False
|
||||
# Check for known magic bytes
|
||||
for magic in _IMAGE_MAGIC:
|
||||
if header.startswith(magic):
|
||||
return True
|
||||
# If not a known type but not HTML, assume it's ok
|
||||
return b'<html' not in header.lower() and b'<!doctype' not in header.lower()
|
||||
except Exception:
|
||||
return False
|
||||
header = f.read(_MEDIA_HEADER_MIN)
|
||||
except OSError as e:
|
||||
log.warning("Cannot read %s for validation (%s); treating as valid", path, e)
|
||||
return True
|
||||
return _looks_like_media(header)
|
||||
|
||||
|
||||
def _ext_from_url(url: str) -> str:
|
||||
@ -91,48 +166,86 @@ def _ext_from_url(url: str) -> str:
|
||||
|
||||
|
||||
def _convert_ugoira_to_gif(zip_path: Path) -> Path:
|
||||
"""Convert a Pixiv ugoira zip (numbered JPEG/PNG frames) to an animated GIF."""
|
||||
"""Convert a Pixiv ugoira zip (numbered JPEG/PNG frames) to an animated GIF.
|
||||
|
||||
Defends against zip bombs by capping frame count and cumulative
|
||||
uncompressed size, both checked from `ZipInfo` headers BEFORE any
|
||||
decompression. Falls back to returning the original zip on any error
|
||||
so the caller still has a usable file.
|
||||
"""
|
||||
import io
|
||||
gif_path = zip_path.with_suffix(".gif")
|
||||
if gif_path.exists():
|
||||
return gif_path
|
||||
_IMG_EXTS = {".jpg", ".jpeg", ".png", ".bmp", ".webp"}
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
names = sorted(n for n in zf.namelist() if Path(n).suffix.lower() in _IMG_EXTS)
|
||||
frames = []
|
||||
for name in names:
|
||||
try:
|
||||
data = zf.read(name)
|
||||
frames.append(Image.open(io.BytesIO(data)).convert("RGBA"))
|
||||
except Exception:
|
||||
continue
|
||||
if not frames:
|
||||
# Can't convert — just return the zip path as-is
|
||||
try:
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
infos = [zi for zi in zf.infolist()
|
||||
if Path(zi.filename).suffix.lower() in _IMG_EXTS]
|
||||
if len(infos) > UGOIRA_MAX_FRAMES:
|
||||
log.warning(
|
||||
"Ugoira %s has %d frames (cap %d); skipping conversion",
|
||||
zip_path.name, len(infos), UGOIRA_MAX_FRAMES,
|
||||
)
|
||||
return zip_path
|
||||
total_uncompressed = sum(zi.file_size for zi in infos)
|
||||
if total_uncompressed > UGOIRA_MAX_UNCOMPRESSED_BYTES:
|
||||
log.warning(
|
||||
"Ugoira %s uncompressed size %d exceeds cap %d; skipping",
|
||||
zip_path.name, total_uncompressed, UGOIRA_MAX_UNCOMPRESSED_BYTES,
|
||||
)
|
||||
return zip_path
|
||||
infos.sort(key=lambda zi: zi.filename)
|
||||
frames = []
|
||||
for zi in infos:
|
||||
try:
|
||||
data = zf.read(zi)
|
||||
with Image.open(io.BytesIO(data)) as im:
|
||||
frames.append(im.convert("RGBA"))
|
||||
except Exception as e:
|
||||
log.debug("Skipping ugoira frame %s: %s", zi.filename, e)
|
||||
continue
|
||||
except (zipfile.BadZipFile, OSError) as e:
|
||||
log.warning("Ugoira zip read failed for %s: %s", zip_path.name, e)
|
||||
return zip_path
|
||||
if not frames:
|
||||
return zip_path
|
||||
try:
|
||||
frames[0].save(
|
||||
gif_path, save_all=True, append_images=frames[1:],
|
||||
duration=80, loop=0, disposal=2,
|
||||
)
|
||||
except Exception as e:
|
||||
log.warning("Ugoira GIF write failed for %s: %s", zip_path.name, e)
|
||||
return zip_path
|
||||
frames[0].save(
|
||||
gif_path, save_all=True, append_images=frames[1:],
|
||||
duration=80, loop=0, disposal=2,
|
||||
)
|
||||
if gif_path.exists():
|
||||
zip_path.unlink()
|
||||
return gif_path
|
||||
|
||||
|
||||
def _convert_animated_to_gif(source_path: Path) -> Path:
|
||||
"""Convert animated PNG or WebP to GIF for Qt playback."""
|
||||
"""Convert animated PNG or WebP to GIF for Qt playback.
|
||||
|
||||
Writes a `.failed` sentinel sibling on conversion failure so we don't
|
||||
re-attempt every access — re-trying on every paint of a malformed
|
||||
file used to chew CPU silently.
|
||||
"""
|
||||
gif_path = source_path.with_suffix(".gif")
|
||||
if gif_path.exists():
|
||||
return gif_path
|
||||
sentinel = source_path.with_suffix(source_path.suffix + ".convfailed")
|
||||
if sentinel.exists():
|
||||
return source_path
|
||||
try:
|
||||
img = Image.open(source_path)
|
||||
if not getattr(img, 'is_animated', False):
|
||||
return source_path # not animated, keep as-is
|
||||
frames = []
|
||||
durations = []
|
||||
for i in range(img.n_frames):
|
||||
img.seek(i)
|
||||
frames.append(img.convert("RGBA").copy())
|
||||
durations.append(img.info.get("duration", 80))
|
||||
with Image.open(source_path) as img:
|
||||
if not getattr(img, 'is_animated', False):
|
||||
return source_path # not animated, keep as-is
|
||||
frames = []
|
||||
durations = []
|
||||
for i in range(img.n_frames):
|
||||
img.seek(i)
|
||||
frames.append(img.convert("RGBA").copy())
|
||||
durations.append(img.info.get("duration", 80))
|
||||
if not frames:
|
||||
return source_path
|
||||
frames[0].save(
|
||||
@ -142,11 +255,91 @@ def _convert_animated_to_gif(source_path: Path) -> Path:
|
||||
if gif_path.exists():
|
||||
source_path.unlink()
|
||||
return gif_path
|
||||
except Exception:
|
||||
pass
|
||||
except Exception as e:
|
||||
log.warning("Animated->GIF conversion failed for %s: %s", source_path.name, e)
|
||||
try:
|
||||
sentinel.touch()
|
||||
except OSError:
|
||||
pass
|
||||
return source_path
|
||||
|
||||
|
||||
def _referer_for(parsed) -> str:
|
||||
"""Build a Referer header value for booru CDNs that gate downloads.
|
||||
|
||||
Uses proper hostname suffix matching instead of substring `in` to avoid
|
||||
`imgblahgelbooru.attacker.com` falsely mapping to `gelbooru.com`.
|
||||
"""
|
||||
netloc = parsed.netloc
|
||||
bare = netloc.split(":", 1)[0].lower() # strip any port
|
||||
referer_host = netloc
|
||||
if bare.endswith(".gelbooru.com") or bare == "gelbooru.com":
|
||||
referer_host = "gelbooru.com"
|
||||
elif bare.endswith(".donmai.us") or bare == "donmai.us":
|
||||
referer_host = "danbooru.donmai.us"
|
||||
return f"{parsed.scheme}://{referer_host}/"
|
||||
|
||||
|
||||
# Per-URL coalescing locks. When two callers race on the same URL (e.g.
|
||||
# grid prefetch + an explicit click on the same thumbnail), only one
|
||||
# does the actual download; the other waits and reads the cached file.
|
||||
# Loop-bound, but the existing module is already loop-bound, so this
|
||||
# doesn't make anything worse and is fixed cleanly in PR2.
|
||||
#
|
||||
# Capped at _URL_LOCKS_MAX entries (audit finding #5). The previous
|
||||
# defaultdict grew unbounded over a long browsing session, and an
|
||||
# adversarial booru returning cache-buster query strings could turn
|
||||
# the leak into an OOM DoS.
|
||||
_URL_LOCKS_MAX = 4096
|
||||
_url_locks: "OrderedDict[str, asyncio.Lock]" = OrderedDict()
|
||||
|
||||
|
||||
def _get_url_lock(h: str) -> asyncio.Lock:
|
||||
"""Return the asyncio.Lock for URL hash *h*, creating it if needed.
|
||||
|
||||
Touches LRU order on every call so frequently-accessed hashes
|
||||
survive eviction. The first call for a new hash inserts it and
|
||||
triggers _evict_url_locks() to trim back toward the cap.
|
||||
"""
|
||||
lock = _url_locks.get(h)
|
||||
if lock is None:
|
||||
lock = asyncio.Lock()
|
||||
_url_locks[h] = lock
|
||||
_evict_url_locks(skip=h)
|
||||
else:
|
||||
_url_locks.move_to_end(h)
|
||||
return lock
|
||||
|
||||
|
||||
def _evict_url_locks(skip: str) -> None:
|
||||
"""Trim _url_locks back toward _URL_LOCKS_MAX, oldest first.
|
||||
|
||||
Each pass skips:
|
||||
- the hash *skip* we just inserted (it's the youngest — evicting
|
||||
it immediately would be self-defeating), and
|
||||
- any entry whose lock is currently held (we can't drop a lock
|
||||
that a coroutine is mid-`async with` on without that coroutine
|
||||
blowing up on exit).
|
||||
|
||||
Stops as soon as one pass finds no evictable entries — that
|
||||
handles the edge case where every remaining entry is either
|
||||
*skip* or currently held. In that state the cap is temporarily
|
||||
exceeded; the next insertion will retry eviction.
|
||||
"""
|
||||
while len(_url_locks) > _URL_LOCKS_MAX:
|
||||
evicted = False
|
||||
for old_h in list(_url_locks.keys()):
|
||||
if old_h == skip:
|
||||
continue
|
||||
if _url_locks[old_h].locked():
|
||||
continue
|
||||
_url_locks.pop(old_h, None)
|
||||
evicted = True
|
||||
break
|
||||
if not evicted:
|
||||
return
|
||||
|
||||
|
||||
async def download_image(
|
||||
url: str,
|
||||
client: httpx.AsyncClient | None = None,
|
||||
@ -161,75 +354,45 @@ async def download_image(
|
||||
filename = _url_hash(url) + _ext_from_url(url)
|
||||
local = dest_dir / filename
|
||||
|
||||
# Check if a ugoira zip was already converted to gif
|
||||
if local.suffix.lower() == ".zip":
|
||||
gif_path = local.with_suffix(".gif")
|
||||
if gif_path.exists():
|
||||
return gif_path
|
||||
# If the zip is cached but not yet converted, convert it now.
|
||||
# PIL frame iteration is CPU-bound and would block the asyncio
|
||||
# loop for hundreds of ms — run it in a worker thread instead.
|
||||
if local.exists() and zipfile.is_zipfile(local):
|
||||
return await asyncio.to_thread(_convert_ugoira_to_gif, local)
|
||||
async with _get_url_lock(_url_hash(url)):
|
||||
# Check if a ugoira zip was already converted to gif
|
||||
if local.suffix.lower() == ".zip":
|
||||
gif_path = local.with_suffix(".gif")
|
||||
if gif_path.exists():
|
||||
return gif_path
|
||||
# If the zip is cached but not yet converted, convert it now.
|
||||
# PIL frame iteration is CPU-bound and would block the asyncio
|
||||
# loop for hundreds of ms — run it in a worker thread instead.
|
||||
if local.exists() and zipfile.is_zipfile(local):
|
||||
return await asyncio.to_thread(_convert_ugoira_to_gif, local)
|
||||
|
||||
# Check if animated PNG/WebP was already converted to gif
|
||||
if local.suffix.lower() in (".png", ".webp"):
|
||||
gif_path = local.with_suffix(".gif")
|
||||
if gif_path.exists():
|
||||
return gif_path
|
||||
# Check if animated PNG/WebP was already converted to gif
|
||||
if local.suffix.lower() in (".png", ".webp"):
|
||||
gif_path = local.with_suffix(".gif")
|
||||
if gif_path.exists():
|
||||
return gif_path
|
||||
|
||||
# Validate cached file isn't corrupt (e.g. HTML error page saved as image)
|
||||
if local.exists():
|
||||
if _is_valid_media(local):
|
||||
# Convert animated PNG/WebP on access if not yet converted
|
||||
if local.suffix.lower() in (".png", ".webp"):
|
||||
converted = await asyncio.to_thread(_convert_animated_to_gif, local)
|
||||
if converted != local:
|
||||
return converted
|
||||
return local
|
||||
else:
|
||||
local.unlink() # Remove corrupt cache entry
|
||||
# Validate cached file isn't corrupt (e.g. HTML error page saved as image)
|
||||
if local.exists():
|
||||
if _is_valid_media(local):
|
||||
# Convert animated PNG/WebP on access if not yet converted
|
||||
if local.suffix.lower() in (".png", ".webp"):
|
||||
converted = await asyncio.to_thread(_convert_animated_to_gif, local)
|
||||
if converted != local:
|
||||
return converted
|
||||
return local
|
||||
else:
|
||||
local.unlink() # Remove corrupt cache entry
|
||||
|
||||
# Extract referer from URL domain (needed for Gelbooru CDN etc.)
|
||||
parsed = urlparse(url)
|
||||
# Map CDN hostnames back to the main site
|
||||
referer_host = parsed.netloc
|
||||
if referer_host.startswith("img") and "gelbooru" in referer_host:
|
||||
referer_host = "gelbooru.com"
|
||||
elif referer_host.startswith("cdn") and "donmai" in referer_host:
|
||||
referer_host = "danbooru.donmai.us"
|
||||
referer = f"{parsed.scheme}://{referer_host}/"
|
||||
parsed = urlparse(url)
|
||||
referer = _referer_for(parsed)
|
||||
log_connection(url)
|
||||
req_headers = {"Referer": referer}
|
||||
|
||||
log_connection(url)
|
||||
if client is None:
|
||||
client = _get_shared_client()
|
||||
|
||||
req_headers = {"Referer": referer}
|
||||
|
||||
own_client = client is None
|
||||
if own_client:
|
||||
client = _get_shared_client()
|
||||
try:
|
||||
if progress_callback:
|
||||
async with client.stream("GET", url, headers=req_headers) as resp:
|
||||
resp.raise_for_status()
|
||||
content_type = resp.headers.get("content-type", "")
|
||||
if "text/html" in content_type:
|
||||
raise ValueError(f"Server returned HTML instead of media (possible captcha/block)")
|
||||
total = int(resp.headers.get("content-length", 0))
|
||||
downloaded = 0
|
||||
chunks = []
|
||||
async for chunk in resp.aiter_bytes(8192):
|
||||
chunks.append(chunk)
|
||||
downloaded += len(chunk)
|
||||
progress_callback(downloaded, total)
|
||||
data = b"".join(chunks)
|
||||
local.write_bytes(data)
|
||||
else:
|
||||
resp = await client.get(url, headers=req_headers)
|
||||
resp.raise_for_status()
|
||||
content_type = resp.headers.get("content-type", "")
|
||||
if "text/html" in content_type:
|
||||
raise ValueError(f"Server returned HTML instead of media (possible captcha/block)")
|
||||
local.write_bytes(resp.content)
|
||||
await _do_download(client, url, req_headers, local, progress_callback)
|
||||
|
||||
# Verify the downloaded file
|
||||
if not _is_valid_media(local):
|
||||
@ -243,11 +406,112 @@ async def download_image(
|
||||
# Convert animated PNG/WebP to GIF for Qt playback
|
||||
elif local.suffix.lower() in (".png", ".webp"):
|
||||
local = await asyncio.to_thread(_convert_animated_to_gif, local)
|
||||
finally:
|
||||
pass # shared client stays open for connection reuse
|
||||
return local
|
||||
|
||||
|
||||
async def _do_download(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
req_headers: dict,
|
||||
local: Path,
|
||||
progress_callback,
|
||||
) -> None:
|
||||
"""Perform the actual HTTP fetch and write to `local`.
|
||||
|
||||
Splits on size: small/unknown payloads buffer in memory and write atomically;
|
||||
large payloads stream to a tempfile in the same directory and `os.replace`
|
||||
on completion. The split keeps the existing fast-path for thumbnails (which
|
||||
is the vast majority of downloads) while preventing OOM on multi-hundred-MB
|
||||
videos. Both paths enforce `MAX_DOWNLOAD_BYTES` against the advertised
|
||||
Content-Length AND the running total (servers can lie about length).
|
||||
"""
|
||||
async with client.stream("GET", url, headers=req_headers) as resp:
|
||||
resp.raise_for_status()
|
||||
content_type = resp.headers.get("content-type", "")
|
||||
if "text/html" in content_type:
|
||||
raise ValueError("Server returned HTML instead of media (possible captcha/block)")
|
||||
|
||||
try:
|
||||
total = int(resp.headers.get("content-length", 0))
|
||||
except (TypeError, ValueError):
|
||||
total = 0
|
||||
if total > MAX_DOWNLOAD_BYTES:
|
||||
raise ValueError(
|
||||
f"Download too large: {total} bytes (cap {MAX_DOWNLOAD_BYTES})"
|
||||
)
|
||||
|
||||
# Audit #10: accumulate the leading bytes (≥16) before
|
||||
# committing to writing the rest. A hostile server that omits
|
||||
# Content-Type and ignores the HTML check could otherwise
|
||||
# stream up to MAX_DOWNLOAD_BYTES of garbage to disk before
|
||||
# the post-download _is_valid_media check rejects and deletes
|
||||
# it. We accumulate across chunks because slow servers (or
|
||||
# chunked encoding with tiny chunks) can deliver fewer than
|
||||
# 16 bytes in the first chunk and validation would false-fail.
|
||||
use_large = total >= STREAM_TO_DISK_THRESHOLD
|
||||
chunk_iter = resp.aiter_bytes(64 * 1024 if use_large else 8192)
|
||||
|
||||
header_buf = bytearray()
|
||||
async for chunk in chunk_iter:
|
||||
header_buf.extend(chunk)
|
||||
if len(header_buf) >= _MEDIA_HEADER_MIN:
|
||||
break
|
||||
if len(header_buf) > MAX_DOWNLOAD_BYTES:
|
||||
raise ValueError(
|
||||
f"Download exceeded cap mid-stream: {len(header_buf)} bytes"
|
||||
)
|
||||
if not _looks_like_media(bytes(header_buf)):
|
||||
raise ValueError("Downloaded data is not valid media")
|
||||
|
||||
if use_large:
|
||||
# Large download: stream to tempfile in the same dir, atomic replace.
|
||||
local.parent.mkdir(parents=True, exist_ok=True)
|
||||
fd, tmp_name = tempfile.mkstemp(
|
||||
prefix=f".{local.name}.", suffix=".part", dir=str(local.parent)
|
||||
)
|
||||
tmp_path = Path(tmp_name)
|
||||
try:
|
||||
downloaded = len(header_buf)
|
||||
with os.fdopen(fd, "wb") as out:
|
||||
out.write(header_buf)
|
||||
if progress_callback:
|
||||
progress_callback(downloaded, total)
|
||||
async for chunk in chunk_iter:
|
||||
out.write(chunk)
|
||||
downloaded += len(chunk)
|
||||
if downloaded > MAX_DOWNLOAD_BYTES:
|
||||
raise ValueError(
|
||||
f"Download exceeded cap mid-stream: {downloaded} bytes"
|
||||
)
|
||||
if progress_callback:
|
||||
progress_callback(downloaded, total)
|
||||
os.replace(tmp_path, local)
|
||||
except BaseException:
|
||||
# BaseException on purpose: also clean up the .part file on
|
||||
# Ctrl-C / task cancellation, not just on Exception.
|
||||
try:
|
||||
tmp_path.unlink(missing_ok=True)
|
||||
except OSError:
|
||||
pass
|
||||
raise
|
||||
else:
|
||||
# Small/unknown size: buffer in memory, write whole.
|
||||
chunks: list[bytes] = [bytes(header_buf)]
|
||||
downloaded = len(header_buf)
|
||||
if progress_callback:
|
||||
progress_callback(downloaded, total)
|
||||
async for chunk in chunk_iter:
|
||||
chunks.append(chunk)
|
||||
downloaded += len(chunk)
|
||||
if downloaded > MAX_DOWNLOAD_BYTES:
|
||||
raise ValueError(
|
||||
f"Download exceeded cap mid-stream: {downloaded} bytes"
|
||||
)
|
||||
if progress_callback:
|
||||
progress_callback(downloaded, total)
|
||||
local.write_bytes(b"".join(chunks))
|
||||
|
||||
|
||||
async def download_thumbnail(
|
||||
url: str,
|
||||
client: httpx.AsyncClient | None = None,
|
||||
@ -266,17 +530,51 @@ def is_cached(url: str, dest_dir: Path | None = None) -> bool:
|
||||
return cached_path_for(url, dest_dir).exists()
|
||||
|
||||
|
||||
def delete_from_library(post_id: int, folder: str | None = None) -> bool:
|
||||
"""Delete a saved image from the library. Returns True if a file was deleted."""
|
||||
from .config import saved_dir, saved_folder_dir
|
||||
search_dir = saved_folder_dir(folder) if folder else saved_dir()
|
||||
from .config import MEDIA_EXTENSIONS
|
||||
for ext in MEDIA_EXTENSIONS:
|
||||
path = search_dir / f"{post_id}{ext}"
|
||||
if path.exists():
|
||||
def delete_from_library(post_id: int, folder: str | None = None, db=None) -> bool:
|
||||
"""Delete every saved copy of `post_id` from the library.
|
||||
|
||||
Returns True if at least one file was deleted.
|
||||
|
||||
The `folder` argument is kept for back-compat with existing call sites
|
||||
but is now ignored — we walk every library folder by post id and delete
|
||||
all matches. This is what makes the "bookmark folder ≠ library folder"
|
||||
separation work: a bookmark no longer needs to know which folder its
|
||||
library file lives in. It also cleans up duplicates left by the old
|
||||
pre-fix "save to folder = copy" bug in a single Unsave action.
|
||||
|
||||
Pass `db` to also match templated filenames (post-refactor saves
|
||||
that aren't named {post_id}.{ext}) and to clean up the library_meta
|
||||
row in the same call. Without `db`, only digit-stem files are
|
||||
found and the meta row stays — that's the old broken behavior,
|
||||
preserved as a fallback for callers that don't have a Database
|
||||
handle.
|
||||
"""
|
||||
from .config import find_library_files
|
||||
matches = find_library_files(post_id, db=db)
|
||||
deleted = False
|
||||
for path in matches:
|
||||
try:
|
||||
path.unlink()
|
||||
return True
|
||||
return False
|
||||
deleted = True
|
||||
except OSError:
|
||||
pass
|
||||
# Always drop the meta row, even when no files were unlinked.
|
||||
# Two cases this matters for:
|
||||
# 1. Files were on disk and unlinked — meta row is now stale.
|
||||
# 2. Files were already gone (orphan meta row from a previous
|
||||
# delete that didn't clean up). The user asked to "unsave"
|
||||
# this post and the meta should reflect that, even if
|
||||
# there's nothing left on disk.
|
||||
# Without this cleanup the post stays "saved" in the DB and
|
||||
# is_post_in_library lies forever. The lookup is keyed by
|
||||
# post_id so this is one cheap DELETE regardless of how many
|
||||
# copies were on disk.
|
||||
if db is not None:
|
||||
try:
|
||||
db.remove_library_meta(post_id)
|
||||
except Exception:
|
||||
pass
|
||||
return deleted
|
||||
|
||||
|
||||
def cache_size_bytes(include_thumbnails: bool = True) -> int:
|
||||
@ -294,23 +592,36 @@ def cache_file_count(include_thumbnails: bool = True) -> tuple[int, int]:
|
||||
return images, thumbs
|
||||
|
||||
|
||||
def evict_oldest(max_bytes: int, protected_paths: set[str] | None = None) -> int:
|
||||
"""Delete oldest non-protected cached images until under max_bytes. Returns count deleted."""
|
||||
protected = protected_paths or set()
|
||||
files = sorted(cache_dir().iterdir(), key=lambda f: f.stat().st_mtime)
|
||||
deleted = 0
|
||||
current = cache_size_bytes(include_thumbnails=False)
|
||||
def evict_oldest(max_bytes: int, protected_paths: set[str] | None = None,
|
||||
current_bytes: int | None = None) -> int:
|
||||
"""Delete oldest non-protected cached images until under max_bytes. Returns count deleted.
|
||||
|
||||
for f in files:
|
||||
*current_bytes* avoids a redundant directory scan when the caller
|
||||
already measured the cache size.
|
||||
"""
|
||||
protected = protected_paths or set()
|
||||
# Single directory walk: collect (path, stat) pairs, sort by mtime,
|
||||
# and sum sizes — avoids the previous pattern of iterdir() for the
|
||||
# sort + a second full iterdir()+stat() inside cache_size_bytes().
|
||||
entries = []
|
||||
total = 0
|
||||
for f in cache_dir().iterdir():
|
||||
if not f.is_file():
|
||||
continue
|
||||
st = f.stat()
|
||||
entries.append((f, st))
|
||||
total += st.st_size
|
||||
current = current_bytes if current_bytes is not None else total
|
||||
entries.sort(key=lambda e: e[1].st_mtime)
|
||||
deleted = 0
|
||||
for f, st in entries:
|
||||
if current <= max_bytes:
|
||||
break
|
||||
if not f.is_file() or str(f) in protected:
|
||||
if str(f) in protected or f.suffix == ".part":
|
||||
continue
|
||||
size = f.stat().st_size
|
||||
f.unlink()
|
||||
current -= size
|
||||
current -= st.st_size
|
||||
deleted += 1
|
||||
|
||||
return deleted
|
||||
|
||||
|
||||
@ -319,17 +630,23 @@ def evict_oldest_thumbnails(max_bytes: int) -> int:
|
||||
td = thumbnails_dir()
|
||||
if not td.exists():
|
||||
return 0
|
||||
files = sorted(td.iterdir(), key=lambda f: f.stat().st_mtime)
|
||||
deleted = 0
|
||||
current = sum(f.stat().st_size for f in td.iterdir() if f.is_file())
|
||||
for f in files:
|
||||
if current <= max_bytes:
|
||||
break
|
||||
entries = []
|
||||
current = 0
|
||||
for f in td.iterdir():
|
||||
if not f.is_file():
|
||||
continue
|
||||
size = f.stat().st_size
|
||||
st = f.stat()
|
||||
entries.append((f, st))
|
||||
current += st.st_size
|
||||
if current <= max_bytes:
|
||||
return 0
|
||||
entries.sort(key=lambda e: e[1].st_mtime)
|
||||
deleted = 0
|
||||
for f, st in entries:
|
||||
if current <= max_bytes:
|
||||
break
|
||||
f.unlink()
|
||||
current -= size
|
||||
current -= st.st_size
|
||||
deleted += 1
|
||||
return deleted
|
||||
|
||||
|
||||
64
booru_viewer/core/concurrency.py
Normal file
64
booru_viewer/core/concurrency.py
Normal file
@ -0,0 +1,64 @@
|
||||
"""Process-wide handle to the app's persistent asyncio event loop.
|
||||
|
||||
The GUI runs Qt on the main thread and a single long-lived asyncio loop in
|
||||
a daemon thread (`BooruApp._async_thread`). Every async piece of code in the
|
||||
app — searches, downloads, autocomplete, site detection, bookmark thumb
|
||||
loading — must run on that one loop. Without this guarantee, the shared
|
||||
httpx clients (which httpx binds to whatever loop first instantiated them)
|
||||
end up attached to a throwaway loop from a `threading.Thread + asyncio.run`
|
||||
worker, then break the next time the persistent loop tries to use them
|
||||
("attached to a different loop" / "Event loop is closed").
|
||||
|
||||
This module is the single source of truth for "the loop". `BooruApp.__init__`
|
||||
calls `set_app_loop()` once after constructing it; everything else uses
|
||||
`run_on_app_loop()` to schedule coroutines from any thread.
|
||||
|
||||
Why a module global instead of passing the loop everywhere: it avoids
|
||||
threading a parameter through every dialog, view, and helper. There's only
|
||||
one loop in the process, ever, so a global is the honest representation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from concurrent.futures import Future
|
||||
from typing import Any, Awaitable, Callable
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
_app_loop: asyncio.AbstractEventLoop | None = None
|
||||
|
||||
|
||||
def set_app_loop(loop: asyncio.AbstractEventLoop) -> None:
|
||||
"""Register the persistent event loop. Called once at app startup."""
|
||||
global _app_loop
|
||||
_app_loop = loop
|
||||
|
||||
|
||||
def get_app_loop() -> asyncio.AbstractEventLoop:
|
||||
"""Return the persistent event loop. Raises if `set_app_loop` was never called."""
|
||||
if _app_loop is None:
|
||||
raise RuntimeError(
|
||||
"App event loop not initialized — call set_app_loop() before "
|
||||
"scheduling any async work."
|
||||
)
|
||||
return _app_loop
|
||||
|
||||
|
||||
def run_on_app_loop(
|
||||
coro: Awaitable[Any],
|
||||
done_callback: Callable[[Future], None] | None = None,
|
||||
) -> Future:
|
||||
"""Schedule `coro` on the app's persistent event loop from any thread.
|
||||
|
||||
Returns a `concurrent.futures.Future` (not asyncio.Future) — same shape as
|
||||
`asyncio.run_coroutine_threadsafe`. If `done_callback` is provided, it
|
||||
runs on the loop thread when the coroutine finishes; the callback is
|
||||
responsible for marshaling results back to the GUI thread (typically by
|
||||
emitting a Qt Signal connected with `Qt.ConnectionType.QueuedConnection`).
|
||||
"""
|
||||
fut = asyncio.run_coroutine_threadsafe(coro, get_app_loop())
|
||||
if done_callback is not None:
|
||||
fut.add_done_callback(done_callback)
|
||||
return fut
|
||||
@ -4,12 +4,29 @@ from __future__ import annotations
|
||||
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .api.base import Post
|
||||
|
||||
APPNAME = "booru-viewer"
|
||||
IS_WINDOWS = sys.platform == "win32"
|
||||
|
||||
# Windows reserved device names (audit finding #7). Filenames whose stem
|
||||
# (before the first dot) lower-cases to one of these are illegal on
|
||||
# Windows because the OS routes opens of `con.jpg` to the CON device.
|
||||
# Checked by render_filename_template() unconditionally so a library
|
||||
# saved on Linux can still be copied to a Windows machine without
|
||||
# breaking on these stems.
|
||||
_WINDOWS_RESERVED_NAMES = frozenset({
|
||||
"con", "prn", "aux", "nul",
|
||||
*{f"com{i}" for i in range(1, 10)},
|
||||
*{f"lpt{i}" for i in range(1, 10)},
|
||||
})
|
||||
|
||||
|
||||
def hypr_rules_enabled() -> bool:
|
||||
"""Whether the in-code hyprctl dispatches that change window state
|
||||
@ -39,7 +56,15 @@ def popout_aspect_lock_enabled() -> bool:
|
||||
|
||||
|
||||
def data_dir() -> Path:
|
||||
"""Return the platform-appropriate data/cache directory."""
|
||||
"""Return the platform-appropriate data/cache directory.
|
||||
|
||||
On POSIX, the directory is chmod'd to 0o700 after creation so the
|
||||
SQLite DB inside (and the api_key/api_user columns it stores) are
|
||||
not exposed to other local users on shared workstations or
|
||||
networked home dirs with permissive umasks. On Windows the chmod
|
||||
is a no-op — NTFS ACLs handle access control separately and the
|
||||
OS already restricts AppData\\Roaming\\<app> to the owning user.
|
||||
"""
|
||||
if IS_WINDOWS:
|
||||
base = Path.home() / "AppData" / "Roaming"
|
||||
else:
|
||||
@ -50,6 +75,13 @@ def data_dir() -> Path:
|
||||
)
|
||||
path = base / APPNAME
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
if not IS_WINDOWS:
|
||||
try:
|
||||
os.chmod(path, 0o700)
|
||||
except OSError:
|
||||
# Filesystem may not support chmod (e.g. some FUSE mounts).
|
||||
# Better to keep working than refuse to start.
|
||||
pass
|
||||
return path
|
||||
|
||||
|
||||
@ -86,10 +118,22 @@ def saved_dir() -> Path:
|
||||
|
||||
|
||||
def saved_folder_dir(folder: str) -> Path:
|
||||
"""Return a subfolder inside saved images."""
|
||||
path = saved_dir() / folder
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
"""Return a subfolder inside saved images, refusing path traversal.
|
||||
|
||||
Folder names should normally be filtered by `db._validate_folder_name`
|
||||
before reaching the filesystem, but this is a defense-in-depth check:
|
||||
resolve the candidate path and ensure it's still inside `saved_dir()`.
|
||||
Anything that escapes (`..`, absolute paths, symlink shenanigans) raises
|
||||
ValueError instead of silently writing to disk wherever the string points.
|
||||
"""
|
||||
base = saved_dir().resolve()
|
||||
candidate = (base / folder).resolve()
|
||||
try:
|
||||
candidate.relative_to(base)
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Folder escapes saved directory: {folder!r}") from e
|
||||
candidate.mkdir(parents=True, exist_ok=True)
|
||||
return candidate
|
||||
|
||||
|
||||
def db_path() -> Path:
|
||||
@ -97,6 +141,187 @@ def db_path() -> Path:
|
||||
return data_dir() / "booru.db"
|
||||
|
||||
|
||||
def library_folders() -> list[str]:
|
||||
"""List library folder names — direct subdirectories of saved_dir().
|
||||
|
||||
The library is filesystem-truth: a folder exists iff there is a real
|
||||
directory on disk. There is no separate DB list of folder names. This
|
||||
is the source the "Save to Library → folder" menus everywhere should
|
||||
read from. Bookmark folders (DB-backed) are a different concept.
|
||||
"""
|
||||
root = saved_dir()
|
||||
if not root.is_dir():
|
||||
return []
|
||||
return sorted(d.name for d in root.iterdir() if d.is_dir())
|
||||
|
||||
|
||||
def find_library_files(post_id: int, db=None) -> list[Path]:
|
||||
"""Return all library files matching `post_id` across every folder.
|
||||
|
||||
The library has a flat shape: root + one level of subdirectories.
|
||||
Walks shallowly (one iterdir of root + one iterdir per subdir)
|
||||
and matches files in two ways:
|
||||
1. Legacy v0.2.3 layout: stem equals str(post_id) (e.g. 12345.jpg).
|
||||
2. Templated layout (post-refactor): basename appears in
|
||||
`library_meta.filename` for this post_id.
|
||||
|
||||
The templated match requires `db` — when None, only the legacy
|
||||
digit-stem path runs. Pass `db=self._db` from any caller that
|
||||
has a Database instance handy (essentially every gui caller).
|
||||
Used by:
|
||||
- delete_from_library (delete every copy on disk)
|
||||
- main_window's bookmark→library preview lookup
|
||||
- the unified save flow's pre-existing-copy detection (now
|
||||
handled inside save_post_file via _same_post_on_disk)
|
||||
"""
|
||||
matches: list[Path] = []
|
||||
root = saved_dir()
|
||||
if not root.is_dir():
|
||||
return matches
|
||||
|
||||
stem = str(post_id)
|
||||
|
||||
# Templated filenames stored for this post, if a db handle was passed.
|
||||
templated: set[str] = set()
|
||||
if db is not None:
|
||||
try:
|
||||
rows = db.conn.execute(
|
||||
"SELECT filename FROM library_meta WHERE post_id = ? AND filename != ''",
|
||||
(post_id,),
|
||||
).fetchall()
|
||||
templated = {r["filename"] for r in rows}
|
||||
except Exception:
|
||||
pass # DB issue → degrade to digit-stem-only matching
|
||||
|
||||
def _matches(p: Path) -> bool:
|
||||
if p.suffix.lower() not in MEDIA_EXTENSIONS:
|
||||
return False
|
||||
if p.stem == stem:
|
||||
return True
|
||||
if p.name in templated:
|
||||
return True
|
||||
return False
|
||||
|
||||
for entry in root.iterdir():
|
||||
if entry.is_file() and _matches(entry):
|
||||
matches.append(entry)
|
||||
elif entry.is_dir():
|
||||
for sub in entry.iterdir():
|
||||
if sub.is_file() and _matches(sub):
|
||||
matches.append(sub)
|
||||
return matches
|
||||
|
||||
|
||||
def render_filename_template(template: str, post: "Post", ext: str) -> str:
|
||||
"""Render a filename template against a Post into a filesystem-safe basename.
|
||||
|
||||
Tokens supported:
|
||||
%id% post id
|
||||
%md5% md5 hash extracted from file_url (empty if URL doesn't carry one)
|
||||
%ext% extension without the leading dot
|
||||
%rating% post.rating or empty
|
||||
%score% post.score
|
||||
%artist% underscore-joined names from post.tag_categories["artist"]
|
||||
%character% same, character category
|
||||
%copyright% same, copyright category
|
||||
%general% same, general category
|
||||
%meta% same, meta category
|
||||
%species% same, species category
|
||||
|
||||
The returned string is a basename including the extension. If `template`
|
||||
is empty or post-sanitization the rendered stem is empty, falls back to
|
||||
f"{post.id}{ext}" so callers always get a usable name.
|
||||
|
||||
The rendered stem is capped at 200 characters before the extension is
|
||||
appended. This stays under the 255-byte ext4/NTFS filename limit for
|
||||
typical ASCII/Latin-1 templates; users typing emoji-heavy templates may
|
||||
still hit the limit but won't see a hard error from this function.
|
||||
|
||||
Sanitization replaces filesystem-reserved characters (`/\\:*?"<>|`) with
|
||||
underscores, collapses whitespace runs to a single underscore, and strips
|
||||
leading/trailing dots/spaces and `..` prefixes so the rendered name can't
|
||||
escape the destination directory or trip Windows' trailing-dot quirk.
|
||||
"""
|
||||
if not template:
|
||||
return f"{post.id}{ext}"
|
||||
|
||||
cats = post.tag_categories or {}
|
||||
|
||||
def _join_cat(name: str) -> str:
|
||||
# API clients (danbooru.py, e621.py) store categories with
|
||||
# Capitalized keys ("Artist", "Character", ...) — that's the
|
||||
# convention info_panel/preview_pane already iterate against.
|
||||
# Accept either casing here so future drift in either direction
|
||||
# doesn't silently break templates.
|
||||
items = cats.get(name) or cats.get(name.lower()) or cats.get(name.capitalize()) or []
|
||||
return "_".join(items)
|
||||
|
||||
# %md5% — most boorus name files by md5 in the URL path
|
||||
# (e.g. https://cdn.donmai.us/original/0a/1b/0a1b...md5...{ext}).
|
||||
# Extract the URL stem and accept it only if it's 32 hex chars.
|
||||
md5 = ""
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
url_path = urlparse(post.file_url).path
|
||||
url_stem = Path(url_path).stem
|
||||
if len(url_stem) == 32 and all(c in "0123456789abcdef" for c in url_stem.lower()):
|
||||
md5 = url_stem
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
has_ext_token = "%ext%" in template
|
||||
replacements = {
|
||||
"%id%": str(post.id),
|
||||
"%md5%": md5,
|
||||
"%ext%": ext.lstrip("."),
|
||||
"%rating%": post.rating or "",
|
||||
"%score%": str(post.score),
|
||||
"%artist%": _join_cat("Artist"),
|
||||
"%character%": _join_cat("Character"),
|
||||
"%copyright%": _join_cat("Copyright"),
|
||||
"%general%": _join_cat("General"),
|
||||
"%meta%": _join_cat("Meta"),
|
||||
"%species%": _join_cat("Species"),
|
||||
}
|
||||
|
||||
rendered = template
|
||||
for token, value in replacements.items():
|
||||
rendered = rendered.replace(token, value)
|
||||
|
||||
# Sanitization: filesystem-reserved chars first, then control chars,
|
||||
# then whitespace collapse, then leading-cleanup.
|
||||
for ch in '/\\:*?"<>|':
|
||||
rendered = rendered.replace(ch, "_")
|
||||
rendered = "".join(c if ord(c) >= 32 else "_" for c in rendered)
|
||||
rendered = re.sub(r"\s+", "_", rendered)
|
||||
while rendered.startswith(".."):
|
||||
rendered = rendered[2:]
|
||||
rendered = rendered.lstrip("._")
|
||||
rendered = rendered.rstrip("._ ")
|
||||
|
||||
# Length cap on the stem (before any system-appended extension).
|
||||
if len(rendered) > 200:
|
||||
rendered = rendered[:200].rstrip("._ ")
|
||||
|
||||
# Reject Windows reserved device names (audit finding #7). On Windows,
|
||||
# opening `con.jpg` or `prn.png` for writing redirects to the device,
|
||||
# so a tag value of `con` from a hostile booru would silently break
|
||||
# save. Prefix with `_` to break the device-name match while keeping
|
||||
# the user's intended name visible.
|
||||
if rendered:
|
||||
stem_lower = rendered.split(".", 1)[0].lower()
|
||||
if stem_lower in _WINDOWS_RESERVED_NAMES:
|
||||
rendered = "_" + rendered
|
||||
|
||||
if not rendered:
|
||||
return f"{post.id}{ext}"
|
||||
|
||||
if not has_ext_token:
|
||||
rendered = rendered + ext
|
||||
|
||||
return rendered
|
||||
|
||||
|
||||
# Defaults
|
||||
DEFAULT_THUMBNAIL_SIZE = (200, 200)
|
||||
DEFAULT_PAGE_SIZE = 40
|
||||
|
||||
@ -2,13 +2,45 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
import json
|
||||
import threading
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from .config import db_path
|
||||
from .config import IS_WINDOWS, db_path
|
||||
|
||||
|
||||
def _validate_folder_name(name: str) -> str:
|
||||
"""Reject folder names that could break out of the saved-images dir.
|
||||
|
||||
Folder names hit the filesystem in `core.config.saved_folder_dir` (joined
|
||||
with `saved_dir()` and `mkdir`'d). Without this guard, an attacker — or a
|
||||
user pasting nonsense — could create / delete files anywhere by passing
|
||||
`..` segments, an absolute path, or an OS-native separator. We refuse
|
||||
those at write time so the DB never stores a poisoned name in the first
|
||||
place.
|
||||
|
||||
Permits anything else (Unicode, spaces, parentheses, hyphens) so existing
|
||||
folders like `miku(lewd)` keep working.
|
||||
"""
|
||||
if not name:
|
||||
raise ValueError("Folder name cannot be empty")
|
||||
if name in (".", ".."):
|
||||
raise ValueError(f"Invalid folder name: {name!r}")
|
||||
if "/" in name or "\\" in name or os.sep in name:
|
||||
raise ValueError(f"Folder name may not contain path separators: {name!r}")
|
||||
if name.startswith(".") or name.startswith("~"):
|
||||
raise ValueError(f"Folder name may not start with {name[0]!r}: {name!r}")
|
||||
# Reject any embedded `..` segment (e.g. `foo..bar` is fine, but `..` alone
|
||||
# is already caught above; this catches `..` inside slash-rejected paths
|
||||
# if someone tries to be clever — defensive belt for the suspenders).
|
||||
if ".." in name.split(os.sep):
|
||||
raise ValueError(f"Invalid folder name: {name!r}")
|
||||
return name
|
||||
|
||||
_SCHEMA = """
|
||||
CREATE TABLE IF NOT EXISTS sites (
|
||||
@ -66,8 +98,13 @@ CREATE TABLE IF NOT EXISTS library_meta (
|
||||
rating TEXT,
|
||||
source TEXT,
|
||||
file_url TEXT,
|
||||
saved_at TEXT
|
||||
saved_at TEXT,
|
||||
filename TEXT NOT NULL DEFAULT ''
|
||||
);
|
||||
-- The idx_library_meta_filename index is created in _migrate(), not here.
|
||||
-- _SCHEMA runs before _migrate against legacy databases that don't yet have
|
||||
-- the filename column, so creating the index here would fail with "no such
|
||||
-- column" before the migration could ALTER the column in.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS settings (
|
||||
key TEXT PRIMARY KEY,
|
||||
@ -87,6 +124,14 @@ CREATE TABLE IF NOT EXISTS saved_searches (
|
||||
query TEXT NOT NULL,
|
||||
site_id INTEGER
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS tag_types (
|
||||
site_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
label TEXT NOT NULL,
|
||||
fetched_at TEXT NOT NULL,
|
||||
PRIMARY KEY (site_id, name)
|
||||
);
|
||||
"""
|
||||
|
||||
_DEFAULTS = {
|
||||
@ -106,6 +151,9 @@ _DEFAULTS = {
|
||||
"slideshow_monitor": "",
|
||||
"library_dir": "",
|
||||
"infinite_scroll": "0",
|
||||
"library_filename_template": "",
|
||||
"unbookmark_on_save": "0",
|
||||
"search_history_enabled": "1",
|
||||
}
|
||||
|
||||
|
||||
@ -137,14 +185,17 @@ class Bookmark:
|
||||
tag_categories: dict = field(default_factory=dict)
|
||||
|
||||
|
||||
# Back-compat alias — will be removed in a future version.
|
||||
Favorite = Bookmark
|
||||
|
||||
|
||||
class Database:
|
||||
def __init__(self, path: Path | None = None) -> None:
|
||||
self._path = path or db_path()
|
||||
self._conn: sqlite3.Connection | None = None
|
||||
# Single writer lock for the connection. Reads happen concurrently
|
||||
# under WAL without contention; writes from multiple threads (Qt
|
||||
# main + the persistent asyncio loop thread) need explicit
|
||||
# serialization to avoid interleaved multi-statement methods.
|
||||
# RLock so a writing method can call another writing method on the
|
||||
# same thread without self-deadlocking.
|
||||
self._write_lock = threading.RLock()
|
||||
|
||||
@property
|
||||
def conn(self) -> sqlite3.Connection:
|
||||
@ -155,28 +206,95 @@ class Database:
|
||||
self._conn.execute("PRAGMA foreign_keys=ON")
|
||||
self._conn.executescript(_SCHEMA)
|
||||
self._migrate()
|
||||
self._restrict_perms()
|
||||
return self._conn
|
||||
|
||||
def _restrict_perms(self) -> None:
|
||||
"""Tighten the DB file (and WAL/SHM sidecars) to 0o600 on POSIX.
|
||||
|
||||
The sites table stores api_key + api_user in plaintext, so the
|
||||
file must not be readable by other local users. Sidecars only
|
||||
exist after the first WAL checkpoint, so we tolerate
|
||||
FileNotFoundError. Windows: NTFS ACLs handle this; chmod is a
|
||||
no-op there. Filesystem-level chmod failures are swallowed —
|
||||
better to keep working than refuse to start.
|
||||
"""
|
||||
if IS_WINDOWS:
|
||||
return
|
||||
for suffix in ("", "-wal", "-shm"):
|
||||
target = Path(str(self._path) + suffix) if suffix else self._path
|
||||
try:
|
||||
os.chmod(target, 0o600)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
@contextmanager
|
||||
def _write(self):
|
||||
"""Context manager for write methods.
|
||||
|
||||
Acquires the write lock for cross-thread serialization, then enters
|
||||
sqlite3's connection context manager (which BEGINs and COMMIT/ROLLBACKs
|
||||
atomically). Use this in place of `with self.conn:` whenever a method
|
||||
writes — it composes the two guarantees we want:
|
||||
1. Multi-statement atomicity (sqlite3 handles)
|
||||
2. Cross-thread write serialization (the RLock handles)
|
||||
Reads do not need this — they go through `self.conn.execute(...)` directly
|
||||
and rely on WAL for concurrent-reader isolation.
|
||||
"""
|
||||
with self._write_lock:
|
||||
with self.conn:
|
||||
yield self.conn
|
||||
|
||||
def _migrate(self) -> None:
|
||||
"""Add columns that may not exist in older databases."""
|
||||
cur = self._conn.execute("PRAGMA table_info(favorites)")
|
||||
cols = {row[1] for row in cur.fetchall()}
|
||||
if "folder" not in cols:
|
||||
self._conn.execute("ALTER TABLE favorites ADD COLUMN folder TEXT")
|
||||
self._conn.commit()
|
||||
self._conn.execute("CREATE INDEX IF NOT EXISTS idx_favorites_folder ON favorites(folder)")
|
||||
# Add tag_categories to library_meta if missing
|
||||
tables = {r[0] for r in self._conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()}
|
||||
if "library_meta" in tables:
|
||||
cur = self._conn.execute("PRAGMA table_info(library_meta)")
|
||||
meta_cols = {row[1] for row in cur.fetchall()}
|
||||
if "tag_categories" not in meta_cols:
|
||||
self._conn.execute("ALTER TABLE library_meta ADD COLUMN tag_categories TEXT DEFAULT ''")
|
||||
self._conn.commit()
|
||||
# Add tag_categories to favorites if missing
|
||||
if "tag_categories" not in cols:
|
||||
self._conn.execute("ALTER TABLE favorites ADD COLUMN tag_categories TEXT DEFAULT ''")
|
||||
self._conn.commit()
|
||||
"""Add columns that may not exist in older databases.
|
||||
|
||||
All ALTERs are wrapped in a single transaction so a crash partway
|
||||
through can't leave the schema half-migrated. Note: this runs from
|
||||
the `conn` property's lazy init, where `_write_lock` exists but the
|
||||
connection is being built — we only need to serialize writes via
|
||||
the lock; the connection context manager handles atomicity.
|
||||
"""
|
||||
with self._write_lock:
|
||||
with self._conn:
|
||||
cur = self._conn.execute("PRAGMA table_info(favorites)")
|
||||
cols = {row[1] for row in cur.fetchall()}
|
||||
if "folder" not in cols:
|
||||
self._conn.execute("ALTER TABLE favorites ADD COLUMN folder TEXT")
|
||||
self._conn.execute("CREATE INDEX IF NOT EXISTS idx_favorites_folder ON favorites(folder)")
|
||||
# Add tag_categories to library_meta if missing
|
||||
tables = {r[0] for r in self._conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()}
|
||||
if "library_meta" in tables:
|
||||
cur = self._conn.execute("PRAGMA table_info(library_meta)")
|
||||
meta_cols = {row[1] for row in cur.fetchall()}
|
||||
if "tag_categories" not in meta_cols:
|
||||
self._conn.execute("ALTER TABLE library_meta ADD COLUMN tag_categories TEXT DEFAULT ''")
|
||||
# Add filename column. Empty-string default acts as the
|
||||
# "unknown" sentinel for legacy v0.2.3 rows whose on-disk
|
||||
# filenames are digit stems — library scan code falls
|
||||
# back to int(stem) when filename is empty.
|
||||
if "filename" not in meta_cols:
|
||||
self._conn.execute("ALTER TABLE library_meta ADD COLUMN filename TEXT NOT NULL DEFAULT ''")
|
||||
self._conn.execute("CREATE INDEX IF NOT EXISTS idx_library_meta_filename ON library_meta(filename)")
|
||||
# Add tag_categories to favorites if missing
|
||||
if "tag_categories" not in cols:
|
||||
self._conn.execute("ALTER TABLE favorites ADD COLUMN tag_categories TEXT DEFAULT ''")
|
||||
# Tag-type cache for boorus that don't return
|
||||
# categorized tags inline (Gelbooru-shape, Moebooru).
|
||||
# Per-site keying so forks don't cross-contaminate.
|
||||
# Uses string labels ("Artist", "Character", ...)
|
||||
# instead of integer codes — the labels come from
|
||||
# the HTML class names directly.
|
||||
self._conn.execute("""
|
||||
CREATE TABLE IF NOT EXISTS tag_types (
|
||||
site_id INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
label TEXT NOT NULL,
|
||||
fetched_at TEXT NOT NULL,
|
||||
PRIMARY KEY (site_id, name)
|
||||
)
|
||||
""")
|
||||
|
||||
def close(self) -> None:
|
||||
if self._conn:
|
||||
@ -194,12 +312,12 @@ class Database:
|
||||
api_user: str | None = None,
|
||||
) -> Site:
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
cur = self.conn.execute(
|
||||
"INSERT INTO sites (name, url, api_type, api_key, api_user, added_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?)",
|
||||
(name, url.rstrip("/"), api_type, api_key, api_user, now),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
cur = self.conn.execute(
|
||||
"INSERT INTO sites (name, url, api_type, api_key, api_user, added_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?)",
|
||||
(name, url.rstrip("/"), api_type, api_key, api_user, now),
|
||||
)
|
||||
return Site(
|
||||
id=cur.lastrowid, # type: ignore[arg-type]
|
||||
name=name,
|
||||
@ -229,9 +347,12 @@ class Database:
|
||||
]
|
||||
|
||||
def delete_site(self, site_id: int) -> None:
|
||||
self.conn.execute("DELETE FROM favorites WHERE site_id = ?", (site_id,))
|
||||
self.conn.execute("DELETE FROM sites WHERE id = ?", (site_id,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("DELETE FROM tag_types WHERE site_id = ?", (site_id,))
|
||||
self.conn.execute("DELETE FROM search_history WHERE site_id = ?", (site_id,))
|
||||
self.conn.execute("DELETE FROM saved_searches WHERE site_id = ?", (site_id,))
|
||||
self.conn.execute("DELETE FROM favorites WHERE site_id = ?", (site_id,))
|
||||
self.conn.execute("DELETE FROM sites WHERE id = ?", (site_id,))
|
||||
|
||||
def update_site(self, site_id: int, **fields: str | None) -> None:
|
||||
allowed = {"name", "url", "api_type", "api_key", "api_user", "enabled"}
|
||||
@ -245,10 +366,10 @@ class Database:
|
||||
if not sets:
|
||||
return
|
||||
vals.append(site_id)
|
||||
self.conn.execute(
|
||||
f"UPDATE sites SET {', '.join(sets)} WHERE id = ?", vals
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
f"UPDATE sites SET {', '.join(sets)} WHERE id = ?", vals
|
||||
)
|
||||
|
||||
# -- Bookmarks --
|
||||
|
||||
@ -268,15 +389,30 @@ class Database:
|
||||
) -> Bookmark:
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
cats_json = json.dumps(tag_categories) if tag_categories else ""
|
||||
cur = self.conn.execute(
|
||||
"INSERT OR IGNORE INTO favorites "
|
||||
"(site_id, post_id, file_url, preview_url, tags, rating, score, source, cached_path, folder, favorited_at, tag_categories) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(site_id, post_id, file_url, preview_url, tags, rating, score, source, cached_path, folder, now, cats_json),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
cur = self.conn.execute(
|
||||
"INSERT OR IGNORE INTO favorites "
|
||||
"(site_id, post_id, file_url, preview_url, tags, rating, score, source, cached_path, folder, favorited_at, tag_categories) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(site_id, post_id, file_url, preview_url, tags, rating, score, source, cached_path, folder, now, cats_json),
|
||||
)
|
||||
if cur.rowcount == 0:
|
||||
# Row already existed (UNIQUE collision on site_id, post_id);
|
||||
# INSERT OR IGNORE leaves lastrowid stale, so re-SELECT the
|
||||
# actual id. Without this, the returned Bookmark.id is bogus
|
||||
# (e.g. 0) and any subsequent update keyed on that id silently
|
||||
# no-ops — see app.py update_bookmark_cache_path callsite.
|
||||
row = self.conn.execute(
|
||||
"SELECT id, favorited_at FROM favorites WHERE site_id = ? AND post_id = ?",
|
||||
(site_id, post_id),
|
||||
).fetchone()
|
||||
bm_id = row["id"]
|
||||
bookmarked_at = row["favorited_at"]
|
||||
else:
|
||||
bm_id = cur.lastrowid
|
||||
bookmarked_at = now
|
||||
return Bookmark(
|
||||
id=cur.lastrowid, # type: ignore[arg-type]
|
||||
id=bm_id,
|
||||
site_id=site_id,
|
||||
post_id=post_id,
|
||||
file_url=file_url,
|
||||
@ -287,7 +423,7 @@ class Database:
|
||||
source=source,
|
||||
cached_path=cached_path,
|
||||
folder=folder,
|
||||
bookmarked_at=now,
|
||||
bookmarked_at=bookmarked_at,
|
||||
)
|
||||
|
||||
# Back-compat shim
|
||||
@ -295,26 +431,26 @@ class Database:
|
||||
|
||||
def add_bookmarks_batch(self, bookmarks: list[dict]) -> None:
|
||||
"""Add multiple bookmarks in a single transaction."""
|
||||
for fav in bookmarks:
|
||||
self.conn.execute(
|
||||
"INSERT OR IGNORE INTO favorites "
|
||||
"(site_id, post_id, file_url, preview_url, tags, rating, score, source, cached_path, folder, favorited_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(fav['site_id'], fav['post_id'], fav['file_url'], fav.get('preview_url'),
|
||||
fav.get('tags', ''), fav.get('rating'), fav.get('score'), fav.get('source'),
|
||||
fav.get('cached_path'), fav.get('folder'), fav.get('favorited_at', datetime.now(timezone.utc).isoformat())),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
for fav in bookmarks:
|
||||
self.conn.execute(
|
||||
"INSERT OR IGNORE INTO favorites "
|
||||
"(site_id, post_id, file_url, preview_url, tags, rating, score, source, cached_path, folder, favorited_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(fav['site_id'], fav['post_id'], fav['file_url'], fav.get('preview_url'),
|
||||
fav.get('tags', ''), fav.get('rating'), fav.get('score'), fav.get('source'),
|
||||
fav.get('cached_path'), fav.get('folder'), fav.get('favorited_at', datetime.now(timezone.utc).isoformat())),
|
||||
)
|
||||
|
||||
# Back-compat shim
|
||||
add_favorites_batch = add_bookmarks_batch
|
||||
|
||||
def remove_bookmark(self, site_id: int, post_id: int) -> None:
|
||||
self.conn.execute(
|
||||
"DELETE FROM favorites WHERE site_id = ? AND post_id = ?",
|
||||
(site_id, post_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"DELETE FROM favorites WHERE site_id = ? AND post_id = ?",
|
||||
(site_id, post_id),
|
||||
)
|
||||
|
||||
# Back-compat shim
|
||||
remove_favorite = remove_bookmark
|
||||
@ -347,8 +483,16 @@ class Database:
|
||||
params.append(folder)
|
||||
if search:
|
||||
for tag in search.strip().split():
|
||||
q += " AND tags LIKE ?"
|
||||
params.append(f"%{tag}%")
|
||||
# Escape SQL LIKE wildcards in user input. Without ESCAPE,
|
||||
# `_` matches any single char and `%` matches any sequence,
|
||||
# so searching `cat_ear` would also match `catear`/`catxear`.
|
||||
escaped = (
|
||||
tag.replace("\\", "\\\\")
|
||||
.replace("%", "\\%")
|
||||
.replace("_", "\\_")
|
||||
)
|
||||
q += " AND tags LIKE ? ESCAPE '\\'"
|
||||
params.append(f"%{escaped}%")
|
||||
q += " ORDER BY favorited_at DESC LIMIT ? OFFSET ?"
|
||||
params.extend([limit, offset])
|
||||
rows = self.conn.execute(q, params).fetchall()
|
||||
@ -381,11 +525,11 @@ class Database:
|
||||
_row_to_favorite = _row_to_bookmark
|
||||
|
||||
def update_bookmark_cache_path(self, fav_id: int, cached_path: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET cached_path = ? WHERE id = ?",
|
||||
(cached_path, fav_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET cached_path = ? WHERE id = ?",
|
||||
(cached_path, fav_id),
|
||||
)
|
||||
|
||||
# Back-compat shim
|
||||
update_favorite_cache_path = update_bookmark_cache_path
|
||||
@ -404,32 +548,34 @@ class Database:
|
||||
return [r["name"] for r in rows]
|
||||
|
||||
def add_folder(self, name: str) -> None:
|
||||
self.conn.execute(
|
||||
"INSERT OR IGNORE INTO favorite_folders (name) VALUES (?)", (name.strip(),)
|
||||
)
|
||||
self.conn.commit()
|
||||
clean = _validate_folder_name(name.strip())
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"INSERT OR IGNORE INTO favorite_folders (name) VALUES (?)", (clean,)
|
||||
)
|
||||
|
||||
def remove_folder(self, name: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET folder = NULL WHERE folder = ?", (name,)
|
||||
)
|
||||
self.conn.execute("DELETE FROM favorite_folders WHERE name = ?", (name,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET folder = NULL WHERE folder = ?", (name,)
|
||||
)
|
||||
self.conn.execute("DELETE FROM favorite_folders WHERE name = ?", (name,))
|
||||
|
||||
def rename_folder(self, old: str, new: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET folder = ? WHERE folder = ?", (new.strip(), old)
|
||||
)
|
||||
self.conn.execute(
|
||||
"UPDATE favorite_folders SET name = ? WHERE name = ?", (new.strip(), old)
|
||||
)
|
||||
self.conn.commit()
|
||||
new_name = _validate_folder_name(new.strip())
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET folder = ? WHERE folder = ?", (new_name, old)
|
||||
)
|
||||
self.conn.execute(
|
||||
"UPDATE favorite_folders SET name = ? WHERE name = ?", (new_name, old)
|
||||
)
|
||||
|
||||
def move_bookmark_to_folder(self, fav_id: int, folder: str | None) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET folder = ? WHERE id = ?", (folder, fav_id)
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"UPDATE favorites SET folder = ? WHERE id = ?", (folder, fav_id)
|
||||
)
|
||||
|
||||
# Back-compat shim
|
||||
move_favorite_to_folder = move_bookmark_to_folder
|
||||
@ -437,18 +583,18 @@ class Database:
|
||||
# -- Blacklist --
|
||||
|
||||
def add_blacklisted_tag(self, tag: str) -> None:
|
||||
self.conn.execute(
|
||||
"INSERT OR IGNORE INTO blacklisted_tags (tag) VALUES (?)",
|
||||
(tag.strip().lower(),),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"INSERT OR IGNORE INTO blacklisted_tags (tag) VALUES (?)",
|
||||
(tag.strip().lower(),),
|
||||
)
|
||||
|
||||
def remove_blacklisted_tag(self, tag: str) -> None:
|
||||
self.conn.execute(
|
||||
"DELETE FROM blacklisted_tags WHERE tag = ?",
|
||||
(tag.strip().lower(),),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"DELETE FROM blacklisted_tags WHERE tag = ?",
|
||||
(tag.strip().lower(),),
|
||||
)
|
||||
|
||||
def get_blacklisted_tags(self) -> list[str]:
|
||||
rows = self.conn.execute("SELECT tag FROM blacklisted_tags ORDER BY tag").fetchall()
|
||||
@ -457,12 +603,12 @@ class Database:
|
||||
# -- Blacklisted Posts --
|
||||
|
||||
def add_blacklisted_post(self, url: str) -> None:
|
||||
self.conn.execute("INSERT OR IGNORE INTO blacklisted_posts (url) VALUES (?)", (url,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("INSERT OR IGNORE INTO blacklisted_posts (url) VALUES (?)", (url,))
|
||||
|
||||
def remove_blacklisted_post(self, url: str) -> None:
|
||||
self.conn.execute("DELETE FROM blacklisted_posts WHERE url = ?", (url,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("DELETE FROM blacklisted_posts WHERE url = ?", (url,))
|
||||
|
||||
def get_blacklisted_posts(self) -> set[str]:
|
||||
rows = self.conn.execute("SELECT url FROM blacklisted_posts").fetchall()
|
||||
@ -472,16 +618,139 @@ class Database:
|
||||
|
||||
def save_library_meta(self, post_id: int, tags: str = "", tag_categories: dict = None,
|
||||
score: int = 0, rating: str = None, source: str = None,
|
||||
file_url: str = None) -> None:
|
||||
file_url: str = None, filename: str = "") -> None:
|
||||
cats_json = json.dumps(tag_categories) if tag_categories else ""
|
||||
self.conn.execute(
|
||||
"INSERT OR REPLACE INTO library_meta "
|
||||
"(post_id, tags, tag_categories, score, rating, source, file_url, saved_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(post_id, tags, cats_json, score, rating, source, file_url,
|
||||
datetime.now(timezone.utc).isoformat()),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"INSERT OR REPLACE INTO library_meta "
|
||||
"(post_id, tags, tag_categories, score, rating, source, file_url, saved_at, filename) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(post_id, tags, cats_json, score, rating, source, file_url,
|
||||
datetime.now(timezone.utc).isoformat(), filename),
|
||||
)
|
||||
|
||||
def reconcile_library_meta(self) -> int:
|
||||
"""Drop library_meta rows whose files are no longer on disk.
|
||||
|
||||
Walks every row, checks for both digit-stem (legacy v0.2.3)
|
||||
and templated (post-refactor) filenames in saved_dir() + one
|
||||
level of subdirectories, and deletes rows where neither is
|
||||
found. Returns the number of rows removed.
|
||||
|
||||
Cleans up the orphan rows that were leaked by the old
|
||||
delete_from_library before it learned to clean up after
|
||||
itself. Safe to call repeatedly — a no-op once the DB is
|
||||
consistent with disk.
|
||||
|
||||
Skips reconciliation entirely if saved_dir() is missing or
|
||||
empty (defensive — a removable drive temporarily unmounted
|
||||
shouldn't trigger a wholesale meta wipe).
|
||||
"""
|
||||
from .config import saved_dir, MEDIA_EXTENSIONS
|
||||
sd = saved_dir()
|
||||
if not sd.is_dir():
|
||||
return 0
|
||||
|
||||
# Build the set of (post_id present on disk). Walks shallow:
|
||||
# root + one level of subdirectories.
|
||||
on_disk_files: list[Path] = []
|
||||
for entry in sd.iterdir():
|
||||
if entry.is_file() and entry.suffix.lower() in MEDIA_EXTENSIONS:
|
||||
on_disk_files.append(entry)
|
||||
elif entry.is_dir():
|
||||
for sub in entry.iterdir():
|
||||
if sub.is_file() and sub.suffix.lower() in MEDIA_EXTENSIONS:
|
||||
on_disk_files.append(sub)
|
||||
if not on_disk_files:
|
||||
# No files at all — refuse to reconcile. Could be an
|
||||
# unmounted drive, a freshly-cleared library, etc. The
|
||||
# cost of a false positive (wiping every meta row) is
|
||||
# higher than the cost of leaving stale rows.
|
||||
return 0
|
||||
|
||||
present_post_ids: set[int] = set()
|
||||
for f in on_disk_files:
|
||||
if f.stem.isdigit():
|
||||
present_post_ids.add(int(f.stem))
|
||||
# Templated files: look up by filename
|
||||
for f in on_disk_files:
|
||||
if not f.stem.isdigit():
|
||||
row = self.conn.execute(
|
||||
"SELECT post_id FROM library_meta WHERE filename = ? LIMIT 1",
|
||||
(f.name,),
|
||||
).fetchone()
|
||||
if row is not None:
|
||||
present_post_ids.add(row["post_id"])
|
||||
|
||||
all_meta_ids = self.get_saved_post_ids()
|
||||
stale = all_meta_ids - present_post_ids
|
||||
if not stale:
|
||||
return 0
|
||||
|
||||
with self._write():
|
||||
BATCH = 500
|
||||
stale_list = list(stale)
|
||||
for i in range(0, len(stale_list), BATCH):
|
||||
chunk = stale_list[i:i + BATCH]
|
||||
placeholders = ",".join("?" * len(chunk))
|
||||
self.conn.execute(
|
||||
f"DELETE FROM library_meta WHERE post_id IN ({placeholders})",
|
||||
chunk,
|
||||
)
|
||||
return len(stale)
|
||||
|
||||
def is_post_in_library(self, post_id: int) -> bool:
|
||||
"""True iff a `library_meta` row exists for `post_id`.
|
||||
|
||||
Cheap, indexed lookup. Use this instead of walking the
|
||||
filesystem when you only need a yes/no for a single post —
|
||||
e.g. the bookmark context-menu's "Unsave from Library"
|
||||
visibility check, or the bookmark→library copy's existence
|
||||
guard. Replaces digit-stem matching, which can't see
|
||||
templated filenames.
|
||||
"""
|
||||
row = self.conn.execute(
|
||||
"SELECT 1 FROM library_meta WHERE post_id = ? LIMIT 1",
|
||||
(post_id,),
|
||||
).fetchone()
|
||||
return row is not None
|
||||
|
||||
def get_saved_post_ids(self) -> set[int]:
|
||||
"""Return every post_id that has a library_meta row.
|
||||
|
||||
Used for batch saved-locally dot population on grids — load
|
||||
the set once, do per-thumb membership checks against it.
|
||||
Single SELECT, much cheaper than per-post DB lookups or
|
||||
per-grid filesystem walks. Format-agnostic: handles both
|
||||
templated and digit-stem filenames as long as the file's
|
||||
save flow wrote a meta row (every save site does after the
|
||||
unified save_post_file refactor).
|
||||
"""
|
||||
rows = self.conn.execute(
|
||||
"SELECT post_id FROM library_meta"
|
||||
).fetchall()
|
||||
return {r["post_id"] for r in rows}
|
||||
|
||||
def get_library_post_id_by_filename(self, filename: str) -> int | None:
|
||||
"""Look up which post a saved-library file belongs to, by basename.
|
||||
|
||||
Returns the post_id if a `library_meta` row exists with that
|
||||
filename, or None if no row matches. Used by the unified save
|
||||
flow's same-post-on-disk check to make re-saves idempotent and
|
||||
to apply sequential `_1`, `_2`, ... suffixes only when a name
|
||||
collides with a *different* post.
|
||||
|
||||
Empty-string filenames (the legacy v0.2.3 sentinel) deliberately
|
||||
do not match — callers fall back to the digit-stem heuristic for
|
||||
those rows.
|
||||
"""
|
||||
if not filename:
|
||||
return None
|
||||
row = self.conn.execute(
|
||||
"SELECT post_id FROM library_meta WHERE filename = ? LIMIT 1",
|
||||
(filename,),
|
||||
).fetchone()
|
||||
return row["post_id"] if row else None
|
||||
|
||||
def get_library_meta(self, post_id: int) -> dict | None:
|
||||
row = self.conn.execute("SELECT * FROM library_meta WHERE post_id = ?", (post_id,)).fetchone()
|
||||
@ -494,15 +763,95 @@ class Database:
|
||||
|
||||
def search_library_meta(self, query: str) -> set[int]:
|
||||
"""Search library metadata by tags. Returns matching post IDs."""
|
||||
escaped = (
|
||||
query.replace("\\", "\\\\")
|
||||
.replace("%", "\\%")
|
||||
.replace("_", "\\_")
|
||||
)
|
||||
rows = self.conn.execute(
|
||||
"SELECT post_id FROM library_meta WHERE tags LIKE ?",
|
||||
(f"%{query}%",),
|
||||
"SELECT post_id FROM library_meta WHERE tags LIKE ? ESCAPE '\\'",
|
||||
(f"%{escaped}%",),
|
||||
).fetchall()
|
||||
return {r["post_id"] for r in rows}
|
||||
|
||||
def remove_library_meta(self, post_id: int) -> None:
|
||||
self.conn.execute("DELETE FROM library_meta WHERE post_id = ?", (post_id,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("DELETE FROM library_meta WHERE post_id = ?", (post_id,))
|
||||
|
||||
# -- Tag-type cache --
|
||||
|
||||
def get_tag_labels(self, site_id: int, names: list[str]) -> dict[str, str]:
|
||||
"""Return cached string labels for `names` on `site_id`.
|
||||
|
||||
Result dict only contains tags with a cache entry — callers
|
||||
fetch the misses via CategoryFetcher and call set_tag_labels
|
||||
to backfill. Chunked to stay under SQLite's variable limit.
|
||||
"""
|
||||
if not names:
|
||||
return {}
|
||||
result: dict[str, str] = {}
|
||||
BATCH = 500
|
||||
for i in range(0, len(names), BATCH):
|
||||
chunk = names[i:i + BATCH]
|
||||
placeholders = ",".join("?" * len(chunk))
|
||||
rows = self.conn.execute(
|
||||
f"SELECT name, label FROM tag_types WHERE site_id = ? AND name IN ({placeholders})",
|
||||
[site_id, *chunk],
|
||||
).fetchall()
|
||||
for r in rows:
|
||||
result[r["name"]] = r["label"]
|
||||
return result
|
||||
|
||||
def set_tag_labels(self, site_id: int, mapping: dict[str, str]) -> None:
|
||||
"""Bulk INSERT OR REPLACE (name -> label) entries for one site.
|
||||
|
||||
Auto-prunes oldest entries when the table exceeds
|
||||
_TAG_CACHE_MAX_ROWS to prevent unbounded growth.
|
||||
"""
|
||||
if not mapping:
|
||||
return
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
rows = [(site_id, name, label, now) for name, label in mapping.items()]
|
||||
with self._write():
|
||||
self.conn.executemany(
|
||||
"INSERT OR REPLACE INTO tag_types (site_id, name, label, fetched_at) "
|
||||
"VALUES (?, ?, ?, ?)",
|
||||
rows,
|
||||
)
|
||||
self._prune_tag_cache()
|
||||
|
||||
_TAG_CACHE_MAX_ROWS = 50_000 # ~50k tags ≈ several months of browsing
|
||||
|
||||
def _prune_tag_cache(self) -> None:
|
||||
"""Delete the oldest tag_types rows if the table exceeds the cap.
|
||||
|
||||
Keeps the most-recently-fetched entries. Runs inside an
|
||||
existing _write() context from set_tag_labels, so no extra
|
||||
transaction overhead. The cap is generous enough that
|
||||
normal usage never hits it; it's a safety valve for users
|
||||
who browse dozens of boorus over months without clearing.
|
||||
"""
|
||||
count = self.conn.execute("SELECT COUNT(*) FROM tag_types").fetchone()[0]
|
||||
if count <= self._TAG_CACHE_MAX_ROWS:
|
||||
return
|
||||
excess = count - self._TAG_CACHE_MAX_ROWS
|
||||
self.conn.execute(
|
||||
"DELETE FROM tag_types WHERE rowid IN ("
|
||||
" SELECT rowid FROM tag_types ORDER BY fetched_at ASC LIMIT ?"
|
||||
")",
|
||||
(excess,),
|
||||
)
|
||||
|
||||
def clear_tag_cache(self, site_id: int | None = None) -> int:
|
||||
"""Delete cached tag types. Pass site_id to clear one site,
|
||||
or None to clear all. Returns rows deleted. Exposed for
|
||||
future Settings UI "Clear tag cache" button."""
|
||||
with self._write():
|
||||
if site_id is not None:
|
||||
cur = self.conn.execute("DELETE FROM tag_types WHERE site_id = ?", (site_id,))
|
||||
else:
|
||||
cur = self.conn.execute("DELETE FROM tag_types")
|
||||
return cur.rowcount
|
||||
|
||||
# -- Settings --
|
||||
|
||||
@ -519,11 +868,11 @@ class Database:
|
||||
return self.get_setting(key) == "1"
|
||||
|
||||
def set_setting(self, key: str, value: str) -> None:
|
||||
self.conn.execute(
|
||||
"INSERT OR REPLACE INTO settings (key, value) VALUES (?, ?)",
|
||||
(key, str(value)),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"INSERT OR REPLACE INTO settings (key, value) VALUES (?, ?)",
|
||||
(key, str(value)),
|
||||
)
|
||||
|
||||
def get_all_settings(self) -> dict[str, str]:
|
||||
result = dict(_DEFAULTS)
|
||||
@ -538,21 +887,21 @@ class Database:
|
||||
if not query.strip():
|
||||
return
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
# Remove duplicate if exists, keep latest
|
||||
self.conn.execute(
|
||||
"DELETE FROM search_history WHERE query = ? AND (site_id = ? OR (site_id IS NULL AND ? IS NULL))",
|
||||
(query.strip(), site_id, site_id),
|
||||
)
|
||||
self.conn.execute(
|
||||
"INSERT INTO search_history (query, site_id, searched_at) VALUES (?, ?, ?)",
|
||||
(query.strip(), site_id, now),
|
||||
)
|
||||
# Keep only last 50
|
||||
self.conn.execute(
|
||||
"DELETE FROM search_history WHERE id NOT IN "
|
||||
"(SELECT id FROM search_history ORDER BY searched_at DESC LIMIT 50)"
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
# Remove duplicate if exists, keep latest
|
||||
self.conn.execute(
|
||||
"DELETE FROM search_history WHERE query = ? AND (site_id = ? OR (site_id IS NULL AND ? IS NULL))",
|
||||
(query.strip(), site_id, site_id),
|
||||
)
|
||||
self.conn.execute(
|
||||
"INSERT INTO search_history (query, site_id, searched_at) VALUES (?, ?, ?)",
|
||||
(query.strip(), site_id, now),
|
||||
)
|
||||
# Keep only last 50
|
||||
self.conn.execute(
|
||||
"DELETE FROM search_history WHERE id NOT IN "
|
||||
"(SELECT id FROM search_history ORDER BY searched_at DESC LIMIT 50)"
|
||||
)
|
||||
|
||||
def get_search_history(self, limit: int = 20) -> list[str]:
|
||||
rows = self.conn.execute(
|
||||
@ -562,21 +911,21 @@ class Database:
|
||||
return [r["query"] for r in rows]
|
||||
|
||||
def clear_search_history(self) -> None:
|
||||
self.conn.execute("DELETE FROM search_history")
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("DELETE FROM search_history")
|
||||
|
||||
def remove_search_history(self, query: str) -> None:
|
||||
self.conn.execute("DELETE FROM search_history WHERE query = ?", (query,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("DELETE FROM search_history WHERE query = ?", (query,))
|
||||
|
||||
# -- Saved Searches --
|
||||
|
||||
def add_saved_search(self, name: str, query: str, site_id: int | None = None) -> None:
|
||||
self.conn.execute(
|
||||
"INSERT OR REPLACE INTO saved_searches (name, query, site_id) VALUES (?, ?, ?)",
|
||||
(name.strip(), query.strip(), site_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute(
|
||||
"INSERT OR REPLACE INTO saved_searches (name, query, site_id) VALUES (?, ?, ?)",
|
||||
(name.strip(), query.strip(), site_id),
|
||||
)
|
||||
|
||||
def get_saved_searches(self) -> list[tuple[int, str, str]]:
|
||||
"""Returns list of (id, name, query)."""
|
||||
@ -586,5 +935,5 @@ class Database:
|
||||
return [(r["id"], r["name"], r["query"]) for r in rows]
|
||||
|
||||
def remove_saved_search(self, search_id: int) -> None:
|
||||
self.conn.execute("DELETE FROM saved_searches WHERE id = ?", (search_id,))
|
||||
self.conn.commit()
|
||||
with self._write():
|
||||
self.conn.execute("DELETE FROM saved_searches WHERE id = ?", (search_id,))
|
||||
|
||||
73
booru_viewer/core/http.py
Normal file
73
booru_viewer/core/http.py
Normal file
@ -0,0 +1,73 @@
|
||||
"""Shared httpx.AsyncClient constructor.
|
||||
|
||||
Three call sites build near-identical clients: the cache module's
|
||||
download pool, ``BooruClient``'s shared API pool, and
|
||||
``detect.detect_site_type``'s reach into that same pool. Centralising
|
||||
the construction in one place means a future change (new SSRF hook,
|
||||
new connection limit, different default UA) doesn't have to be made
|
||||
three times and kept in sync.
|
||||
|
||||
The module does NOT manage the singletons themselves — each call site
|
||||
keeps its own ``_shared_client`` and its own lock, so the cache
|
||||
pool's long-lived large transfers don't compete with short JSON
|
||||
requests from the API layer. ``make_client`` is a pure constructor.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Callable, Iterable
|
||||
|
||||
import httpx
|
||||
|
||||
from .config import USER_AGENT
|
||||
from .api._safety import validate_public_request
|
||||
|
||||
|
||||
# Connection pool limits are identical across all three call sites.
|
||||
# Keeping the default here centralises any future tuning.
|
||||
_DEFAULT_LIMITS = httpx.Limits(max_connections=10, max_keepalive_connections=5)
|
||||
|
||||
|
||||
def make_client(
|
||||
*,
|
||||
timeout: float = 20.0,
|
||||
accept: str | None = None,
|
||||
extra_request_hooks: Iterable[Callable] | None = None,
|
||||
) -> httpx.AsyncClient:
|
||||
"""Return a fresh ``httpx.AsyncClient`` with the project's defaults.
|
||||
|
||||
Defaults applied unconditionally:
|
||||
- ``User-Agent`` header from ``core.config.USER_AGENT``
|
||||
- ``follow_redirects=True``
|
||||
- ``validate_public_request`` SSRF hook (always first on the
|
||||
request-hook chain; extras run after it)
|
||||
- Connection limits: 10 max, 5 keepalive
|
||||
|
||||
Parameters:
|
||||
timeout: per-request timeout in seconds. Cache downloads pass
|
||||
60s for large videos; the API pool uses 20s.
|
||||
accept: optional ``Accept`` header value. The cache pool sets
|
||||
``image/*,video/*,*/*``; the API pool leaves it unset so
|
||||
httpx's ``*/*`` default takes effect.
|
||||
extra_request_hooks: optional extra callables to run after
|
||||
``validate_public_request``. The API clients pass their
|
||||
connection-logging hook here; detect passes the same.
|
||||
|
||||
Call sites are responsible for their own singleton caching —
|
||||
``make_client`` always returns a fresh instance.
|
||||
"""
|
||||
headers: dict[str, str] = {"User-Agent": USER_AGENT}
|
||||
if accept is not None:
|
||||
headers["Accept"] = accept
|
||||
|
||||
hooks: list[Callable] = [validate_public_request]
|
||||
if extra_request_hooks:
|
||||
hooks.extend(extra_request_hooks)
|
||||
|
||||
return httpx.AsyncClient(
|
||||
headers=headers,
|
||||
follow_redirects=True,
|
||||
timeout=timeout,
|
||||
event_hooks={"request": hooks},
|
||||
limits=_DEFAULT_LIMITS,
|
||||
)
|
||||
@ -1,31 +0,0 @@
|
||||
"""Image thumbnailing and format helpers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from PIL import Image
|
||||
|
||||
from .config import DEFAULT_THUMBNAIL_SIZE, thumbnails_dir
|
||||
|
||||
|
||||
def make_thumbnail(
|
||||
source: Path,
|
||||
size: tuple[int, int] = DEFAULT_THUMBNAIL_SIZE,
|
||||
dest: Path | None = None,
|
||||
) -> Path:
|
||||
"""Create a thumbnail, returning its path. Returns existing if already made."""
|
||||
dest = dest or thumbnails_dir() / f"thumb_{source.stem}_{size[0]}x{size[1]}.jpg"
|
||||
if dest.exists():
|
||||
return dest
|
||||
with Image.open(source) as img:
|
||||
img.thumbnail(size, Image.Resampling.LANCZOS)
|
||||
if img.mode in ("RGBA", "P"):
|
||||
img = img.convert("RGB")
|
||||
img.save(dest, "JPEG", quality=85)
|
||||
return dest
|
||||
|
||||
|
||||
def image_dimensions(path: Path) -> tuple[int, int]:
|
||||
with Image.open(path) as img:
|
||||
return img.size
|
||||
242
booru_viewer/core/library_save.py
Normal file
242
booru_viewer/core/library_save.py
Normal file
@ -0,0 +1,242 @@
|
||||
"""Unified save flow for writing Post media to disk.
|
||||
|
||||
This module owns the single function (`save_post_file`) that every save
|
||||
site in the app routes through. It exists to keep filename-template
|
||||
rendering, sequential collision suffixes, same-post idempotency, and
|
||||
the conditional `library_meta` write all in one place instead of
|
||||
duplicated across the save sites that used to live in
|
||||
`gui/main_window.py` and `gui/bookmarks.py`.
|
||||
|
||||
Boundary rule: this module imports from `core.cache`, `core.config`,
|
||||
`core.db`. It does NOT import from `gui/`. That's how both `bookmarks.py`
|
||||
and `main_window.py` can call into it without dragging in a circular
|
||||
import.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Callable
|
||||
|
||||
from .config import render_filename_template, saved_dir
|
||||
from .db import Database
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .api.base import Post
|
||||
from .api.category_fetcher import CategoryFetcher
|
||||
|
||||
|
||||
_CATEGORY_TOKENS = {"%artist%", "%character%", "%copyright%", "%general%", "%meta%", "%species%"}
|
||||
|
||||
|
||||
async def save_post_file(
|
||||
src: Path,
|
||||
post: "Post",
|
||||
dest_dir: Path,
|
||||
db: Database,
|
||||
in_flight: set[str] | None = None,
|
||||
explicit_name: str | None = None,
|
||||
*,
|
||||
category_fetcher: "CategoryFetcher | None",
|
||||
) -> Path:
|
||||
"""Copy a Post's already-cached media file into `dest_dir`.
|
||||
|
||||
Single source of truth for "write a Post to disk." Every save site
|
||||
— Browse Save, multi-select bulk save, Save As, Download All, multi-
|
||||
select Download All, bookmark→library, bookmark Save As — routes
|
||||
through this function.
|
||||
|
||||
Filename comes from the `library_filename_template` setting,
|
||||
rendered against the Post via `render_filename_template`. If
|
||||
`explicit_name` is set (the user typed a name into a Save As
|
||||
dialog), the template is bypassed and `explicit_name` is used as
|
||||
the basename. Collision resolution still runs in case the user
|
||||
picked an existing path that belongs to a different post.
|
||||
|
||||
Collision resolution: if the chosen basename exists at `dest_dir`
|
||||
or is already claimed by an earlier iteration of the current batch
|
||||
(via `in_flight`), and the existing copy belongs to a *different*
|
||||
post, sequential `_1`, `_2`, `_3`, ... suffixes are appended until
|
||||
a free name is found. If the existing copy is the same post
|
||||
(verified by `library_meta` lookup or the legacy digit-stem
|
||||
fallback), the chosen basename is returned unchanged and the copy
|
||||
is skipped — the re-save is idempotent.
|
||||
|
||||
`library_meta` write: if the resolved destination is inside
|
||||
`saved_dir()`, a `library_meta` row is written for the post,
|
||||
including the resolved filename. This is the case for Save to
|
||||
Library (any folder), bulk Save to Library, batch Download into a
|
||||
library folder, multi-select batch Download into a library folder,
|
||||
Save As into a library folder (a deliberate behavior change from
|
||||
v0.2.3 — Save As never wrote meta before), and bookmark→library
|
||||
copies.
|
||||
|
||||
Parameters:
|
||||
src: cached media file to copy from. Must already exist on disk
|
||||
(caller is responsible for `download_image()` or
|
||||
`cached_path_for()`).
|
||||
post: Post object whose tags drive template rendering and
|
||||
populate the `library_meta` row.
|
||||
dest_dir: target directory. Created if missing. Anywhere on
|
||||
disk; only matters for the `library_meta` write whether
|
||||
it's inside `saved_dir()`.
|
||||
db: Database instance. Used for the same-post-on-disk lookup
|
||||
during collision resolution and the conditional meta write.
|
||||
in_flight: optional set of basenames already claimed by earlier
|
||||
iterations of the current batch. The chosen basename is
|
||||
added to this set before return. Pass `None` for single-
|
||||
file saves; pass a shared `set()` (one per batch
|
||||
invocation, never reused across invocations) for batches.
|
||||
explicit_name: optional override. When set, the template is
|
||||
bypassed and this basename (already including extension)
|
||||
is used as the starting point for collision resolution.
|
||||
category_fetcher: keyword-only, required. The CategoryFetcher
|
||||
for the post's site, or None when the site categorises tags
|
||||
inline (Danbooru, e621) so ``post.tag_categories`` is always
|
||||
pre-populated. Pass ``None`` explicitly rather than omitting
|
||||
the argument — the ``=None`` default was removed so saves
|
||||
can't silently render templates with empty category tokens
|
||||
just because a caller forgot to plumb the fetcher through.
|
||||
|
||||
Returns:
|
||||
The actual `Path` the file landed at after collision
|
||||
resolution. Callers use this for status messages and signal
|
||||
emission.
|
||||
"""
|
||||
if explicit_name is not None:
|
||||
basename = explicit_name
|
||||
else:
|
||||
template = db.get_setting("library_filename_template")
|
||||
# If the template uses category tokens and the post has no
|
||||
# categories yet, fetch them synchronously before rendering.
|
||||
# This guarantees the filename is correct even when saving
|
||||
# a post the user hasn't clicked (no prior ensure from the
|
||||
# info panel path).
|
||||
if (
|
||||
category_fetcher is not None
|
||||
and not post.tag_categories
|
||||
and template
|
||||
and any(tok in template for tok in _CATEGORY_TOKENS)
|
||||
):
|
||||
await category_fetcher.ensure_categories(post)
|
||||
basename = render_filename_template(template, post, src.suffix)
|
||||
|
||||
in_flight_set: set[str] = in_flight if in_flight is not None else set()
|
||||
final_basename = _resolve_collision(
|
||||
dest_dir,
|
||||
basename,
|
||||
post.id,
|
||||
in_flight_set,
|
||||
lambda path, pid: _same_post_on_disk(db, path, pid),
|
||||
)
|
||||
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
dest = dest_dir / final_basename
|
||||
|
||||
# Skip the copy if same-post-on-disk made the chosen basename
|
||||
# match an existing copy of this post (idempotent re-save).
|
||||
if not dest.exists():
|
||||
shutil.copy2(src, dest)
|
||||
|
||||
if in_flight is not None:
|
||||
in_flight.add(final_basename)
|
||||
|
||||
if _is_in_library(dest):
|
||||
db.save_library_meta(
|
||||
post_id=post.id,
|
||||
tags=post.tags,
|
||||
tag_categories=post.tag_categories,
|
||||
score=post.score,
|
||||
rating=post.rating,
|
||||
source=post.source,
|
||||
file_url=post.file_url,
|
||||
filename=final_basename,
|
||||
)
|
||||
|
||||
return dest
|
||||
|
||||
|
||||
def _is_in_library(path: Path) -> bool:
|
||||
"""True if `path` is inside `saved_dir()`. Wraps `is_relative_to`
|
||||
in a try/except for older Pythons where it raises on non-relative
|
||||
paths instead of returning False."""
|
||||
try:
|
||||
return path.is_relative_to(saved_dir())
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
def _same_post_on_disk(db: Database, path: Path, post_id: int) -> bool:
|
||||
"""True if `path` is already a saved copy of `post_id`.
|
||||
|
||||
Looks up the path's basename in `library_meta` first; if no row,
|
||||
falls back to the legacy v0.2.3 digit-stem heuristic (a file named
|
||||
`12345.jpg` is treated as belonging to post 12345). Returns False
|
||||
when `path` is outside `saved_dir()` — we can't tell who owns
|
||||
files anywhere else.
|
||||
"""
|
||||
try:
|
||||
if not path.is_relative_to(saved_dir()):
|
||||
return False
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
existing_id = db.get_library_post_id_by_filename(path.name)
|
||||
if existing_id is not None:
|
||||
return existing_id == post_id
|
||||
|
||||
# Legacy v0.2.3 fallback: rows whose filename column is empty
|
||||
# belong to digit-stem files. Mirrors the digit-stem checks in
|
||||
# gui/library.py.
|
||||
if path.stem.isdigit():
|
||||
return int(path.stem) == post_id
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _resolve_collision(
|
||||
dest_dir: Path,
|
||||
basename: str,
|
||||
post_id: int,
|
||||
in_flight: set[str],
|
||||
same_post_check: Callable[[Path, int], bool],
|
||||
) -> str:
|
||||
"""Return a basename that won't collide at `dest_dir`.
|
||||
|
||||
Same-post collisions — the basename already belongs to this post,
|
||||
on disk — are returned unchanged so the caller skips the copy and
|
||||
the re-save is idempotent. Different-post collisions get sequential
|
||||
`_1`, `_2`, `_3`, ... suffixes until a free name is found.
|
||||
|
||||
The `in_flight` set is consulted alongside on-disk state so that
|
||||
earlier iterations of the same batch don't get re-picked for later
|
||||
posts in the same call.
|
||||
"""
|
||||
target = dest_dir / basename
|
||||
if basename not in in_flight and not target.exists():
|
||||
return basename
|
||||
if target.exists() and same_post_check(target, post_id):
|
||||
return basename
|
||||
|
||||
stem, dot, ext = basename.rpartition(".")
|
||||
if not dot:
|
||||
stem, ext = basename, ""
|
||||
else:
|
||||
ext = "." + ext
|
||||
|
||||
n = 1
|
||||
while n <= 9999:
|
||||
candidate = f"{stem}_{n}{ext}"
|
||||
cand_path = dest_dir / candidate
|
||||
if candidate not in in_flight and not cand_path.exists():
|
||||
return candidate
|
||||
if cand_path.exists() and same_post_check(cand_path, post_id):
|
||||
return candidate
|
||||
n += 1
|
||||
|
||||
# Defensive fallback. 10k collisions for one rendered name means
|
||||
# something is structurally wrong (template renders to a constant?
|
||||
# filesystem state corruption?); break the loop with the post id
|
||||
# so the user gets *some* file rather than an exception.
|
||||
return f"{stem}_{post_id}{ext}"
|
||||
34
booru_viewer/gui/_source_html.py
Normal file
34
booru_viewer/gui/_source_html.py
Normal file
@ -0,0 +1,34 @@
|
||||
"""Pure helper for the info-panel Source line.
|
||||
|
||||
Lives in its own module so the helper can be unit-tested from CI
|
||||
without pulling in PySide6. ``info_panel.py`` imports it.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from html import escape
|
||||
|
||||
|
||||
def build_source_html(source: str | None) -> str:
|
||||
"""Build the rich-text fragment for the Source line in the info panel.
|
||||
|
||||
The fragment is inserted into a QLabel set to RichText format with
|
||||
setOpenExternalLinks(True) — that means QTextBrowser parses any HTML
|
||||
in *source* as markup. Without escaping, a hostile booru can break
|
||||
out of the href attribute, inject ``<img>`` tracking pixels, or make
|
||||
the visible text disagree with the click target.
|
||||
|
||||
The href is only emitted for an http(s) URL; everything else is
|
||||
rendered as escaped plain text. Both the href value and the visible
|
||||
display text are HTML-escaped (audit finding #6).
|
||||
"""
|
||||
if not source:
|
||||
return "none"
|
||||
# Truncate display text but keep the full URL for the link target.
|
||||
display = source if len(source) <= 60 else source[:57] + "..."
|
||||
if source.startswith(("http://", "https://")):
|
||||
return (
|
||||
f'<a href="{escape(source, quote=True)}" '
|
||||
f'style="color: #4fc3f7;">{escape(display)}</a>'
|
||||
)
|
||||
return escape(display)
|
||||
File diff suppressed because it is too large
Load Diff
356
booru_viewer/gui/app_runtime.py
Normal file
356
booru_viewer/gui/app_runtime.py
Normal file
@ -0,0 +1,356 @@
|
||||
"""Application entry point and Qt-style loading."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from PySide6.QtCore import Qt
|
||||
from PySide6.QtWidgets import QApplication
|
||||
|
||||
from .main_window import BooruApp
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
def _apply_windows_dark_mode(app: QApplication) -> None:
    """Detect Windows dark mode and apply a Fusion dark palette if needed.

    Reads ``HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\
    Personalize\\AppsUseLightTheme``; 0 means the user selected dark app
    mode, in which case the Fusion style, a dark QPalette, and a
    flattening stylesheet are installed on *app*. Any failure (missing
    registry key, winreg unavailable, Qt errors) is logged and otherwise
    ignored so theming can never break startup.
    """
    try:
        import winreg
        # PyHKEY supports the context-manager protocol; previously the
        # handle leaked whenever QueryValueEx raised (CloseKey skipped).
        with winreg.OpenKey(
            winreg.HKEY_CURRENT_USER,
            r"Software\Microsoft\Windows\CurrentVersion\Themes\Personalize",
        ) as key:
            value, _ = winreg.QueryValueEx(key, "AppsUseLightTheme")
        if value == 0:
            from PySide6.QtGui import QPalette, QColor
            app.setStyle("Fusion")
            palette = QPalette()
            palette.setColor(QPalette.ColorRole.Window, QColor(32, 32, 32))
            palette.setColor(QPalette.ColorRole.WindowText, QColor(255, 255, 255))
            palette.setColor(QPalette.ColorRole.Base, QColor(25, 25, 25))
            palette.setColor(QPalette.ColorRole.AlternateBase, QColor(38, 38, 38))
            palette.setColor(QPalette.ColorRole.ToolTipBase, QColor(50, 50, 50))
            palette.setColor(QPalette.ColorRole.ToolTipText, QColor(255, 255, 255))
            palette.setColor(QPalette.ColorRole.Text, QColor(255, 255, 255))
            palette.setColor(QPalette.ColorRole.Button, QColor(51, 51, 51))
            palette.setColor(QPalette.ColorRole.ButtonText, QColor(255, 255, 255))
            palette.setColor(QPalette.ColorRole.BrightText, QColor(255, 0, 0))
            palette.setColor(QPalette.ColorRole.Link, QColor(0, 120, 215))
            palette.setColor(QPalette.ColorRole.Highlight, QColor(0, 120, 215))
            palette.setColor(QPalette.ColorRole.HighlightedText, QColor(255, 255, 255))
            palette.setColor(QPalette.ColorRole.Mid, QColor(51, 51, 51))
            palette.setColor(QPalette.ColorRole.Dark, QColor(25, 25, 25))
            palette.setColor(QPalette.ColorRole.Shadow, QColor(0, 0, 0))
            palette.setColor(QPalette.ColorRole.Light, QColor(60, 60, 60))
            palette.setColor(QPalette.ColorRole.Midlight, QColor(55, 55, 55))
            palette.setColor(QPalette.ColorGroup.Disabled, QPalette.ColorRole.Text, QColor(127, 127, 127))
            palette.setColor(QPalette.ColorGroup.Disabled, QPalette.ColorRole.ButtonText, QColor(127, 127, 127))
            app.setPalette(palette)
            # Flatten Fusion's 3D look
            app.setStyleSheet(app.styleSheet() + """
                QPushButton {
                    border: 1px solid #555;
                    border-radius: 2px;
                    padding: 4px 12px;
                }
                QPushButton:hover { background-color: #444; }
                QPushButton:pressed { background-color: #333; }
                QComboBox {
                    border: 1px solid #555;
                    border-radius: 2px;
                    padding: 3px 6px;
                }
                QComboBox::drop-down {
                    border: none;
                }
                QSpinBox {
                    border: 1px solid #555;
                    border-radius: 2px;
                }
                QLineEdit, QTextEdit {
                    border: 1px solid #555;
                    border-radius: 2px;
                    padding: 3px;
                    color: #fff;
                    background-color: #191919;
                }
                QScrollBar:vertical {
                    background: #252525;
                    width: 12px;
                }
                QScrollBar::handle:vertical {
                    background: #555;
                    border-radius: 4px;
                    min-height: 20px;
                }
                QScrollBar::add-line:vertical, QScrollBar::sub-line:vertical {
                    height: 0;
                }
            """)
    except Exception as e:
        # Was a generic "Operation failed" — name the operation so the
        # log line is actionable.
        log.warning(f"Windows dark mode detection/styling failed: {e}")
|
||||
|
||||
|
||||
# Base popout overlay style — always loaded *before* the user QSS so the
# floating top toolbar (`#_slideshow_toolbar`) and bottom video controls
# (`#_slideshow_controls`) get a sane translucent-black-with-white-text
# look on themes that don't define their own overlay rules. Bundled themes
# in `themes/` redefine the same selectors with their @palette colors and
# win on tie (last rule of equal specificity wins in QSS), so anyone using
# a packaged theme keeps the themed overlay; anyone with a stripped-down
# custom.qss still gets a usable overlay instead of bare letterbox.
#
# Pure data: QSS text handed to app.setStyleSheet(); the selectors must
# match the objectName() values given to the popout overlay widgets.
_BASE_POPOUT_OVERLAY_QSS = """
QWidget#_slideshow_toolbar,
QWidget#_slideshow_controls {
    background: rgba(0, 0, 0, 160);
}
QWidget#_slideshow_toolbar *,
QWidget#_slideshow_controls * {
    background: transparent;
    color: white;
    border: none;
}
QWidget#_slideshow_toolbar QPushButton,
QWidget#_slideshow_controls QPushButton {
    background: transparent;
    color: white;
    border: 1px solid rgba(255, 255, 255, 80);
    padding: 2px 6px;
    font-size: 15px;
    font-weight: bold;
}
QWidget#_slideshow_toolbar QPushButton:hover,
QWidget#_slideshow_controls QPushButton:hover {
    background: rgba(255, 255, 255, 30);
}
QWidget#_slideshow_toolbar QSlider::groove:horizontal,
QWidget#_slideshow_controls QSlider::groove:horizontal {
    background: rgba(255, 255, 255, 40);
    height: 4px;
    border: none;
}
QWidget#_slideshow_toolbar QSlider::handle:horizontal,
QWidget#_slideshow_controls QSlider::handle:horizontal {
    background: white;
    width: 10px;
    margin: -4px 0;
    border: none;
}
QWidget#_slideshow_toolbar QSlider::sub-page:horizontal,
QWidget#_slideshow_controls QSlider::sub-page:horizontal {
    background: white;
}
QWidget#_slideshow_toolbar QLabel,
QWidget#_slideshow_controls QLabel {
    background: transparent;
    color: white;
}
/* Hide the standard icon column on every QMessageBox (question mark,
 * warning triangle, info circle) so confirm dialogs are text-only. */
QMessageBox QLabel#qt_msgboxex_icon_label {
    image: none;
    max-width: 0px;
    max-height: 0px;
    margin: 0px;
    padding: 0px;
}
"""
|
||||
|
||||
|
||||
def _load_user_qss(path: Path) -> str:
|
||||
"""Load a QSS file with optional @palette variable substitution.
|
||||
|
||||
Qt's QSS dialect has no native variables, so we add a tiny preprocessor:
|
||||
|
||||
/* @palette
|
||||
accent: #cba6f7
|
||||
bg: #1e1e2e
|
||||
text: #cdd6f4
|
||||
*/
|
||||
|
||||
QWidget {
|
||||
background-color: ${bg};
|
||||
color: ${text};
|
||||
selection-background-color: ${accent};
|
||||
}
|
||||
|
||||
The header comment block is parsed for `name: value` pairs and any
|
||||
`${name}` reference elsewhere in the file is substituted with the
|
||||
corresponding value before the QSS is handed to Qt. This lets users
|
||||
recolor a bundled theme by editing the palette block alone, without
|
||||
hunting through the body for every hex literal.
|
||||
|
||||
Backward compatibility: a file without an @palette block is returned
|
||||
as-is, so plain hand-written Qt-standard QSS still loads unchanged.
|
||||
Unknown ${name} references are left in place verbatim and logged as
|
||||
warnings so typos are visible in the log.
|
||||
"""
|
||||
import re
|
||||
text = path.read_text()
|
||||
palette_match = re.search(r'/\*\s*@palette\b(.*?)\*/', text, re.DOTALL)
|
||||
if not palette_match:
|
||||
return text
|
||||
|
||||
palette: dict[str, str] = {}
|
||||
for raw_line in palette_match.group(1).splitlines():
|
||||
# Strip leading whitespace and any leading * from C-style continuation
|
||||
line = raw_line.strip().lstrip('*').strip()
|
||||
if not line or ':' not in line:
|
||||
continue
|
||||
key, value = line.split(':', 1)
|
||||
key = key.strip()
|
||||
value = value.strip().rstrip(';').strip()
|
||||
# Allow trailing comments on the same line
|
||||
if '/*' in value:
|
||||
value = value.split('/*', 1)[0].strip()
|
||||
if key and value:
|
||||
palette[key] = value
|
||||
|
||||
refs = set(re.findall(r'\$\{([a-zA-Z_][a-zA-Z0-9_]*)\}', text))
|
||||
missing = refs - palette.keys()
|
||||
if missing:
|
||||
log.warning(
|
||||
f"QSS @palette: unknown vars {sorted(missing)} in {path.name} "
|
||||
f"— left in place verbatim, fix the @palette block to define them"
|
||||
)
|
||||
|
||||
def replace(m):
|
||||
return palette.get(m.group(1), m.group(0))
|
||||
|
||||
return re.sub(r'\$\{([a-zA-Z_][a-zA-Z0-9_]*)\}', replace, text)
|
||||
|
||||
|
||||
def run() -> None:
    """Application entry point: build the QApplication, style it, run the loop.

    Ordering here is deliberate and load-bearing: the locale fix must come
    after QApplication() (which resets it), the style must be set before
    the stylesheet, and `_BASE_POPOUT_OVERLAY_QSS` is installed on both
    the custom-QSS and the no-QSS paths so the slideshow overlay widgets
    always have usable defaults. Never returns — ends in sys.exit().
    """
    from ..core.config import data_dir

    app = QApplication(sys.argv)

    # Set a stable Wayland app_id so Hyprland and other compositors can
    # consistently identify our windows by class (not by title, which
    # changes when search terms appear in the title bar). Qt translates
    # setDesktopFileName into the xdg-shell app_id on Wayland.
    app.setApplicationName("booru-viewer")
    app.setDesktopFileName("booru-viewer")

    # mpv requires LC_NUMERIC=C — Qt resets the locale in QApplication(),
    # so we must restore it after Qt init but before creating any mpv instances.
    import locale
    locale.setlocale(locale.LC_NUMERIC, "C")

    # Apply dark mode on Windows 10+ if system is set to dark
    if sys.platform == "win32":
        _apply_windows_dark_mode(app)

    # Load user custom stylesheet if it exists
    custom_css = data_dir() / "custom.qss"
    if custom_css.exists():
        try:
            # Use Fusion style with arrow color fix
            from PySide6.QtWidgets import QProxyStyle
            from PySide6.QtGui import QPalette, QColor, QPainter as _P
            from PySide6.QtCore import QPoint as _QP

            import re
            # Run through the @palette preprocessor (see _load_user_qss
            # for the dialect). Plain Qt-standard QSS files without an
            # @palette block are returned unchanged.
            css_text = _load_user_qss(custom_css)

            # Extract text color for arrows.
            # NOTE(review): only matches a hex literal inside a QWidget
            # rule; named colors / rgb() fall back to the default grey.
            m = re.search(r'QWidget\s*\{[^}]*?(?:^|\s)color\s*:\s*(#[0-9a-fA-F]{3,8})', css_text, re.MULTILINE)
            arrow_color = QColor(m.group(1)) if m else QColor(200, 200, 200)

            class _DarkArrowStyle(QProxyStyle):
                """Fusion proxy that draws visible arrows on dark themes."""
                def drawPrimitive(self, element, option, painter, widget=None):
                    # Redraw the four indicator arrows (spinbox up/down,
                    # generic arrow up/down) as flat triangles in the
                    # theme's text color; everything else is delegated.
                    if element in (self.PrimitiveElement.PE_IndicatorSpinUp,
                                   self.PrimitiveElement.PE_IndicatorSpinDown,
                                   self.PrimitiveElement.PE_IndicatorArrowDown,
                                   self.PrimitiveElement.PE_IndicatorArrowUp):
                        painter.save()
                        painter.setRenderHint(_P.RenderHint.Antialiasing)
                        painter.setPen(Qt.PenStyle.NoPen)
                        painter.setBrush(arrow_color)
                        r = option.rect
                        cx, cy = r.center().x(), r.center().y()
                        # Triangle half-size scales with the indicator rect.
                        s = min(r.width(), r.height()) // 3
                        from PySide6.QtGui import QPolygon
                        if element in (self.PrimitiveElement.PE_IndicatorSpinUp,
                                       self.PrimitiveElement.PE_IndicatorArrowUp):
                            painter.drawPolygon(QPolygon([
                                _QP(cx, cy - s), _QP(cx - s, cy + s), _QP(cx + s, cy + s)
                            ]))
                        else:
                            painter.drawPolygon(QPolygon([
                                _QP(cx - s, cy - s), _QP(cx + s, cy - s), _QP(cx, cy + s)
                            ]))
                        painter.restore()
                        return
                    super().drawPrimitive(element, option, painter, widget)

            app.setStyle(_DarkArrowStyle("Fusion"))
            # Prepend the base overlay defaults so even minimal custom.qss
            # files get a usable popout overlay. User rules with the same
            # selectors come last and win on tie.
            app.setStyleSheet(_BASE_POPOUT_OVERLAY_QSS + "\n" + css_text)

            # Extract selection color for grid highlight
            pal = app.palette()
            m = re.search(r'selection-background-color\s*:\s*(#[0-9a-fA-F]{3,8})', css_text)
            if m:
                pal.setColor(QPalette.ColorRole.Highlight, QColor(m.group(1)))
                app.setPalette(pal)
        except Exception as e:
            # Best-effort styling: a broken custom.qss must not stop launch.
            log.warning(f"Operation failed: {e}")
    else:
        # No custom.qss — force Fusion widgets so distro pyside6 builds linked
        # against system Qt don't pick up Breeze (or whatever the platform
        # theme plugin supplies) and diverge from the bundled-Qt look that
        # source-from-pip users get.
        app.setStyle("Fusion")
        # If no system theme is detected, apply a dark Fusion palette so
        # fresh installs don't land on blinding white. KDE/GNOME users
        # keep their palette (dark or light) — we only intervene when
        # Qt is running on its built-in defaults with no Trolltech.conf.
        from PySide6.QtGui import QPalette, QColor
        pal = app.palette()
        _has_system_theme = Path("~/.config/Trolltech.conf").expanduser().exists()
        if not _has_system_theme and pal.color(QPalette.ColorRole.Window).lightness() > 128:
            dark = QPalette()
            dark.setColor(QPalette.ColorRole.Window, QColor("#2b2b2b"))
            dark.setColor(QPalette.ColorRole.WindowText, QColor("#d4d4d4"))
            dark.setColor(QPalette.ColorRole.Base, QColor("#232323"))
            dark.setColor(QPalette.ColorRole.AlternateBase, QColor("#2b2b2b"))
            dark.setColor(QPalette.ColorRole.Text, QColor("#d4d4d4"))
            dark.setColor(QPalette.ColorRole.Button, QColor("#353535"))
            dark.setColor(QPalette.ColorRole.ButtonText, QColor("#d4d4d4"))
            dark.setColor(QPalette.ColorRole.BrightText, QColor("#ff4444"))
            dark.setColor(QPalette.ColorRole.Highlight, QColor("#3daee9"))
            dark.setColor(QPalette.ColorRole.HighlightedText, QColor("#1e1e1e"))
            dark.setColor(QPalette.ColorRole.ToolTipBase, QColor("#353535"))
            dark.setColor(QPalette.ColorRole.ToolTipText, QColor("#d4d4d4"))
            dark.setColor(QPalette.ColorRole.PlaceholderText, QColor("#7a7a7a"))
            dark.setColor(QPalette.ColorRole.Link, QColor("#3daee9"))
            app.setPalette(dark)
        # Install the popout overlay defaults so the floating toolbar/controls
        # have a sane background instead of bare letterbox color.
        app.setStyleSheet(_BASE_POPOUT_OVERLAY_QSS)

    # Set app icon (works in taskbar on all platforms)
    from PySide6.QtGui import QIcon
    # PyInstaller sets _MEIPASS for bundled data; fall back to the source
    # tree root and finally the user data dir.
    base_dir = Path(getattr(sys, '_MEIPASS', Path(__file__).parent.parent.parent))
    icon_path = base_dir / "icon.png"
    if not icon_path.exists():
        icon_path = Path(__file__).parent.parent.parent / "icon.png"
    if not icon_path.exists():
        icon_path = data_dir() / "icon.png"
    if icon_path.exists():
        app.setWindowIcon(QIcon(str(icon_path)))

    window = BooruApp()
    window.show()
    sys.exit(app.exec())
|
||||
30
booru_viewer/gui/async_signals.py
Normal file
30
booru_viewer/gui/async_signals.py
Normal file
@ -0,0 +1,30 @@
|
||||
"""Qt signal hub for async worker results."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from PySide6.QtCore import QObject, Signal
|
||||
|
||||
|
||||
class AsyncSignals(QObject):
    """Signals for async worker results.

    A QObject-based hub: background workers emit these and GUI code
    connects slots, keeping worker code free of widget references.
    Per-signal argument meanings below are inferred from names/types
    where no comment existed — verify against the emitting workers.
    """
    search_done = Signal(list)      # fresh search finished: result list
    search_append = Signal(list)    # follow-up page: results to append
    search_error = Signal(str)      # human-readable failure message
    thumb_done = Signal(int, str)   # presumably (index, local path) — verify emitter
    image_done = Signal(str, str)   # presumably (path, info) — verify emitter
    image_error = Signal(str)       # human-readable failure message
    # Fast-path for uncached video posts: emit the remote URL directly
    # so mpv can start streaming + decoding immediately instead of
    # waiting for download_image to write the whole file to disk first.
    # download_image still runs in parallel to populate the cache for
    # next time. Args: (url, info, width, height) — width/height come
    # from post.width/post.height for the popout pre-fit optimization.
    video_stream = Signal(str, str, int, int)
    bookmark_done = Signal(int, str)    # presumably (post_id, message) — verify emitter
    bookmark_error = Signal(str)        # human-readable failure message
    autocomplete_done = Signal(list)    # tag suggestions for the search box
    batch_progress = Signal(int, int, int)  # current, total, post_id (of the just-finished item)
    batch_done = Signal(str)            # completion summary/message
    download_progress = Signal(int, int)    # bytes_downloaded, total_bytes
    prefetch_progress = Signal(int, float)  # index, progress (0-1 or -1 to hide)
    categories_updated = Signal(object)     # Post whose tag_categories just got populated
||||
@ -3,9 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import threading
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Callable, TYPE_CHECKING
|
||||
|
||||
from PySide6.QtCore import Qt, Signal, QObject, QTimer
|
||||
from PySide6.QtGui import QPixmap
|
||||
@ -24,14 +23,20 @@ from PySide6.QtWidgets import (
|
||||
)
|
||||
|
||||
from ..core.db import Database, Bookmark
|
||||
from ..core.api.base import Post
|
||||
from ..core.cache import download_thumbnail
|
||||
from ..core.concurrency import run_on_app_loop
|
||||
from .grid import ThumbnailGrid
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..core.api.category_fetcher import CategoryFetcher
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
class BookmarkThumbSignals(QObject):
    """Signal hub for BookmarksView's background work (queued delivery
    back to the GUI thread — see the QueuedConnection connects in
    BookmarksView.__init__)."""
    thumb_ready = Signal(int, str)  # (grid index, local thumbnail path)
    save_done = Signal(int)  # post_id
|
||||
|
||||
|
||||
class BookmarksView(QWidget):
|
||||
@ -42,12 +47,23 @@ class BookmarksView(QWidget):
|
||||
bookmarks_changed = Signal() # emitted after bookmark add/remove/unsave
|
||||
open_in_browser_requested = Signal(int, int) # (site_id, post_id)
|
||||
|
||||
def __init__(self, db: Database, parent: QWidget | None = None) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
db: Database,
|
||||
category_fetcher_factory: Callable[[], "CategoryFetcher | None"],
|
||||
parent: QWidget | None = None,
|
||||
) -> None:
|
||||
super().__init__(parent)
|
||||
self._db = db
|
||||
# Factory returns the fetcher for the currently-active site, or
|
||||
# None when the site categorises tags inline (Danbooru, e621).
|
||||
# Called at save time so a site switch between BookmarksView
|
||||
# construction and a save picks up the new site's fetcher.
|
||||
self._category_fetcher_factory = category_fetcher_factory
|
||||
self._bookmarks: list[Bookmark] = []
|
||||
self._signals = BookmarkThumbSignals()
|
||||
self._signals.thumb_ready.connect(self._on_thumb_ready, Qt.ConnectionType.QueuedConnection)
|
||||
self._signals.save_done.connect(self._on_save_done, Qt.ConnectionType.QueuedConnection)
|
||||
|
||||
layout = QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
@ -72,12 +88,28 @@ class BookmarksView(QWidget):
|
||||
top.addWidget(self._folder_combo)
|
||||
|
||||
manage_btn = QPushButton("+ Folder")
|
||||
manage_btn.setToolTip("New folder")
|
||||
manage_btn.setToolTip("New bookmark folder")
|
||||
manage_btn.setFixedWidth(75)
|
||||
manage_btn.setStyleSheet(_btn_style)
|
||||
manage_btn.clicked.connect(self._new_folder)
|
||||
top.addWidget(manage_btn)
|
||||
|
||||
# Delete the currently-selected bookmark folder. Disabled when
|
||||
# the combo is on a virtual entry (All Bookmarks / Unfiled).
|
||||
# This only removes the DB row — bookmarks in that folder become
|
||||
# Unfiled (per remove_folder's UPDATE … SET folder = NULL). The
|
||||
# library filesystem is untouched: bookmark folders and library
|
||||
# folders are independent name spaces.
|
||||
self._delete_folder_btn = QPushButton("− Folder")
|
||||
self._delete_folder_btn.setToolTip("Delete the selected bookmark folder")
|
||||
self._delete_folder_btn.setFixedWidth(75)
|
||||
self._delete_folder_btn.setStyleSheet(_btn_style)
|
||||
self._delete_folder_btn.clicked.connect(self._delete_folder)
|
||||
top.addWidget(self._delete_folder_btn)
|
||||
self._folder_combo.currentTextChanged.connect(
|
||||
self._update_delete_folder_enabled
|
||||
)
|
||||
|
||||
self._search_input = QLineEdit()
|
||||
self._search_input.setPlaceholderText("Search bookmarks by tag")
|
||||
# Enter still triggers an immediate search.
|
||||
@ -121,6 +153,39 @@ class BookmarksView(QWidget):
|
||||
if idx >= 0:
|
||||
self._folder_combo.setCurrentIndex(idx)
|
||||
self._folder_combo.blockSignals(False)
|
||||
self._update_delete_folder_enabled()
|
||||
|
||||
def _update_delete_folder_enabled(self, *_args) -> None:
|
||||
"""Enable the delete-folder button only on real folder rows."""
|
||||
text = self._folder_combo.currentText()
|
||||
self._delete_folder_btn.setEnabled(text not in ("", "All Bookmarks", "Unfiled"))
|
||||
|
||||
def _delete_folder(self) -> None:
|
||||
"""Delete the currently-selected bookmark folder.
|
||||
|
||||
Bookmarks filed under it become Unfiled (remove_folder UPDATEs
|
||||
favorites.folder = NULL before DELETE FROM favorite_folders).
|
||||
Library files on disk are unaffected — bookmark folders and
|
||||
library folders are separate concepts after the decoupling.
|
||||
"""
|
||||
name = self._folder_combo.currentText()
|
||||
if name in ("", "All Bookmarks", "Unfiled"):
|
||||
return
|
||||
reply = QMessageBox.question(
|
||||
self,
|
||||
"Delete Bookmark Folder",
|
||||
f"Delete bookmark folder '{name}'?\n\n"
|
||||
f"Bookmarks in this folder will become Unfiled. "
|
||||
f"Library files on disk are not affected.",
|
||||
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
|
||||
)
|
||||
if reply != QMessageBox.StandardButton.Yes:
|
||||
return
|
||||
self._db.remove_folder(name)
|
||||
# Drop back to All Bookmarks so the now-orphan filter doesn't
|
||||
# leave the combo on a missing row.
|
||||
self._folder_combo.setCurrentText("All Bookmarks")
|
||||
self.refresh()
|
||||
|
||||
def refresh(self, search: str | None = None) -> None:
|
||||
self._refresh_folders()
|
||||
@ -146,22 +211,16 @@ class BookmarksView(QWidget):
|
||||
self._count_label.setText(f"{len(self._bookmarks)} bookmarks")
|
||||
thumbs = self._grid.set_posts(len(self._bookmarks))
|
||||
|
||||
from ..core.config import saved_dir, saved_folder_dir, MEDIA_EXTENSIONS
|
||||
# Batch the "is this saved?" check via library_meta. One indexed
|
||||
# query gives us a set of every saved post_id, then per-thumb
|
||||
# membership is O(1). Format-agnostic — works for digit-stem
|
||||
# legacy files AND templated post-refactor saves, where the
|
||||
# old find_library_files(post_id)+digit-stem check silently
|
||||
# failed because the on-disk basename no longer matches the id.
|
||||
saved_ids = self._db.get_saved_post_ids()
|
||||
for i, (fav, thumb) in enumerate(zip(self._bookmarks, thumbs)):
|
||||
thumb.set_bookmarked(True)
|
||||
# Check if saved to library
|
||||
saved = False
|
||||
if fav.folder:
|
||||
saved = any(
|
||||
(saved_folder_dir(fav.folder) / f"{fav.post_id}{ext}").exists()
|
||||
for ext in MEDIA_EXTENSIONS
|
||||
)
|
||||
else:
|
||||
saved = any(
|
||||
(saved_dir() / f"{fav.post_id}{ext}").exists()
|
||||
for ext in MEDIA_EXTENSIONS
|
||||
)
|
||||
thumb.set_saved_locally(saved)
|
||||
thumb.set_saved_locally(fav.post_id in saved_ids)
|
||||
# Set cached path for drag-and-drop and copy
|
||||
if fav.cached_path and Path(fav.cached_path).exists():
|
||||
thumb._cached_path = fav.cached_path
|
||||
@ -170,23 +229,35 @@ class BookmarksView(QWidget):
|
||||
elif fav.cached_path and Path(fav.cached_path).exists():
|
||||
pix = QPixmap(fav.cached_path)
|
||||
if not pix.isNull():
|
||||
thumb.set_pixmap(pix)
|
||||
thumb.set_pixmap(pix, fav.cached_path)
|
||||
|
||||
def _load_thumb_async(self, index: int, url: str) -> None:
|
||||
# Schedule the download on the persistent event loop instead of
|
||||
# spawning a daemon thread that runs its own throwaway loop. This
|
||||
# is the fix for the loop-affinity bug where the cache module's
|
||||
# shared httpx client would get bound to the throwaway loop and
|
||||
# then fail every subsequent use from the persistent loop.
|
||||
async def _dl():
|
||||
try:
|
||||
path = await download_thumbnail(url)
|
||||
self._signals.thumb_ready.emit(index, str(path))
|
||||
except Exception as e:
|
||||
log.warning(f"Bookmark thumb {index} failed: {e}")
|
||||
threading.Thread(target=lambda: asyncio.run(_dl()), daemon=True).start()
|
||||
run_on_app_loop(_dl())
|
||||
|
||||
def _on_thumb_ready(self, index: int, path: str) -> None:
|
||||
thumbs = self._grid._thumbs
|
||||
if 0 <= index < len(thumbs):
|
||||
pix = QPixmap(path)
|
||||
if not pix.isNull():
|
||||
thumbs[index].set_pixmap(pix)
|
||||
thumbs[index].set_pixmap(pix, path)
|
||||
|
||||
def _on_save_done(self, post_id: int) -> None:
|
||||
"""Light the saved-locally dot on the thumbnail for post_id."""
|
||||
for i, fav in enumerate(self._bookmarks):
|
||||
if fav.post_id == post_id and i < len(self._grid._thumbs):
|
||||
self._grid._thumbs[i].set_saved_locally(True)
|
||||
break
|
||||
|
||||
def _do_search(self) -> None:
|
||||
text = self._search_input.text().strip()
|
||||
@ -200,30 +271,75 @@ class BookmarksView(QWidget):
|
||||
if 0 <= index < len(self._bookmarks):
|
||||
self.bookmark_activated.emit(self._bookmarks[index])
|
||||
|
||||
def _bookmark_to_post(self, fav: Bookmark) -> Post:
|
||||
"""Adapt a Bookmark into a Post for the renderer / save flow.
|
||||
|
||||
The unified save_post_file flow takes a Post (because it's
|
||||
called from the browse side too), so bookmarks borrow Post
|
||||
shape just for the duration of the save call. Bookmark already
|
||||
carries every field the renderer reads — this adapter is the
|
||||
one place to update if Post's field set drifts later.
|
||||
"""
|
||||
return Post(
|
||||
id=fav.post_id,
|
||||
file_url=fav.file_url,
|
||||
preview_url=fav.preview_url,
|
||||
tags=fav.tags,
|
||||
score=fav.score or 0,
|
||||
rating=fav.rating,
|
||||
source=fav.source,
|
||||
tag_categories=fav.tag_categories or {},
|
||||
)
|
||||
|
||||
    def _save_bookmark_to_library(self, fav: Bookmark, folder: str | None) -> None:
        """Copy a bookmarked image into the library, optionally inside
        a subfolder, routing through the unified save_post_file flow.

        Fixes the latent v0.2.3 bug where bookmark→library copies
        wrote files but never registered library_meta rows — those
        files were on disk but invisible to Library tag-search.

        Best-effort: silently returns when the cached source file is
        missing or the folder name is invalid; failures inside the
        async save are logged, not raised."""
        from ..core.config import saved_dir, saved_folder_dir
        from ..core.library_save import save_post_file

        # Nothing to copy if the cached original is no longer on disk.
        if not (fav.cached_path and Path(fav.cached_path).exists()):
            return
        try:
            dest_dir = saved_folder_dir(folder) if folder else saved_dir()
        except ValueError:
            # saved_folder_dir rejects invalid folder names; skip quietly.
            return
        src = Path(fav.cached_path)
        post = self._bookmark_to_post(fav)

        # Resolve the fetcher synchronously, before scheduling the
        # coroutine, so this save uses the site active *now* (see the
        # factory comment in __init__).
        fetcher = self._category_fetcher_factory()

        async def _do():
            try:
                await save_post_file(
                    src, post, dest_dir, self._db,
                    category_fetcher=fetcher,
                )
                # Queued signal lights the saved dot back on the GUI thread.
                self._signals.save_done.emit(fav.post_id)
            except Exception as e:
                log.warning(f"Bookmark→library save #{fav.post_id} failed: {e}")

        run_on_app_loop(_do())
|
||||
|
||||
def _copy_to_library_unsorted(self, fav: Bookmark) -> None:
|
||||
"""Copy a bookmarked image to the unsorted library folder."""
|
||||
from ..core.config import saved_dir
|
||||
if fav.cached_path and Path(fav.cached_path).exists():
|
||||
import shutil
|
||||
src = Path(fav.cached_path)
|
||||
dest = saved_dir() / f"{fav.post_id}{src.suffix}"
|
||||
if not dest.exists():
|
||||
shutil.copy2(src, dest)
|
||||
self._save_bookmark_to_library(fav, None)
|
||||
|
||||
def _copy_to_library(self, fav: Bookmark, folder: str) -> None:
|
||||
"""Copy a bookmarked image to the library folder on disk."""
|
||||
from ..core.config import saved_folder_dir
|
||||
if fav.cached_path and Path(fav.cached_path).exists():
|
||||
import shutil
|
||||
src = Path(fav.cached_path)
|
||||
dest = saved_folder_dir(folder) / f"{fav.post_id}{src.suffix}"
|
||||
if not dest.exists():
|
||||
shutil.copy2(src, dest)
|
||||
"""Copy a bookmarked image to the named library subfolder."""
|
||||
self._save_bookmark_to_library(fav, folder)
|
||||
|
||||
def _new_folder(self) -> None:
|
||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
self._db.add_folder(name.strip())
|
||||
try:
|
||||
self._db.add_folder(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
||||
return
|
||||
self._refresh_folders()
|
||||
|
||||
def _on_context_menu(self, index: int, pos) -> None:
|
||||
@ -242,32 +358,25 @@ class BookmarksView(QWidget):
|
||||
menu.addSeparator()
|
||||
save_as = menu.addAction("Save As...")
|
||||
|
||||
# Save to Library submenu
|
||||
save_lib_menu = menu.addMenu("Save to Library")
|
||||
save_lib_unsorted = save_lib_menu.addAction("Unsorted")
|
||||
save_lib_menu.addSeparator()
|
||||
# Save to Library / Unsave — mutually exclusive based on
|
||||
# whether the post is already in the library.
|
||||
from ..core.config import library_folders
|
||||
save_lib_menu = None
|
||||
save_lib_unsorted = None
|
||||
save_lib_new = None
|
||||
save_lib_folders = {}
|
||||
for folder in self._db.get_folders():
|
||||
a = save_lib_menu.addAction(folder)
|
||||
save_lib_folders[id(a)] = folder
|
||||
save_lib_menu.addSeparator()
|
||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||
|
||||
unsave_lib = None
|
||||
# Only show unsave if the post is saved locally
|
||||
from ..core.config import saved_dir, saved_folder_dir, MEDIA_EXTENSIONS
|
||||
_saved = False
|
||||
_sd = saved_dir()
|
||||
if _sd.exists():
|
||||
_saved = any((_sd / f"{fav.post_id}{ext}").exists() for ext in MEDIA_EXTENSIONS)
|
||||
if not _saved:
|
||||
for folder in self._db.get_folders():
|
||||
d = saved_folder_dir(folder)
|
||||
if d.exists() and any((d / f"{fav.post_id}{ext}").exists() for ext in MEDIA_EXTENSIONS):
|
||||
_saved = True
|
||||
break
|
||||
if _saved:
|
||||
if self._db.is_post_in_library(fav.post_id):
|
||||
unsave_lib = menu.addAction("Unsave from Library")
|
||||
else:
|
||||
save_lib_menu = menu.addMenu("Save to Library")
|
||||
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
||||
save_lib_menu.addSeparator()
|
||||
for folder in library_folders():
|
||||
a = save_lib_menu.addAction(folder)
|
||||
save_lib_folders[id(a)] = folder
|
||||
save_lib_menu.addSeparator()
|
||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||
copy_file = menu.addAction("Copy File to Clipboard")
|
||||
copy_url = menu.addAction("Copy Image URL")
|
||||
copy_tags = menu.addAction("Copy Tags")
|
||||
@ -293,18 +402,19 @@ class BookmarksView(QWidget):
|
||||
|
||||
if action == save_lib_unsorted:
|
||||
self._copy_to_library_unsorted(fav)
|
||||
self.refresh()
|
||||
elif action == save_lib_new:
|
||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
self._db.add_folder(name.strip())
|
||||
try:
|
||||
from ..core.config import saved_folder_dir
|
||||
saved_folder_dir(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
||||
return
|
||||
self._copy_to_library(fav, name.strip())
|
||||
self._db.move_bookmark_to_folder(fav.id, name.strip())
|
||||
self.refresh()
|
||||
elif id(action) in save_lib_folders:
|
||||
folder_name = save_lib_folders[id(action)]
|
||||
self._copy_to_library(fav, folder_name)
|
||||
self.refresh()
|
||||
elif action == open_browser:
|
||||
self.open_in_browser_requested.emit(fav.site_id, fav.post_id)
|
||||
elif action == open_default:
|
||||
@ -312,16 +422,36 @@ class BookmarksView(QWidget):
|
||||
QDesktopServices.openUrl(QUrl.fromLocalFile(fav.cached_path))
|
||||
elif action == save_as:
|
||||
if fav.cached_path and Path(fav.cached_path).exists():
|
||||
from ..core.config import render_filename_template
|
||||
from ..core.library_save import save_post_file
|
||||
src = Path(fav.cached_path)
|
||||
dest = save_file(self, "Save Image", f"post_{fav.post_id}{src.suffix}", f"Images (*{src.suffix})")
|
||||
post = self._bookmark_to_post(fav)
|
||||
template = self._db.get_setting("library_filename_template")
|
||||
default_name = render_filename_template(template, post, src.suffix)
|
||||
dest = save_file(self, "Save Image", default_name, f"Images (*{src.suffix})")
|
||||
if dest:
|
||||
import shutil
|
||||
shutil.copy2(src, dest)
|
||||
dest_path = Path(dest)
|
||||
fetcher = self._category_fetcher_factory()
|
||||
|
||||
async def _do_save_as():
|
||||
try:
|
||||
await save_post_file(
|
||||
src, post, dest_path.parent, self._db,
|
||||
explicit_name=dest_path.name,
|
||||
category_fetcher=fetcher,
|
||||
)
|
||||
except Exception as e:
|
||||
log.warning(f"Bookmark Save As #{fav.post_id} failed: {e}")
|
||||
|
||||
run_on_app_loop(_do_save_as())
|
||||
elif action == unsave_lib:
|
||||
from ..core.cache import delete_from_library
|
||||
if delete_from_library(fav.post_id, fav.folder):
|
||||
self.refresh()
|
||||
self.bookmarks_changed.emit()
|
||||
delete_from_library(fav.post_id, db=self._db)
|
||||
for i, f in enumerate(self._bookmarks):
|
||||
if f.post_id == fav.post_id and i < len(self._grid._thumbs):
|
||||
self._grid._thumbs[i].set_saved_locally(False)
|
||||
break
|
||||
self.bookmarks_changed.emit()
|
||||
elif action == copy_file:
|
||||
path = fav.cached_path
|
||||
if path and Path(path).exists():
|
||||
@ -343,14 +473,19 @@ class BookmarksView(QWidget):
|
||||
elif action == move_new:
|
||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
self._db.add_folder(name.strip())
|
||||
try:
|
||||
self._db.add_folder(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
||||
return
|
||||
# Pure bookmark organization: file the bookmark, don't
|
||||
# touch the library filesystem. Save to Library is now a
|
||||
# separate, explicit action.
|
||||
self._db.move_bookmark_to_folder(fav.id, name.strip())
|
||||
self._copy_to_library(fav, name.strip())
|
||||
self.refresh()
|
||||
elif id(action) in folder_actions:
|
||||
folder_name = folder_actions[id(action)]
|
||||
self._db.move_bookmark_to_folder(fav.id, folder_name)
|
||||
self._copy_to_library(fav, folder_name)
|
||||
self.refresh()
|
||||
elif action == remove_bookmark:
|
||||
self._db.remove_bookmark(fav.site_id, fav.post_id)
|
||||
@ -362,11 +497,32 @@ class BookmarksView(QWidget):
|
||||
if not favs:
|
||||
return
|
||||
|
||||
from ..core.config import library_folders
|
||||
|
||||
menu = QMenu(self)
|
||||
save_all = menu.addAction(f"Save All ({len(favs)}) to Library")
|
||||
unsave_all = menu.addAction(f"Unsave All ({len(favs)}) from Library")
|
||||
|
||||
any_unsaved = any(not self._db.is_post_in_library(f.post_id) for f in favs)
|
||||
any_saved = any(self._db.is_post_in_library(f.post_id) for f in favs)
|
||||
|
||||
save_lib_menu = None
|
||||
save_lib_unsorted = None
|
||||
save_lib_new = None
|
||||
save_lib_folder_actions: dict[int, str] = {}
|
||||
unsave_all = None
|
||||
if any_unsaved:
|
||||
save_lib_menu = menu.addMenu(f"Save All ({len(favs)}) to Library")
|
||||
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
||||
save_lib_menu.addSeparator()
|
||||
for folder in library_folders():
|
||||
a = save_lib_menu.addAction(folder)
|
||||
save_lib_folder_actions[id(a)] = folder
|
||||
save_lib_menu.addSeparator()
|
||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||
if any_saved:
|
||||
unsave_all = menu.addAction(f"Unsave All ({len(favs)}) from Library")
|
||||
menu.addSeparator()
|
||||
|
||||
# Move to Folder is bookmark organization — reads from the DB.
|
||||
move_menu = menu.addMenu(f"Move All ({len(favs)}) to Folder")
|
||||
move_none = move_menu.addAction("Unfiled")
|
||||
move_menu.addSeparator()
|
||||
@ -382,18 +538,36 @@ class BookmarksView(QWidget):
|
||||
if not action:
|
||||
return
|
||||
|
||||
if action == save_all:
|
||||
def _save_all_into(folder_name: str | None) -> None:
|
||||
for fav in favs:
|
||||
if fav.folder:
|
||||
self._copy_to_library(fav, fav.folder)
|
||||
if folder_name:
|
||||
self._copy_to_library(fav, folder_name)
|
||||
else:
|
||||
self._copy_to_library_unsorted(fav)
|
||||
self.refresh()
|
||||
|
||||
if action == save_lib_unsorted:
|
||||
_save_all_into(None)
|
||||
elif action == save_lib_new:
|
||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
try:
|
||||
from ..core.config import saved_folder_dir
|
||||
saved_folder_dir(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
||||
return
|
||||
_save_all_into(name.strip())
|
||||
elif id(action) in save_lib_folder_actions:
|
||||
_save_all_into(save_lib_folder_actions[id(action)])
|
||||
elif action == unsave_all:
|
||||
from ..core.cache import delete_from_library
|
||||
unsaved_ids = set()
|
||||
for fav in favs:
|
||||
delete_from_library(fav.post_id, fav.folder)
|
||||
self.refresh()
|
||||
delete_from_library(fav.post_id, db=self._db)
|
||||
unsaved_ids.add(fav.post_id)
|
||||
for i, fav in enumerate(self._bookmarks):
|
||||
if fav.post_id in unsaved_ids and i < len(self._grid._thumbs):
|
||||
self._grid._thumbs[i].set_saved_locally(False)
|
||||
self.bookmarks_changed.emit()
|
||||
elif action == move_none:
|
||||
for fav in favs:
|
||||
@ -401,9 +575,9 @@ class BookmarksView(QWidget):
|
||||
self.refresh()
|
||||
elif id(action) in folder_actions:
|
||||
folder_name = folder_actions[id(action)]
|
||||
# Bookmark organization only — Save to Library is separate.
|
||||
for fav in favs:
|
||||
self._db.move_bookmark_to_folder(fav.id, folder_name)
|
||||
self._copy_to_library(fav, folder_name)
|
||||
self.refresh()
|
||||
elif action == remove_all:
|
||||
for fav in favs:
|
||||
|
||||
248
booru_viewer/gui/context_menus.py
Normal file
248
booru_viewer/gui/context_menus.py
Normal file
@ -0,0 +1,248 @@
|
||||
"""Single-post and multi-select right-click context menus."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from PySide6.QtWidgets import QApplication, QMenu
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
|
||||
class ContextMenuHandler:
|
||||
"""Builds and dispatches context menus for the thumbnail grid."""
|
||||
|
||||
def __init__(self, app: BooruApp) -> None:
|
||||
self._app = app
|
||||
|
||||
@staticmethod
|
||||
def _is_child_of_menu(action, menu) -> bool:
|
||||
parent = action.parent()
|
||||
while parent:
|
||||
if parent == menu:
|
||||
return True
|
||||
parent = getattr(parent, 'parent', lambda: None)()
|
||||
return False
|
||||
|
||||
def show_single(self, index: int, pos) -> None:
|
||||
if index < 0 or index >= len(self._app._posts):
|
||||
return
|
||||
post = self._app._posts[index]
|
||||
menu = QMenu(self._app)
|
||||
|
||||
open_browser = menu.addAction("Open in Browser")
|
||||
open_default = menu.addAction("Open in Default App")
|
||||
menu.addSeparator()
|
||||
save_as = menu.addAction("Save As...")
|
||||
|
||||
from ..core.config import library_folders
|
||||
save_lib_menu = None
|
||||
save_lib_unsorted = None
|
||||
save_lib_new = None
|
||||
save_lib_folders = {}
|
||||
unsave_lib = None
|
||||
if self._app._post_actions.is_post_saved(post.id):
|
||||
unsave_lib = menu.addAction("Unsave from Library")
|
||||
else:
|
||||
save_lib_menu = menu.addMenu("Save to Library")
|
||||
save_lib_unsorted = save_lib_menu.addAction("Unfiled")
|
||||
save_lib_menu.addSeparator()
|
||||
for folder in library_folders():
|
||||
a = save_lib_menu.addAction(folder)
|
||||
save_lib_folders[id(a)] = folder
|
||||
save_lib_menu.addSeparator()
|
||||
save_lib_new = save_lib_menu.addAction("+ New Folder...")
|
||||
copy_clipboard = menu.addAction("Copy File to Clipboard")
|
||||
copy_url = menu.addAction("Copy Image URL")
|
||||
copy_tags = menu.addAction("Copy Tags")
|
||||
menu.addSeparator()
|
||||
|
||||
fav_action = None
|
||||
bm_folder_actions: dict[int, str] = {}
|
||||
bm_unfiled = None
|
||||
bm_new = None
|
||||
if self._app._post_actions.is_current_bookmarked(index):
|
||||
fav_action = menu.addAction("Remove Bookmark")
|
||||
else:
|
||||
fav_menu = menu.addMenu("Bookmark as")
|
||||
bm_unfiled = fav_menu.addAction("Unfiled")
|
||||
fav_menu.addSeparator()
|
||||
for folder in self._app._db.get_folders():
|
||||
a = fav_menu.addAction(folder)
|
||||
bm_folder_actions[id(a)] = folder
|
||||
fav_menu.addSeparator()
|
||||
bm_new = fav_menu.addAction("+ New Folder...")
|
||||
menu.addSeparator()
|
||||
bl_menu = menu.addMenu("Blacklist Tag")
|
||||
if post.tag_categories:
|
||||
for category, tags in post.tag_categories.items():
|
||||
cat_menu = bl_menu.addMenu(category)
|
||||
for tag in tags[:30]:
|
||||
cat_menu.addAction(tag)
|
||||
else:
|
||||
for tag in post.tag_list[:30]:
|
||||
bl_menu.addAction(tag)
|
||||
bl_post_action = menu.addAction("Blacklist Post")
|
||||
|
||||
action = menu.exec(pos)
|
||||
if not action:
|
||||
return
|
||||
|
||||
if action == open_browser:
|
||||
self._app._open_in_browser(post)
|
||||
elif action == open_default:
|
||||
self._app._open_in_default(post)
|
||||
elif action == save_as:
|
||||
self._app._post_actions.save_as(post)
|
||||
elif action == save_lib_unsorted:
|
||||
self._app._post_actions.save_to_library(post, None)
|
||||
elif action == save_lib_new:
|
||||
from PySide6.QtWidgets import QInputDialog, QMessageBox
|
||||
name, ok = QInputDialog.getText(self._app, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
try:
|
||||
from ..core.config import saved_folder_dir
|
||||
saved_folder_dir(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self._app, "Invalid Folder Name", str(e))
|
||||
return
|
||||
self._app._post_actions.save_to_library(post, name.strip())
|
||||
elif id(action) in save_lib_folders:
|
||||
self._app._post_actions.save_to_library(post, save_lib_folders[id(action)])
|
||||
elif action == unsave_lib:
|
||||
self._app._post_actions.unsave_from_preview()
|
||||
elif action == copy_clipboard:
|
||||
self._app._copy_file_to_clipboard()
|
||||
elif action == copy_url:
|
||||
QApplication.clipboard().setText(post.file_url)
|
||||
self._app._status.showMessage("URL copied")
|
||||
elif action == copy_tags:
|
||||
QApplication.clipboard().setText(post.tags)
|
||||
self._app._status.showMessage("Tags copied")
|
||||
elif fav_action is not None and action == fav_action:
|
||||
self._app._post_actions.toggle_bookmark(index)
|
||||
elif bm_unfiled is not None and action == bm_unfiled:
|
||||
self._app._post_actions.toggle_bookmark(index, None)
|
||||
elif bm_new is not None and action == bm_new:
|
||||
from PySide6.QtWidgets import QInputDialog, QMessageBox
|
||||
name, ok = QInputDialog.getText(self._app, "New Bookmark Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
try:
|
||||
self._app._db.add_folder(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self._app, "Invalid Folder Name", str(e))
|
||||
return
|
||||
self._app._post_actions.toggle_bookmark(index, name.strip())
|
||||
elif id(action) in bm_folder_actions:
|
||||
self._app._post_actions.toggle_bookmark(index, bm_folder_actions[id(action)])
|
||||
elif self._is_child_of_menu(action, bl_menu):
|
||||
tag = action.text()
|
||||
self._app._db.add_blacklisted_tag(tag)
|
||||
self._app._db.set_setting("blacklist_enabled", "1")
|
||||
if self._app._preview._current_path and tag in post.tag_list:
|
||||
from ..core.cache import cached_path_for
|
||||
cp = str(cached_path_for(post.file_url))
|
||||
if cp == self._app._preview._current_path:
|
||||
self._app._preview.clear()
|
||||
if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
|
||||
self._app._popout_ctrl.window.stop_media()
|
||||
self._app._status.showMessage(f"Blacklisted: {tag}")
|
||||
self._app._search_ctrl.remove_blacklisted_from_grid(tag=tag)
|
||||
elif action == bl_post_action:
|
||||
self._app._db.add_blacklisted_post(post.file_url)
|
||||
self._app._search_ctrl.remove_blacklisted_from_grid(post_url=post.file_url)
|
||||
self._app._status.showMessage(f"Post #{post.id} blacklisted")
|
||||
self._app._search_ctrl.do_search()
|
||||
|
||||
def show_multi(self, indices: list, pos) -> None:
|
||||
posts = [self._app._posts[i] for i in indices if 0 <= i < len(self._app._posts)]
|
||||
if not posts:
|
||||
return
|
||||
count = len(posts)
|
||||
|
||||
site_id = self._app._site_combo.currentData()
|
||||
any_bookmarked = bool(site_id) and any(self._app._db.is_bookmarked(site_id, p.id) for p in posts)
|
||||
any_unbookmarked = bool(site_id) and any(not self._app._db.is_bookmarked(site_id, p.id) for p in posts)
|
||||
any_saved = any(self._app._post_actions.is_post_saved(p.id) for p in posts)
|
||||
any_unsaved = any(not self._app._post_actions.is_post_saved(p.id) for p in posts)
|
||||
|
||||
menu = QMenu(self._app)
|
||||
|
||||
save_menu = None
|
||||
save_unsorted = None
|
||||
save_new = None
|
||||
save_folder_actions: dict[int, str] = {}
|
||||
if any_unsaved:
|
||||
from ..core.config import library_folders
|
||||
save_menu = menu.addMenu(f"Save All to Library ({count})")
|
||||
save_unsorted = save_menu.addAction("Unfiled")
|
||||
for folder in library_folders():
|
||||
a = save_menu.addAction(folder)
|
||||
save_folder_actions[id(a)] = folder
|
||||
save_menu.addSeparator()
|
||||
save_new = save_menu.addAction("+ New Folder...")
|
||||
|
||||
unsave_lib_all = None
|
||||
if any_saved:
|
||||
unsave_lib_all = menu.addAction(f"Unsave All from Library ({count})")
|
||||
|
||||
if (any_unsaved or any_saved) and (any_unbookmarked or any_bookmarked):
|
||||
menu.addSeparator()
|
||||
|
||||
fav_all = None
|
||||
if any_unbookmarked:
|
||||
fav_all = menu.addAction(f"Bookmark All ({count})")
|
||||
|
||||
unfav_all = None
|
||||
if any_bookmarked:
|
||||
unfav_all = menu.addAction(f"Remove All Bookmarks ({count})")
|
||||
|
||||
if any_unsaved or any_saved or any_unbookmarked or any_bookmarked:
|
||||
menu.addSeparator()
|
||||
batch_dl = menu.addAction(f"Download All ({count})...")
|
||||
copy_urls = menu.addAction("Copy All URLs")
|
||||
|
||||
action = menu.exec(pos)
|
||||
if not action:
|
||||
return
|
||||
|
||||
if fav_all is not None and action == fav_all:
|
||||
self._app._post_actions.bulk_bookmark(indices, posts)
|
||||
elif save_unsorted is not None and action == save_unsorted:
|
||||
self._app._post_actions.bulk_save(indices, posts, None)
|
||||
elif save_new is not None and action == save_new:
|
||||
from PySide6.QtWidgets import QInputDialog, QMessageBox
|
||||
name, ok = QInputDialog.getText(self._app, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
try:
|
||||
from ..core.config import saved_folder_dir
|
||||
saved_folder_dir(name.strip())
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self._app, "Invalid Folder Name", str(e))
|
||||
return
|
||||
self._app._post_actions.bulk_save(indices, posts, name.strip())
|
||||
elif id(action) in save_folder_actions:
|
||||
self._app._post_actions.bulk_save(indices, posts, save_folder_actions[id(action)])
|
||||
elif unsave_lib_all is not None and action == unsave_lib_all:
|
||||
self._app._post_actions.bulk_unsave(indices, posts)
|
||||
elif action == batch_dl:
|
||||
from .dialogs import select_directory
|
||||
dest = select_directory(self._app, "Download to folder")
|
||||
if dest:
|
||||
self._app._post_actions.batch_download_posts(posts, dest)
|
||||
elif unfav_all is not None and action == unfav_all:
|
||||
if site_id:
|
||||
for post in posts:
|
||||
self._app._db.remove_bookmark(site_id, post.id)
|
||||
for idx in indices:
|
||||
if 0 <= idx < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[idx].set_bookmarked(False)
|
||||
self._app._grid._clear_multi()
|
||||
self._app._status.showMessage(f"Removed {count} bookmarks")
|
||||
if self._app._stack.currentIndex() == 1:
|
||||
self._app._bookmarks_view.refresh()
|
||||
elif action == copy_urls:
|
||||
urls = "\n".join(p.file_url for p in posts)
|
||||
QApplication.clipboard().setText(urls)
|
||||
self._app._status.showMessage(f"Copied {count} URLs")
|
||||
@ -3,25 +3,35 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from PySide6.QtWidgets import QFileDialog, QWidget
|
||||
|
||||
from ..core.config import IS_WINDOWS
|
||||
|
||||
|
||||
_gtk_cached: bool | None = None
|
||||
|
||||
def _use_gtk() -> bool:
|
||||
global _gtk_cached
|
||||
if IS_WINDOWS:
|
||||
return False
|
||||
if _gtk_cached is not None:
|
||||
return _gtk_cached
|
||||
try:
|
||||
from ..core.db import Database
|
||||
db = Database()
|
||||
val = db.get_setting("file_dialog_platform")
|
||||
db.close()
|
||||
return val == "gtk"
|
||||
_gtk_cached = val == "gtk"
|
||||
except Exception:
|
||||
return False
|
||||
_gtk_cached = False
|
||||
return _gtk_cached
|
||||
|
||||
|
||||
def reset_gtk_cache() -> None:
|
||||
"""Called after settings change so the next dialog picks up the new value."""
|
||||
global _gtk_cached
|
||||
_gtk_cached = None
|
||||
|
||||
|
||||
def save_file(
|
||||
|
||||
@ -2,22 +2,20 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
import logging
|
||||
|
||||
from PySide6.QtCore import Qt, Signal, QSize, QRect, QRectF, QMimeData, QUrl, QPoint, Property
|
||||
from PySide6.QtGui import QPixmap, QPainter, QPainterPath, QColor, QPen, QKeyEvent, QWheelEvent, QDrag, QMouseEvent
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
from PySide6.QtCore import Qt, Signal, QSize, QRect, QRectF, QMimeData, QUrl, QPoint, Property, QPropertyAnimation, QEasingCurve
|
||||
from PySide6.QtGui import QPixmap, QPainter, QColor, QPen, QKeyEvent, QWheelEvent, QDrag, QMouseEvent
|
||||
from PySide6.QtWidgets import (
|
||||
QWidget,
|
||||
QScrollArea,
|
||||
QMenu,
|
||||
QApplication,
|
||||
QRubberBand,
|
||||
)
|
||||
|
||||
from ..core.api.base import Post
|
||||
|
||||
THUMB_SIZE = 180
|
||||
THUMB_SPACING = 8
|
||||
THUMB_SPACING = 2
|
||||
BORDER_WIDTH = 2
|
||||
|
||||
|
||||
@ -65,10 +63,18 @@ class ThumbnailWidget(QWidget):
|
||||
def _set_idle_color(self, c): self._idle_color = QColor(c) if isinstance(c, str) else c
|
||||
idleColor = Property(QColor, _get_idle_color, _set_idle_color)
|
||||
|
||||
# Thumbnail fade-in opacity (0.0 → 1.0 on pixmap arrival)
|
||||
def _get_thumb_opacity(self): return self._thumb_opacity
|
||||
def _set_thumb_opacity(self, v):
|
||||
self._thumb_opacity = v
|
||||
self.update()
|
||||
thumbOpacity = Property(float, _get_thumb_opacity, _set_thumb_opacity)
|
||||
|
||||
def __init__(self, index: int, parent: QWidget | None = None) -> None:
|
||||
super().__init__(parent)
|
||||
self.index = index
|
||||
self._pixmap: QPixmap | None = None
|
||||
self._source_path: str | None = None # on-disk path, for re-scaling on size change
|
||||
self._selected = False
|
||||
self._multi_selected = False
|
||||
self._bookmarked = False
|
||||
@ -77,6 +83,7 @@ class ThumbnailWidget(QWidget):
|
||||
self._drag_start: QPoint | None = None
|
||||
self._cached_path: str | None = None
|
||||
self._prefetch_progress: float = -1 # -1 = not prefetching, 0-1 = progress
|
||||
self._thumb_opacity: float = 0.0
|
||||
# Seed selection colors from the palette so non-themed environments
|
||||
# (no custom.qss) automatically use the system highlight color.
|
||||
# The qproperty setters above override these later when the QSS is
|
||||
@ -88,16 +95,31 @@ class ThumbnailWidget(QWidget):
|
||||
self._hover_color = self._selection_color.lighter(150)
|
||||
self._idle_color = pal.color(QPalette.ColorRole.Mid)
|
||||
self.setFixedSize(THUMB_SIZE, THUMB_SIZE)
|
||||
self.setCursor(Qt.CursorShape.PointingHandCursor)
|
||||
self.setMouseTracking(True)
|
||||
|
||||
def set_pixmap(self, pixmap: QPixmap) -> None:
|
||||
def set_pixmap(self, pixmap: QPixmap, path: str | None = None) -> None:
|
||||
if path is not None:
|
||||
self._source_path = path
|
||||
self._pixmap = pixmap.scaled(
|
||||
THUMB_SIZE - 4, THUMB_SIZE - 4,
|
||||
Qt.AspectRatioMode.KeepAspectRatio,
|
||||
Qt.TransformationMode.SmoothTransformation,
|
||||
)
|
||||
self.update()
|
||||
self._thumb_opacity = 0.0
|
||||
anim = QPropertyAnimation(self, b"thumbOpacity")
|
||||
anim.setDuration(80)
|
||||
anim.setStartValue(0.0)
|
||||
anim.setEndValue(1.0)
|
||||
anim.setEasingCurve(QEasingCurve.Type.OutCubic)
|
||||
anim.finished.connect(lambda: self._on_fade_done(anim))
|
||||
self._fade_anim = anim
|
||||
anim.start()
|
||||
|
||||
def _on_fade_done(self, anim: QPropertyAnimation) -> None:
|
||||
"""Clear the reference then schedule deletion."""
|
||||
if self._fade_anim is anim:
|
||||
self._fade_anim = None
|
||||
anim.deleteLater()
|
||||
|
||||
def set_selected(self, selected: bool) -> None:
|
||||
self._selected = selected
|
||||
@ -130,7 +152,6 @@ class ThumbnailWidget(QWidget):
|
||||
# Defaults were seeded from the palette in __init__.
|
||||
highlight = self._selection_color
|
||||
base = pal.color(pal.ColorRole.Base)
|
||||
mid = self._idle_color
|
||||
window = pal.color(pal.ColorRole.Window)
|
||||
|
||||
# Fill entire cell with window color
|
||||
@ -182,7 +203,11 @@ class ThumbnailWidget(QWidget):
|
||||
if self._pixmap:
|
||||
x = (self.width() - self._pixmap.width()) // 2
|
||||
y = (self.height() - self._pixmap.height()) // 2
|
||||
if self._thumb_opacity < 1.0:
|
||||
p.setOpacity(self._thumb_opacity)
|
||||
p.drawPixmap(x, y, self._pixmap)
|
||||
if self._thumb_opacity < 1.0:
|
||||
p.setOpacity(1.0)
|
||||
|
||||
# Border drawn AFTER the pixmap. Plain rectangle (no rounding) so
|
||||
# it lines up exactly with the pixmap's square edges — no corner
|
||||
@ -252,24 +277,32 @@ class ThumbnailWidget(QWidget):
|
||||
|
||||
p.end()
|
||||
|
||||
def enterEvent(self, event) -> None:
|
||||
self._hover = True
|
||||
self.update()
|
||||
|
||||
def leaveEvent(self, event) -> None:
|
||||
self._hover = False
|
||||
self.update()
|
||||
|
||||
def mousePressEvent(self, event) -> None:
|
||||
if event.button() == Qt.MouseButton.LeftButton:
|
||||
self._drag_start = event.position().toPoint()
|
||||
self.clicked.emit(self.index, event)
|
||||
elif event.button() == Qt.MouseButton.RightButton:
|
||||
self.right_clicked.emit(self.index, event.globalPosition().toPoint())
|
||||
if self._hover:
|
||||
self._hover = False
|
||||
self.setCursor(Qt.CursorShape.ArrowCursor)
|
||||
self.update()
|
||||
|
||||
def mouseMoveEvent(self, event) -> None:
|
||||
# If the grid has a pending or active rubber band, forward the move
|
||||
grid = self._grid()
|
||||
if grid and (grid._rb_origin or grid._rb_pending_origin):
|
||||
vp_pos = self.mapTo(grid.viewport(), event.position().toPoint())
|
||||
if grid._rb_origin:
|
||||
grid._rb_drag(vp_pos)
|
||||
return
|
||||
if grid._maybe_start_rb(vp_pos):
|
||||
grid._rb_drag(vp_pos)
|
||||
return
|
||||
return
|
||||
# Update hover and cursor based on whether cursor is over the pixmap
|
||||
over = self._hit_pixmap(event.position().toPoint()) if self._pixmap else False
|
||||
if over != self._hover:
|
||||
self._hover = over
|
||||
self.setCursor(Qt.CursorShape.PointingHandCursor if over else Qt.CursorShape.ArrowCursor)
|
||||
self.update()
|
||||
if (self._drag_start and self._cached_path
|
||||
and (event.position().toPoint() - self._drag_start).manhattanLength() > 10):
|
||||
and (event.position().toPoint() - self._drag_start).manhattanLength() > 30):
|
||||
drag = QDrag(self)
|
||||
mime = QMimeData()
|
||||
mime.setUrls([QUrl.fromLocalFile(self._cached_path)])
|
||||
@ -278,15 +311,65 @@ class ThumbnailWidget(QWidget):
|
||||
drag.setPixmap(self._pixmap.scaled(64, 64, Qt.AspectRatioMode.KeepAspectRatio))
|
||||
drag.exec(Qt.DropAction.CopyAction)
|
||||
self._drag_start = None
|
||||
self.setCursor(Qt.CursorShape.ArrowCursor)
|
||||
return
|
||||
super().mouseMoveEvent(event)
|
||||
|
||||
def _hit_pixmap(self, pos) -> bool:
|
||||
"""True if pos is within the drawn pixmap area."""
|
||||
if not self._pixmap:
|
||||
return False
|
||||
px = (self.width() - self._pixmap.width()) // 2
|
||||
py = (self.height() - self._pixmap.height()) // 2
|
||||
return QRect(px, py, self._pixmap.width(), self._pixmap.height()).contains(pos)
|
||||
|
||||
def _grid(self):
|
||||
"""Walk up to the ThumbnailGrid ancestor."""
|
||||
w = self.parentWidget()
|
||||
while w:
|
||||
if isinstance(w, ThumbnailGrid):
|
||||
return w
|
||||
w = w.parentWidget()
|
||||
return None
|
||||
|
||||
def mousePressEvent(self, event) -> None:
|
||||
if event.button() == Qt.MouseButton.LeftButton:
|
||||
pos = event.position().toPoint()
|
||||
if not self._hit_pixmap(pos):
|
||||
grid = self._grid()
|
||||
if grid:
|
||||
grid.on_padding_click(self, pos)
|
||||
event.accept()
|
||||
return
|
||||
# Pixmap click — clear any stale rubber band state from a
|
||||
# previous interrupted drag before starting a new interaction.
|
||||
grid = self._grid()
|
||||
if grid:
|
||||
grid._clear_stale_rubber_band()
|
||||
self._drag_start = pos
|
||||
self.clicked.emit(self.index, event)
|
||||
elif event.button() == Qt.MouseButton.RightButton:
|
||||
self.right_clicked.emit(self.index, event.globalPosition().toPoint())
|
||||
|
||||
def mouseReleaseEvent(self, event) -> None:
|
||||
self._drag_start = None
|
||||
grid = self._grid()
|
||||
if grid:
|
||||
if grid._rb_origin:
|
||||
grid._rb_end()
|
||||
elif grid._rb_pending_origin is not None:
|
||||
# Click without drag — treat as deselect
|
||||
grid._rb_pending_origin = None
|
||||
grid.clear_selection()
|
||||
|
||||
def mouseDoubleClickEvent(self, event) -> None:
|
||||
self._drag_start = None
|
||||
if event.button() == Qt.MouseButton.LeftButton:
|
||||
pos = event.position().toPoint()
|
||||
if not self._hit_pixmap(pos):
|
||||
grid = self._grid()
|
||||
if grid:
|
||||
grid.on_padding_click(self, pos)
|
||||
return
|
||||
self.double_clicked.emit(self.index)
|
||||
|
||||
|
||||
@ -304,6 +387,8 @@ class FlowLayout(QWidget):
|
||||
|
||||
def clear(self) -> None:
|
||||
for w in self._items:
|
||||
if hasattr(w, '_fade_anim') and w._fade_anim is not None:
|
||||
w._fade_anim.stop()
|
||||
w.setParent(None) # type: ignore
|
||||
w.deleteLater()
|
||||
self._items.clear()
|
||||
@ -313,28 +398,64 @@ class FlowLayout(QWidget):
|
||||
self._do_layout()
|
||||
|
||||
def _do_layout(self) -> None:
|
||||
"""Position children in a deterministic grid.
|
||||
|
||||
Uses the THUMB_SIZE / THUMB_SPACING constants instead of each
|
||||
widget's actual `width()` so the layout is independent of per-
|
||||
widget size variance. This matters because:
|
||||
|
||||
1. ThumbnailWidget calls `setFixedSize(THUMB_SIZE, THUMB_SIZE)`
|
||||
in `__init__`, capturing the constant at construction time.
|
||||
If `THUMB_SIZE` is later mutated (`_apply_settings` writes
|
||||
`grid_mod.THUMB_SIZE = new_size` in main_window.py:2953),
|
||||
existing thumbs keep their old fixed size while new ones
|
||||
(e.g. from infinite-scroll backfill via `append_posts`) get
|
||||
the new one. Mixed widths break a width-summing wrap loop.
|
||||
|
||||
2. The previous wrap loop walked each thumb summing
|
||||
`widget.width() + THUMB_SPACING` and wrapped on
|
||||
`x + item_w > self.width()`. At column boundaries
|
||||
(window width within a few pixels of `N * step + margin`)
|
||||
the boundary depends on every per-widget width, and any
|
||||
sub-pixel or mid-mutation drift could collapse the column
|
||||
count by 1.
|
||||
|
||||
Now: compute the column count once from the container width
|
||||
and the constant step, then position thumbs by `(col, row)`
|
||||
index. The layout is a function of `self.width()` and the
|
||||
constants only — no per-widget reads.
|
||||
"""
|
||||
if not self._items:
|
||||
return
|
||||
x, y = THUMB_SPACING, THUMB_SPACING
|
||||
row_height = 0
|
||||
width = self.width() or 800
|
||||
step = THUMB_SIZE + THUMB_SPACING
|
||||
# Account for the leading THUMB_SPACING margin: a row that fits
|
||||
# N thumbs needs `THUMB_SPACING + N * step` pixels minimum, not
|
||||
# `N * step`. The previous formula `w // step` overcounted by 1
|
||||
# at the boundary (e.g. width=1135 returned 6 columns where the
|
||||
# actual fit is 5).
|
||||
cols = max(1, (width - THUMB_SPACING) // step)
|
||||
|
||||
for widget in self._items:
|
||||
item_w = widget.width() + THUMB_SPACING
|
||||
item_h = widget.height() + THUMB_SPACING
|
||||
if x + item_w > width and x > THUMB_SPACING:
|
||||
x = THUMB_SPACING
|
||||
y += row_height
|
||||
row_height = 0
|
||||
for i, widget in enumerate(self._items):
|
||||
col = i % cols
|
||||
row = i // cols
|
||||
x = THUMB_SPACING + col * step
|
||||
y = THUMB_SPACING + row * step
|
||||
widget.move(x, y)
|
||||
widget.show()
|
||||
x += item_w
|
||||
row_height = max(row_height, item_h)
|
||||
|
||||
self.setMinimumHeight(y + row_height + THUMB_SPACING)
|
||||
rows = (len(self._items) + cols - 1) // cols
|
||||
self.setMinimumHeight(THUMB_SPACING + rows * step)
|
||||
|
||||
@property
|
||||
def columns(self) -> int:
|
||||
"""Same formula as `_do_layout`'s column count.
|
||||
|
||||
Both must agree exactly so callers (e.g. main_window's
|
||||
keyboard Up/Down nav step) get the value the visual layout
|
||||
actually used. The previous version was off-by-one because it
|
||||
omitted the leading THUMB_SPACING from the calculation.
|
||||
"""
|
||||
if not self._items:
|
||||
return 1
|
||||
# Use parent viewport width if inside a QScrollArea
|
||||
@ -343,7 +464,8 @@ class FlowLayout(QWidget):
|
||||
w = parent.viewport().width()
|
||||
else:
|
||||
w = self.width() or 800
|
||||
return max(1, w // (THUMB_SIZE + THUMB_SPACING))
|
||||
step = THUMB_SIZE + THUMB_SPACING
|
||||
return max(1, (w - THUMB_SPACING) // step)
|
||||
|
||||
|
||||
class ThumbnailGrid(QScrollArea):
|
||||
@ -372,6 +494,7 @@ class ThumbnailGrid(QScrollArea):
|
||||
self.verticalScrollBar().valueChanged.connect(self._check_scroll_bottom)
|
||||
# Rubber band drag selection
|
||||
self._rubber_band: QRubberBand | None = None
|
||||
self._rb_pending_origin: QPoint | None = None # press position, not yet confirmed as drag
|
||||
self._rb_origin: QPoint | None = None
|
||||
|
||||
@property
|
||||
@ -399,6 +522,7 @@ class ThumbnailGrid(QScrollArea):
|
||||
thumb.clicked.connect(self._on_thumb_click)
|
||||
thumb.double_clicked.connect(self._on_thumb_double_click)
|
||||
thumb.right_clicked.connect(self._on_thumb_right_click)
|
||||
|
||||
self._flow.add_widget(thumb)
|
||||
self._thumbs.append(thumb)
|
||||
|
||||
@ -413,6 +537,7 @@ class ThumbnailGrid(QScrollArea):
|
||||
thumb.clicked.connect(self._on_thumb_click)
|
||||
thumb.double_clicked.connect(self._on_thumb_double_click)
|
||||
thumb.right_clicked.connect(self._on_thumb_right_click)
|
||||
|
||||
self._flow.add_widget(thumb)
|
||||
self._thumbs.append(thumb)
|
||||
new_thumbs.append(thumb)
|
||||
@ -431,6 +556,21 @@ class ThumbnailGrid(QScrollArea):
|
||||
self._thumbs[self._selected_index].set_selected(False)
|
||||
self._selected_index = -1
|
||||
|
||||
def _clear_stale_rubber_band(self) -> None:
|
||||
"""Reset any leftover rubber band state before starting a new interaction.
|
||||
|
||||
Rubber band state can get stuck if a drag is interrupted without
|
||||
a matching release event — Wayland focus steal, drag outside the
|
||||
window, tab switch mid-drag, etc. Every new mouse press calls this
|
||||
so the next interaction starts from a clean slate instead of
|
||||
reusing a stale origin (which would make the rubber band "not
|
||||
work" until the app is restarted).
|
||||
"""
|
||||
if self._rubber_band is not None:
|
||||
self._rubber_band.hide()
|
||||
self._rb_origin = None
|
||||
self._rb_pending_origin = None
|
||||
|
||||
def _select(self, index: int) -> None:
|
||||
if index < 0 or index >= len(self._thumbs):
|
||||
return
|
||||
@ -493,42 +633,97 @@ class ThumbnailGrid(QScrollArea):
|
||||
self.ensureWidgetVisible(self._thumbs[index])
|
||||
self.context_requested.emit(index, pos)
|
||||
|
||||
def _start_rubber_band(self, pos: QPoint) -> None:
|
||||
"""Start a rubber band selection and deselect."""
|
||||
self._rb_origin = pos
|
||||
if not self._rubber_band:
|
||||
self._rubber_band = QRubberBand(QRubberBand.Shape.Rectangle, self.viewport())
|
||||
self._rubber_band.setGeometry(QRect(self._rb_origin, QSize()))
|
||||
self._rubber_band.show()
|
||||
self.clear_selection()
|
||||
|
||||
def on_padding_click(self, thumb, local_pos) -> None:
|
||||
"""Called directly by ThumbnailWidget when a click misses the pixmap."""
|
||||
self._clear_stale_rubber_band()
|
||||
vp_pos = thumb.mapTo(self.viewport(), local_pos)
|
||||
self._rb_pending_origin = vp_pos
|
||||
|
||||
def mousePressEvent(self, event: QMouseEvent) -> None:
|
||||
# Clicks on viewport/flow (gaps, space below thumbs) start rubber band
|
||||
if event.button() == Qt.MouseButton.LeftButton:
|
||||
# Only start rubber band if click is on empty grid space (not a thumbnail)
|
||||
self._clear_stale_rubber_band()
|
||||
child = self.childAt(event.position().toPoint())
|
||||
if child is self.widget() or child is self.viewport():
|
||||
self._rb_origin = event.position().toPoint()
|
||||
if not self._rubber_band:
|
||||
self._rubber_band = QRubberBand(QRubberBand.Shape.Rectangle, self.viewport())
|
||||
self._rubber_band.setGeometry(QRect(self._rb_origin, QSize()))
|
||||
self._rubber_band.show()
|
||||
self._clear_multi()
|
||||
self._rb_pending_origin = event.position().toPoint()
|
||||
return
|
||||
super().mousePressEvent(event)
|
||||
|
||||
def _rb_drag(self, vp_pos: QPoint) -> None:
|
||||
"""Update rubber band geometry and intersected thumb selection."""
|
||||
if not (self._rb_origin and self._rubber_band):
|
||||
return
|
||||
rb_rect = QRect(self._rb_origin, vp_pos).normalized()
|
||||
self._rubber_band.setGeometry(rb_rect)
|
||||
# rb_rect is in viewport coords; thumb.geometry() is in widget (content)
|
||||
# coords. Convert rb_rect to widget coords for the intersection test —
|
||||
# widget.mapFrom(viewport, (0,0)) gives the widget-coord of viewport's
|
||||
# origin, which is exactly the translation needed when scrolled.
|
||||
vp_offset = self.widget().mapFrom(self.viewport(), QPoint(0, 0))
|
||||
rb_widget = rb_rect.translated(vp_offset)
|
||||
self._clear_multi()
|
||||
for i, thumb in enumerate(self._thumbs):
|
||||
if rb_widget.intersects(thumb.geometry()):
|
||||
self._multi_selected.add(i)
|
||||
thumb.set_multi_selected(True)
|
||||
|
||||
def _rb_end(self) -> None:
|
||||
"""Hide the rubber band and clear origin."""
|
||||
if self._rubber_band:
|
||||
self._rubber_band.hide()
|
||||
self._rb_origin = None
|
||||
|
||||
def _maybe_start_rb(self, vp_pos: QPoint) -> bool:
|
||||
"""If a rubber band press is pending and we've moved past threshold, start it."""
|
||||
if self._rb_pending_origin is None:
|
||||
return False
|
||||
if (vp_pos - self._rb_pending_origin).manhattanLength() < 30:
|
||||
return False
|
||||
self._start_rubber_band(self._rb_pending_origin)
|
||||
self._rb_pending_origin = None
|
||||
return True
|
||||
|
||||
def mouseMoveEvent(self, event: QMouseEvent) -> None:
|
||||
pos = event.position().toPoint()
|
||||
if self._rb_origin and self._rubber_band:
|
||||
rb_rect = QRect(self._rb_origin, event.position().toPoint()).normalized()
|
||||
self._rubber_band.setGeometry(rb_rect)
|
||||
# Select thumbnails that intersect the rubber band
|
||||
vp_offset = self.widget().mapFrom(self.viewport(), QPoint(0, 0))
|
||||
self._clear_multi()
|
||||
for i, thumb in enumerate(self._thumbs):
|
||||
thumb_rect = thumb.geometry().translated(vp_offset)
|
||||
if rb_rect.intersects(thumb_rect):
|
||||
self._multi_selected.add(i)
|
||||
thumb.set_multi_selected(True)
|
||||
self._rb_drag(pos)
|
||||
return
|
||||
if self._maybe_start_rb(pos):
|
||||
self._rb_drag(pos)
|
||||
return
|
||||
super().mouseMoveEvent(event)
|
||||
|
||||
def mouseReleaseEvent(self, event: QMouseEvent) -> None:
|
||||
if self._rb_origin and self._rubber_band:
|
||||
self._rubber_band.hide()
|
||||
self._rb_origin = None
|
||||
self._rb_end()
|
||||
return
|
||||
if self._rb_pending_origin is not None:
|
||||
# Click without drag — treat as deselect
|
||||
self._rb_pending_origin = None
|
||||
self.clear_selection()
|
||||
return
|
||||
self.unsetCursor()
|
||||
super().mouseReleaseEvent(event)
|
||||
|
||||
def leaveEvent(self, event) -> None:
|
||||
# Clear stuck hover states — Wayland doesn't always fire
|
||||
# leaveEvent on individual child widgets when the mouse
|
||||
# exits the scroll area quickly.
|
||||
for thumb in self._thumbs:
|
||||
if thumb._hover:
|
||||
thumb._hover = False
|
||||
thumb.update()
|
||||
super().leaveEvent(event)
|
||||
|
||||
def select_all(self) -> None:
|
||||
self._clear_multi()
|
||||
for i in range(len(self._thumbs)):
|
||||
@ -573,6 +768,8 @@ class ThumbnailGrid(QScrollArea):
|
||||
elif key == Qt.Key.Key_Return or key == Qt.Key.Key_Enter:
|
||||
if 0 <= idx < len(self._thumbs):
|
||||
self.post_activated.emit(idx)
|
||||
elif key == Qt.Key.Key_Escape:
|
||||
self.clear_selection()
|
||||
elif key == Qt.Key.Key_Home:
|
||||
self._select(0)
|
||||
elif key == Qt.Key.Key_End:
|
||||
@ -594,6 +791,58 @@ class ThumbnailGrid(QScrollArea):
|
||||
self.reached_bottom.emit()
|
||||
if value <= 0 and sb.maximum() > 0:
|
||||
self.reached_top.emit()
|
||||
self._recycle_offscreen()
|
||||
|
||||
def _recycle_offscreen(self) -> None:
|
||||
"""Release decoded pixmaps for thumbnails far from the viewport.
|
||||
|
||||
Thumbnails within the visible area plus a buffer zone keep their
|
||||
pixmaps. Thumbnails outside that zone have their pixmap set to
|
||||
None to free decoded-image memory. When they scroll back into
|
||||
view, the pixmap is re-decoded from the on-disk thumbnail cache
|
||||
via ``_source_path``.
|
||||
|
||||
This caps decoded-thumbnail memory to roughly (visible + buffer)
|
||||
widgets instead of every widget ever created during infinite scroll.
|
||||
"""
|
||||
if not self._thumbs:
|
||||
return
|
||||
step = THUMB_SIZE + THUMB_SPACING
|
||||
if step == 0:
|
||||
return
|
||||
cols = self._flow.columns
|
||||
vp_top = self.verticalScrollBar().value()
|
||||
vp_height = self.viewport().height()
|
||||
|
||||
# Row range that's visible (0-based row indices)
|
||||
first_visible_row = max(0, (vp_top - THUMB_SPACING) // step)
|
||||
last_visible_row = (vp_top + vp_height) // step
|
||||
|
||||
# Buffer: keep ±5 rows of decoded pixmaps beyond the viewport
|
||||
buffer_rows = 5
|
||||
keep_first = max(0, first_visible_row - buffer_rows)
|
||||
keep_last = last_visible_row + buffer_rows
|
||||
|
||||
keep_start = keep_first * cols
|
||||
keep_end = min(len(self._thumbs), (keep_last + 1) * cols)
|
||||
|
||||
for i, thumb in enumerate(self._thumbs):
|
||||
if keep_start <= i < keep_end:
|
||||
# Inside keep zone — restore if missing
|
||||
if thumb._pixmap is None and thumb._source_path:
|
||||
pix = QPixmap(thumb._source_path)
|
||||
if not pix.isNull():
|
||||
thumb._pixmap = pix.scaled(
|
||||
THUMB_SIZE - 4, THUMB_SIZE - 4,
|
||||
Qt.AspectRatioMode.KeepAspectRatio,
|
||||
Qt.TransformationMode.SmoothTransformation,
|
||||
)
|
||||
thumb._thumb_opacity = 1.0
|
||||
thumb.update()
|
||||
else:
|
||||
# Outside keep zone — release
|
||||
if thumb._pixmap is not None:
|
||||
thumb._pixmap = None
|
||||
|
||||
def _nav_horizontal(self, direction: int) -> None:
|
||||
"""Move selection one cell left (-1) or right (+1); emit edge signals at boundaries."""
|
||||
@ -619,3 +868,10 @@ class ThumbnailGrid(QScrollArea):
|
||||
super().resizeEvent(event)
|
||||
if self._flow:
|
||||
self._flow.resize(self.viewport().size().width(), self._flow.minimumHeight())
|
||||
# Column count can change on resize (splitter drag, tile/float
|
||||
# toggle). Thumbs that were outside the keep zone had their
|
||||
# pixmap freed by _recycle_offscreen and will paint as empty
|
||||
# cells if the row shift moves them into view without a scroll
|
||||
# event to refresh them. Re-run the recycle pass against the
|
||||
# new geometry so newly-visible thumbs get their pixmap back.
|
||||
self._recycle_offscreen()
|
||||
|
||||
203
booru_viewer/gui/info_panel.py
Normal file
203
booru_viewer/gui/info_panel.py
Normal file
@ -0,0 +1,203 @@
|
||||
"""Toggleable info panel showing post details with category-coloured tags."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from html import escape
|
||||
from pathlib import Path
|
||||
|
||||
from PySide6.QtCore import Qt, Property, Signal
|
||||
from PySide6.QtGui import QColor
|
||||
from PySide6.QtWidgets import (
|
||||
QWidget, QVBoxLayout, QLabel, QScrollArea, QPushButton, QSizePolicy,
|
||||
)
|
||||
|
||||
from ..core.api.base import Post
|
||||
from ._source_html import build_source_html
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
# -- Info Panel --
|
||||
|
||||
class InfoPanel(QWidget):
|
||||
"""Toggleable panel showing post details."""
|
||||
|
||||
tag_clicked = Signal(str)
|
||||
|
||||
# Tag category colors. Defaults follow the booru convention (Danbooru,
|
||||
# Gelbooru, etc.) so the panel reads naturally to anyone coming from a
|
||||
# booru site. Each is exposed as a Qt Property so a custom.qss can
|
||||
# override it via `qproperty-tag<Category>Color` selectors on
|
||||
# `InfoPanel`. An empty string means "use the default text color"
|
||||
# (the General category) and is preserved as a sentinel.
|
||||
_tag_artist_color = QColor("#f2ac08")
|
||||
_tag_character_color = QColor("#0a0")
|
||||
_tag_copyright_color = QColor("#c0f")
|
||||
_tag_species_color = QColor("#e44")
|
||||
_tag_meta_color = QColor("#888")
|
||||
_tag_lore_color = QColor("#888")
|
||||
|
||||
def _get_artist(self): return self._tag_artist_color
|
||||
def _set_artist(self, c): self._tag_artist_color = QColor(c) if isinstance(c, str) else c
|
||||
tagArtistColor = Property(QColor, _get_artist, _set_artist)
|
||||
|
||||
def _get_character(self): return self._tag_character_color
|
||||
def _set_character(self, c): self._tag_character_color = QColor(c) if isinstance(c, str) else c
|
||||
tagCharacterColor = Property(QColor, _get_character, _set_character)
|
||||
|
||||
def _get_copyright(self): return self._tag_copyright_color
|
||||
def _set_copyright(self, c): self._tag_copyright_color = QColor(c) if isinstance(c, str) else c
|
||||
tagCopyrightColor = Property(QColor, _get_copyright, _set_copyright)
|
||||
|
||||
def _get_species(self): return self._tag_species_color
|
||||
def _set_species(self, c): self._tag_species_color = QColor(c) if isinstance(c, str) else c
|
||||
tagSpeciesColor = Property(QColor, _get_species, _set_species)
|
||||
|
||||
def _get_meta(self): return self._tag_meta_color
|
||||
def _set_meta(self, c): self._tag_meta_color = QColor(c) if isinstance(c, str) else c
|
||||
tagMetaColor = Property(QColor, _get_meta, _set_meta)
|
||||
|
||||
def _get_lore(self): return self._tag_lore_color
|
||||
def _set_lore(self, c): self._tag_lore_color = QColor(c) if isinstance(c, str) else c
|
||||
tagLoreColor = Property(QColor, _get_lore, _set_lore)
|
||||
|
||||
def _category_color(self, category: str) -> str:
|
||||
"""Resolve a category name to a hex color string for inline QSS use.
|
||||
Returns "" for the General category (no override → use default text
|
||||
color) and unrecognized categories (so callers can render them with
|
||||
no color attribute set)."""
|
||||
cat = (category or "").lower()
|
||||
m = {
|
||||
"artist": self._tag_artist_color,
|
||||
"character": self._tag_character_color,
|
||||
"copyright": self._tag_copyright_color,
|
||||
"species": self._tag_species_color,
|
||||
"meta": self._tag_meta_color,
|
||||
"lore": self._tag_lore_color,
|
||||
}
|
||||
c = m.get(cat)
|
||||
return c.name() if c is not None else ""
|
||||
|
||||
def __init__(self, parent: QWidget | None = None) -> None:
|
||||
super().__init__(parent)
|
||||
self._categories_pending = False
|
||||
layout = QVBoxLayout(self)
|
||||
layout.setContentsMargins(6, 6, 6, 6)
|
||||
|
||||
self._title = QLabel("No post selected")
|
||||
self._title.setStyleSheet("font-weight: bold;")
|
||||
self._title.setMinimumWidth(0)
|
||||
self._title.setSizePolicy(QSizePolicy.Policy.Ignored, QSizePolicy.Policy.Preferred)
|
||||
layout.addWidget(self._title)
|
||||
|
||||
self._details = QLabel()
|
||||
self._details.setWordWrap(True)
|
||||
self._details.setTextInteractionFlags(Qt.TextInteractionFlag.TextSelectableByMouse | Qt.TextInteractionFlag.TextBrowserInteraction)
|
||||
self._details.setMaximumHeight(120)
|
||||
self._details.setMinimumWidth(0)
|
||||
self._details.setSizePolicy(QSizePolicy.Policy.Ignored, QSizePolicy.Policy.Preferred)
|
||||
layout.addWidget(self._details)
|
||||
|
||||
self._tags_label = QLabel("Tags:")
|
||||
self._tags_label.setStyleSheet("font-weight: bold; margin-top: 8px;")
|
||||
layout.addWidget(self._tags_label)
|
||||
|
||||
self._tags_scroll = QScrollArea()
|
||||
self._tags_scroll.setWidgetResizable(True)
|
||||
self._tags_scroll.setStyleSheet("QScrollArea { border: none; }")
|
||||
self._tags_widget = QWidget()
|
||||
self._tags_flow = QVBoxLayout(self._tags_widget)
|
||||
self._tags_flow.setContentsMargins(0, 0, 0, 0)
|
||||
self._tags_flow.setSpacing(2)
|
||||
self._tags_scroll.setWidget(self._tags_widget)
|
||||
layout.addWidget(self._tags_scroll, stretch=1)
|
||||
|
||||
def set_post(self, post: Post) -> None:
|
||||
log.debug(f"InfoPanel: tag_categories={list(post.tag_categories.keys()) if post.tag_categories else 'empty'}")
|
||||
self._title.setText(f"Post #{post.id}")
|
||||
filetype = Path(post.file_url.split("?")[0]).suffix.lstrip(".").upper() if post.file_url else "unknown"
|
||||
source_html = build_source_html(post.source)
|
||||
self._details.setTextFormat(Qt.TextFormat.RichText)
|
||||
self._details.setText(
|
||||
f"Score: {post.score}<br>"
|
||||
f"Rating: {escape(post.rating or 'unknown')}<br>"
|
||||
f"Filetype: {escape(filetype)}<br>"
|
||||
f"Source: {source_html}"
|
||||
)
|
||||
self._details.setOpenExternalLinks(True)
|
||||
# Clear old tags
|
||||
while self._tags_flow.count():
|
||||
item = self._tags_flow.takeAt(0)
|
||||
if item.widget():
|
||||
item.widget().deleteLater()
|
||||
|
||||
if post.tag_categories:
|
||||
# Display tags grouped by category. Colors come from the
|
||||
# tag*Color Qt Properties so a custom.qss can override any of
|
||||
# them via `InfoPanel { qproperty-tagCharacterColor: ...; }`.
|
||||
rendered: set[str] = set()
|
||||
for category, tags in post.tag_categories.items():
|
||||
color = self._category_color(category)
|
||||
header = QLabel(f"{category}:")
|
||||
header.setStyleSheet(
|
||||
"font-weight: bold; margin-top: 6px; margin-bottom: 2px;"
|
||||
+ (f" color: {color};" if color else "")
|
||||
)
|
||||
self._tags_flow.addWidget(header)
|
||||
for tag in tags:
|
||||
rendered.add(tag)
|
||||
btn = QPushButton(tag)
|
||||
btn.setFlat(True)
|
||||
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
||||
style = "QPushButton { text-align: left; padding: 1px 4px; border: none;"
|
||||
if color:
|
||||
style += f" color: {color};"
|
||||
style += " }"
|
||||
btn.setStyleSheet(style)
|
||||
btn.clicked.connect(lambda checked, t=tag: self.tag_clicked.emit(t))
|
||||
self._tags_flow.addWidget(btn)
|
||||
# Safety net: any tag in post.tag_list that didn't land in
|
||||
# a cached category (batch tag API returned partial results,
|
||||
# HTML scrape fell short, cache stale, etc.) is still shown
|
||||
# under an "Other" bucket so tags can't silently disappear
|
||||
# from the info panel.
|
||||
leftover = [t for t in post.tag_list if t and t not in rendered]
|
||||
if leftover:
|
||||
header = QLabel("Other:")
|
||||
header.setStyleSheet(
|
||||
"font-weight: bold; margin-top: 6px; margin-bottom: 2px;"
|
||||
)
|
||||
self._tags_flow.addWidget(header)
|
||||
for tag in leftover:
|
||||
btn = QPushButton(tag)
|
||||
btn.setFlat(True)
|
||||
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
||||
btn.setStyleSheet(
|
||||
"QPushButton { text-align: left; padding: 1px 4px; border: none; }"
|
||||
)
|
||||
btn.clicked.connect(lambda checked, t=tag: self.tag_clicked.emit(t))
|
||||
self._tags_flow.addWidget(btn)
|
||||
elif not self._categories_pending:
|
||||
# Flat tag fallback — only when no category fetch is
|
||||
# in-flight. When a fetch IS pending, leaving the tags
|
||||
# area empty avoids the flat→categorized re-layout hitch
|
||||
# (categories arrive ~200ms later and render in one pass).
|
||||
for tag in post.tag_list:
|
||||
btn = QPushButton(tag)
|
||||
btn.setFlat(True)
|
||||
btn.setCursor(Qt.CursorShape.PointingHandCursor)
|
||||
btn.setStyleSheet(
|
||||
"QPushButton { text-align: left; padding: 1px 4px; border: none; }"
|
||||
)
|
||||
btn.clicked.connect(lambda checked, t=tag: self.tag_clicked.emit(t))
|
||||
self._tags_flow.addWidget(btn)
|
||||
self._tags_flow.addStretch()
|
||||
|
||||
def clear(self) -> None:
|
||||
self._title.setText("No post selected")
|
||||
self._details.setText("")
|
||||
while self._tags_flow.count():
|
||||
item = self._tags_flow.takeAt(0)
|
||||
if item.widget():
|
||||
item.widget().deleteLater()
|
||||
@ -19,6 +19,7 @@ from PySide6.QtWidgets import (
|
||||
QComboBox,
|
||||
QMenu,
|
||||
QMessageBox,
|
||||
QInputDialog,
|
||||
QApplication,
|
||||
)
|
||||
|
||||
@ -75,8 +76,10 @@ class LibraryView(QWidget):
|
||||
top.addWidget(self._folder_combo)
|
||||
|
||||
self._sort_combo = QComboBox()
|
||||
self._sort_combo.addItems(["Date", "Name", "Size"])
|
||||
self._sort_combo.setFixedWidth(80)
|
||||
self._sort_combo.addItems(["Date", "Post ID", "Size"])
|
||||
# 75 is the tight floor: 68 clipped the trailing D under the
|
||||
# bundled themes (font metrics ate more than the math suggested).
|
||||
self._sort_combo.setFixedWidth(75)
|
||||
self._sort_combo.currentTextChanged.connect(lambda _: self.refresh())
|
||||
top.addWidget(self._sort_combo)
|
||||
|
||||
@ -155,7 +158,16 @@ class LibraryView(QWidget):
|
||||
if query and self._db:
|
||||
matching_ids = self._db.search_library_meta(query)
|
||||
if matching_ids:
|
||||
self._files = [f for f in self._files if f.stem.isdigit() and int(f.stem) in matching_ids]
|
||||
def _file_matches(f: Path) -> bool:
|
||||
# Templated filenames: look up post_id via library_meta.filename
|
||||
pid = self._db.get_library_post_id_by_filename(f.name)
|
||||
if pid is not None:
|
||||
return pid in matching_ids
|
||||
# Legacy digit-stem fallback
|
||||
if f.stem.isdigit():
|
||||
return int(f.stem) in matching_ids
|
||||
return False
|
||||
self._files = [f for f in self._files if _file_matches(f)]
|
||||
else:
|
||||
self._files = []
|
||||
|
||||
@ -177,11 +189,22 @@ class LibraryView(QWidget):
|
||||
thumb._cached_path = str(filepath)
|
||||
thumb.setToolTip(filepath.name)
|
||||
thumb.set_saved_locally(True)
|
||||
cached_thumb = lib_thumb_dir / f"{filepath.stem}.jpg"
|
||||
# Thumbnails are stored by post_id (from _copy_library_thumb),
|
||||
# not by filename stem. Resolve post_id so templated filenames
|
||||
# like artist_12345.jpg find their thumbnail correctly.
|
||||
thumb_name = filepath.stem # default: digit-stem fallback
|
||||
if self._db:
|
||||
pid = self._db.get_library_post_id_by_filename(filepath.name)
|
||||
if pid is not None:
|
||||
thumb_name = str(pid)
|
||||
elif filepath.stem.isdigit():
|
||||
thumb_name = filepath.stem
|
||||
cached_thumb = lib_thumb_dir / f"{thumb_name}.jpg"
|
||||
if cached_thumb.exists():
|
||||
pix = QPixmap(str(cached_thumb))
|
||||
thumb_path = str(cached_thumb)
|
||||
pix = QPixmap(thumb_path)
|
||||
if not pix.isNull():
|
||||
thumb.set_pixmap(pix)
|
||||
thumb.set_pixmap(pix, thumb_path)
|
||||
continue
|
||||
self._generate_thumb_async(i, filepath, cached_thumb)
|
||||
|
||||
@ -194,7 +217,7 @@ class LibraryView(QWidget):
|
||||
self._folder_combo.blockSignals(True)
|
||||
self._folder_combo.clear()
|
||||
self._folder_combo.addItem("All Files")
|
||||
self._folder_combo.addItem("Unsorted")
|
||||
self._folder_combo.addItem("Unfiled")
|
||||
|
||||
root = saved_dir()
|
||||
if root.is_dir():
|
||||
@ -217,7 +240,7 @@ class LibraryView(QWidget):
|
||||
|
||||
if folder_text == "All Files":
|
||||
return self._collect_recursive(root)
|
||||
elif folder_text == "Unsorted":
|
||||
elif folder_text == "Unfiled":
|
||||
return self._collect_top_level(root)
|
||||
else:
|
||||
sub = root / folder_text
|
||||
@ -251,8 +274,20 @@ class LibraryView(QWidget):
|
||||
|
||||
def _sort_files(self) -> None:
|
||||
mode = self._sort_combo.currentText()
|
||||
if mode == "Name":
|
||||
self._files.sort(key=lambda p: p.name.lower())
|
||||
if mode == "Post ID":
|
||||
# Numeric sort by post id. Resolves templated filenames
|
||||
# (e.g. artist_12345.jpg) via library_meta DB lookup, falls
|
||||
# back to digit-stem parsing for legacy files. Anything
|
||||
# without a resolvable post_id sorts to the end alphabetically.
|
||||
def _key(p: Path) -> tuple:
|
||||
if self._db:
|
||||
pid = self._db.get_library_post_id_by_filename(p.name)
|
||||
if pid is not None:
|
||||
return (0, pid)
|
||||
if p.stem.isdigit():
|
||||
return (0, int(p.stem))
|
||||
return (1, p.stem.lower())
|
||||
self._files.sort(key=_key)
|
||||
elif mode == "Size":
|
||||
self._files.sort(key=lambda p: p.stat().st_size, reverse=True)
|
||||
else:
|
||||
@ -291,21 +326,56 @@ class LibraryView(QWidget):
|
||||
threading.Thread(target=_work, daemon=True).start()
|
||||
|
||||
def _capture_video_thumb(self, index: int, source: str, dest: str) -> None:
|
||||
"""Grab first frame from video. Tries ffmpeg, falls back to placeholder."""
|
||||
"""Grab first frame from video using mpv, falls back to placeholder."""
|
||||
def _work():
|
||||
extracted = False
|
||||
try:
|
||||
import subprocess
|
||||
result = subprocess.run(
|
||||
["ffmpeg", "-y", "-i", source, "-vframes", "1",
|
||||
"-vf", f"scale={LIBRARY_THUMB_SIZE}:{LIBRARY_THUMB_SIZE}:force_original_aspect_ratio=decrease",
|
||||
"-q:v", "5", dest],
|
||||
capture_output=True, timeout=10,
|
||||
import threading as _threading
|
||||
import mpv as mpvlib
|
||||
|
||||
frame_ready = _threading.Event()
|
||||
m = mpvlib.MPV(
|
||||
vo='null', ao='null', aid='no',
|
||||
pause=True, keep_open='yes',
|
||||
terminal=False, config=False,
|
||||
# Seek to 10% before first frame decode so a video that
|
||||
# opens on a black frame (fade-in, title card, codec
|
||||
# warmup) doesn't produce a black thumbnail. mpv clamps
|
||||
# `start` to valid range so very short clips still land
|
||||
# on a real frame.
|
||||
start='10%',
|
||||
hr_seek='yes',
|
||||
)
|
||||
if Path(dest).exists():
|
||||
self._signals.thumb_ready.emit(index, dest)
|
||||
return
|
||||
except (FileNotFoundError, Exception):
|
||||
pass
|
||||
try:
|
||||
@m.property_observer('video-params')
|
||||
def _on_params(_name, value):
|
||||
if isinstance(value, dict) and value.get('w'):
|
||||
frame_ready.set()
|
||||
|
||||
m.loadfile(source)
|
||||
if frame_ready.wait(timeout=10):
|
||||
m.command('screenshot-to-file', dest, 'video')
|
||||
finally:
|
||||
m.terminate()
|
||||
|
||||
if Path(dest).exists() and Path(dest).stat().st_size > 0:
|
||||
from PIL import Image
|
||||
with Image.open(dest) as img:
|
||||
img.thumbnail(
|
||||
(LIBRARY_THUMB_SIZE, LIBRARY_THUMB_SIZE),
|
||||
Image.LANCZOS,
|
||||
)
|
||||
if img.mode in ("RGBA", "P"):
|
||||
img = img.convert("RGB")
|
||||
img.save(dest, "JPEG", quality=85)
|
||||
extracted = True
|
||||
except Exception as e:
|
||||
log.debug("mpv thumb extraction failed for %s: %s", source, e)
|
||||
|
||||
if extracted and Path(dest).exists():
|
||||
self._signals.thumb_ready.emit(index, dest)
|
||||
return
|
||||
|
||||
# Fallback: generate a placeholder
|
||||
from PySide6.QtGui import QPainter, QColor, QFont
|
||||
from PySide6.QtGui import QPolygon
|
||||
@ -333,7 +403,7 @@ class LibraryView(QWidget):
|
||||
if 0 <= index < len(thumbs):
|
||||
pix = QPixmap(path)
|
||||
if not pix.isNull():
|
||||
thumbs[index].set_pixmap(pix)
|
||||
thumbs[index].set_pixmap(pix, path)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Selection signals
|
||||
@ -347,6 +417,76 @@ class LibraryView(QWidget):
|
||||
if 0 <= index < len(self._files):
|
||||
self.file_activated.emit(str(self._files[index]))
|
||||
|
||||
def _move_files_to_folder(
|
||||
self, files: list[Path], target_folder: str | None
|
||||
) -> None:
|
||||
"""Move library files into target_folder (None = Unfiled root).
|
||||
|
||||
Uses Path.rename for an atomic same-filesystem move. That matters
|
||||
here because the bug we're fixing is "move produces a duplicate" —
|
||||
a copy-then-delete sequence can leave both files behind if the
|
||||
delete fails or the process is killed mid-step. rename() is one
|
||||
syscall and either fully succeeds or doesn't happen at all. If
|
||||
the rename crosses filesystems (rare — only if the user pointed
|
||||
the library at a different mount than its parent), Python raises
|
||||
OSError(EXDEV) and we fall back to shutil.move which copies-then-
|
||||
unlinks; in that path the unlink failure is the only window for
|
||||
a duplicate, and it's logged.
|
||||
"""
|
||||
import shutil
|
||||
|
||||
try:
|
||||
if target_folder:
|
||||
dest_dir = saved_folder_dir(target_folder)
|
||||
else:
|
||||
dest_dir = saved_dir()
|
||||
except ValueError as e:
|
||||
QMessageBox.warning(self, "Invalid Folder Name", str(e))
|
||||
return
|
||||
|
||||
dest_resolved = dest_dir.resolve()
|
||||
moved = 0
|
||||
skipped_same = 0
|
||||
collisions: list[str] = []
|
||||
errors: list[str] = []
|
||||
|
||||
for src in files:
|
||||
if not src.exists():
|
||||
continue
|
||||
if src.parent.resolve() == dest_resolved:
|
||||
skipped_same += 1
|
||||
continue
|
||||
target = dest_dir / src.name
|
||||
if target.exists():
|
||||
collisions.append(src.name)
|
||||
continue
|
||||
try:
|
||||
src.rename(target)
|
||||
moved += 1
|
||||
except OSError:
|
||||
# Cross-device move — fall back to copy + delete.
|
||||
try:
|
||||
shutil.move(str(src), str(target))
|
||||
moved += 1
|
||||
except Exception as e:
|
||||
log.warning("Failed to move %s → %s: %s", src, target, e)
|
||||
errors.append(f"{src.name}: {e}")
|
||||
|
||||
self.refresh()
|
||||
|
||||
if collisions:
|
||||
sample = "\n".join(collisions[:10])
|
||||
more = f"\n... and {len(collisions) - 10} more" if len(collisions) > 10 else ""
|
||||
QMessageBox.warning(
|
||||
self,
|
||||
"Move Conflicts",
|
||||
f"Skipped {len(collisions)} file(s) — destination already "
|
||||
f"contains a file with the same name:\n\n{sample}{more}",
|
||||
)
|
||||
if errors:
|
||||
sample = "\n".join(errors[:10])
|
||||
QMessageBox.warning(self, "Move Errors", sample)
|
||||
|
||||
def _on_context_menu(self, index: int, pos) -> None:
|
||||
if index < 0 or index >= len(self._files):
|
||||
return
|
||||
@ -361,6 +501,25 @@ class LibraryView(QWidget):
|
||||
menu.addSeparator()
|
||||
copy_file = menu.addAction("Copy File to Clipboard")
|
||||
copy_path = menu.addAction("Copy File Path")
|
||||
menu.addSeparator()
|
||||
|
||||
# Move to Folder submenu — atomic rename, no copy step, so a
|
||||
# crash mid-move can never leave a duplicate behind. The current
|
||||
# location is included in the list (no-op'd in the move helper)
|
||||
# so the menu shape stays predictable for the user.
|
||||
move_menu = menu.addMenu("Move to Folder")
|
||||
move_unsorted = move_menu.addAction("Unfiled")
|
||||
move_menu.addSeparator()
|
||||
move_folder_actions: dict[int, str] = {}
|
||||
root = saved_dir()
|
||||
if root.is_dir():
|
||||
for entry in sorted(root.iterdir()):
|
||||
if entry.is_dir():
|
||||
a = move_menu.addAction(entry.name)
|
||||
move_folder_actions[id(a)] = entry.name
|
||||
move_menu.addSeparator()
|
||||
move_new = move_menu.addAction("+ New Folder...")
|
||||
|
||||
menu.addSeparator()
|
||||
delete_action = menu.addAction("Delete from Library")
|
||||
|
||||
@ -372,6 +531,14 @@ class LibraryView(QWidget):
|
||||
QDesktopServices.openUrl(QUrl.fromLocalFile(str(filepath)))
|
||||
elif action == open_folder:
|
||||
QDesktopServices.openUrl(QUrl.fromLocalFile(str(filepath.parent)))
|
||||
elif action == move_unsorted:
|
||||
self._move_files_to_folder([filepath], None)
|
||||
elif action == move_new:
|
||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
self._move_files_to_folder([filepath], name.strip())
|
||||
elif id(action) in move_folder_actions:
|
||||
self._move_files_to_folder([filepath], move_folder_actions[id(action)])
|
||||
elif action == copy_file:
|
||||
from PySide6.QtCore import QMimeData
|
||||
from PySide6.QtGui import QPixmap as _QP
|
||||
@ -389,10 +556,15 @@ class LibraryView(QWidget):
|
||||
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
|
||||
)
|
||||
if reply == QMessageBox.StandardButton.Yes:
|
||||
post_id = int(filepath.stem) if filepath.stem.isdigit() else None
|
||||
post_id = self._db.get_library_post_id_by_filename(filepath.name)
|
||||
if post_id is None and filepath.stem.isdigit():
|
||||
post_id = int(filepath.stem)
|
||||
filepath.unlink(missing_ok=True)
|
||||
lib_thumb = thumbnails_dir() / "library" / f"{filepath.stem}.jpg"
|
||||
thumb_key = str(post_id) if post_id is not None else filepath.stem
|
||||
lib_thumb = thumbnails_dir() / "library" / f"{thumb_key}.jpg"
|
||||
lib_thumb.unlink(missing_ok=True)
|
||||
if post_id is not None:
|
||||
self._db.remove_library_meta(post_id)
|
||||
self.refresh()
|
||||
if post_id is not None:
|
||||
self.files_deleted.emit([post_id])
|
||||
@ -403,13 +575,36 @@ class LibraryView(QWidget):
|
||||
return
|
||||
|
||||
menu = QMenu(self)
|
||||
|
||||
move_menu = menu.addMenu(f"Move {len(files)} files to Folder")
|
||||
move_unsorted = move_menu.addAction("Unfiled")
|
||||
move_menu.addSeparator()
|
||||
move_folder_actions: dict[int, str] = {}
|
||||
root = saved_dir()
|
||||
if root.is_dir():
|
||||
for entry in sorted(root.iterdir()):
|
||||
if entry.is_dir():
|
||||
a = move_menu.addAction(entry.name)
|
||||
move_folder_actions[id(a)] = entry.name
|
||||
move_menu.addSeparator()
|
||||
move_new = move_menu.addAction("+ New Folder...")
|
||||
|
||||
menu.addSeparator()
|
||||
delete_all = menu.addAction(f"Delete {len(files)} files from Library")
|
||||
|
||||
action = menu.exec(pos)
|
||||
if not action:
|
||||
return
|
||||
|
||||
if action == delete_all:
|
||||
if action == move_unsorted:
|
||||
self._move_files_to_folder(files, None)
|
||||
elif action == move_new:
|
||||
name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
|
||||
if ok and name.strip():
|
||||
self._move_files_to_folder(files, name.strip())
|
||||
elif id(action) in move_folder_actions:
|
||||
self._move_files_to_folder(files, move_folder_actions[id(action)])
|
||||
elif action == delete_all:
|
||||
reply = QMessageBox.question(
|
||||
self, "Confirm", f"Delete {len(files)} files from library?",
|
||||
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
|
||||
@ -417,11 +612,16 @@ class LibraryView(QWidget):
|
||||
if reply == QMessageBox.StandardButton.Yes:
|
||||
deleted_ids = []
|
||||
for f in files:
|
||||
if f.stem.isdigit():
|
||||
deleted_ids.append(int(f.stem))
|
||||
post_id = self._db.get_library_post_id_by_filename(f.name)
|
||||
if post_id is None and f.stem.isdigit():
|
||||
post_id = int(f.stem)
|
||||
f.unlink(missing_ok=True)
|
||||
lib_thumb = thumbnails_dir() / "library" / f"{f.stem}.jpg"
|
||||
thumb_key = str(post_id) if post_id is not None else f.stem
|
||||
lib_thumb = thumbnails_dir() / "library" / f"{thumb_key}.jpg"
|
||||
lib_thumb.unlink(missing_ok=True)
|
||||
if post_id is not None:
|
||||
self._db.remove_library_meta(post_id)
|
||||
deleted_ids.append(post_id)
|
||||
self.refresh()
|
||||
if deleted_ids:
|
||||
self.files_deleted.emit(deleted_ids)
|
||||
|
||||
30
booru_viewer/gui/log_handler.py
Normal file
30
booru_viewer/gui/log_handler.py
Normal file
@ -0,0 +1,30 @@
|
||||
"""Qt-aware logging handler that emits log lines to a QTextEdit."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from PySide6.QtCore import QObject, Signal
|
||||
from PySide6.QtWidgets import QTextEdit
|
||||
|
||||
|
||||
class LogHandler(logging.Handler, QObject):
|
||||
"""Logging handler that emits to a QTextEdit."""
|
||||
|
||||
log_signal = Signal(str)
|
||||
|
||||
def __init__(self, widget: QTextEdit) -> None:
|
||||
logging.Handler.__init__(self)
|
||||
QObject.__init__(self)
|
||||
self._widget = widget
|
||||
self.log_signal.connect(self._append)
|
||||
self.setFormatter(logging.Formatter("%(asctime)s %(levelname)-5s %(message)s", datefmt="%H:%M:%S"))
|
||||
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
msg = self.format(record)
|
||||
self.log_signal.emit(msg)
|
||||
|
||||
def _append(self, msg: str) -> None:
|
||||
self._widget.append(msg)
|
||||
sb = self._widget.verticalScrollBar()
|
||||
sb.setValue(sb.maximum())
|
||||
1218
booru_viewer/gui/main_window.py
Normal file
1218
booru_viewer/gui/main_window.py
Normal file
File diff suppressed because it is too large
Load Diff
0
booru_viewer/gui/media/__init__.py
Normal file
0
booru_viewer/gui/media/__init__.py
Normal file
89
booru_viewer/gui/media/_mpv_options.py
Normal file
89
booru_viewer/gui/media/_mpv_options.py
Normal file
@ -0,0 +1,89 @@
|
||||
"""Pure helpers that build the kwargs dict passed to ``mpv.MPV`` and
|
||||
the post-construction options dict applied via the property API.
|
||||
|
||||
Kept free of any Qt or mpv imports so the options can be audited from
|
||||
a CI test that only installs the stdlib.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# FFmpeg ``protocol_whitelist`` value applied via mpv's
|
||||
# ``demuxer-lavf-o`` option (audit finding #2). ``file`` must stay so
|
||||
# cached local clips and ``.part`` files keep playing; ``http``/
|
||||
# ``https``/``tls``/``tcp`` are needed for fresh network video.
|
||||
# ``crypto`` is intentionally omitted — it's an FFmpeg pseudo-protocol
|
||||
# for AES-decrypted streams that boorus do not legitimately serve.
|
||||
LAVF_PROTOCOL_WHITELIST = "file,http,https,tls,tcp"
|
||||
|
||||
|
||||
def lavf_options() -> dict[str, str]:
|
||||
"""Return the FFmpeg lavf demuxer options to apply post-construction.
|
||||
|
||||
These cannot be set via ``mpv.MPV(**kwargs)`` because python-mpv's
|
||||
init path uses ``mpv_set_option_string``, which routes through
|
||||
mpv's keyvalue list parser. That parser splits on ``,`` to find
|
||||
entries, so the comma-laden ``protocol_whitelist`` value gets
|
||||
shredded into orphan tokens and mpv rejects the option with
|
||||
-7 OPT_FORMAT. mpv's documented backslash escape (``\\,``) is
|
||||
not unescaped on this code path either.
|
||||
|
||||
The post-construction property API DOES accept dict values for
|
||||
keyvalue-list options via the node API, so we set them after
|
||||
``mpv.MPV()`` returns. Caller pattern:
|
||||
|
||||
m = mpv.MPV(**build_mpv_kwargs(is_windows=...))
|
||||
for k, v in lavf_options().items():
|
||||
m["demuxer-lavf-o"] = {k: v}
|
||||
"""
|
||||
return {"protocol_whitelist": LAVF_PROTOCOL_WHITELIST}
|
||||
|
||||
|
||||
def build_mpv_kwargs(is_windows: bool) -> dict[str, object]:
|
||||
"""Return the kwargs dict for constructing ``mpv.MPV``.
|
||||
|
||||
The playback, audio, and network options are unchanged from
|
||||
pre-audit v0.2.5. The security hardening added by SECURITY_AUDIT.md
|
||||
finding #2 is:
|
||||
|
||||
- ``ytdl="no"``: refuse to delegate URL handling to yt-dlp. mpv's
|
||||
default enables a yt-dlp hook script that matches ~1500 hosts
|
||||
and shells out to ``yt-dlp`` on any URL it recognizes. A
|
||||
compromised booru returning ``file_url: "https://youtube.com/..."``
|
||||
would pull the user through whatever extractor CVE is current.
|
||||
|
||||
- ``load_scripts="no"``: do not auto-load Lua scripts from
|
||||
``~/.config/mpv/scripts``. These scripts run in mpv's context
|
||||
every time the widget is created.
|
||||
|
||||
- ``input_conf="/dev/null"`` (POSIX only): skip loading
|
||||
``~/.config/mpv/input.conf``. The existing
|
||||
``input_default_bindings=False`` + ``input_vo_keyboard=False``
|
||||
are the primary lockdown; this is defense-in-depth. Windows
|
||||
uses a different null-device path and the load behavior varies
|
||||
by mpv build, so it is skipped there.
|
||||
|
||||
The ffmpeg protocol whitelist (also part of finding #2) is NOT
|
||||
in this dict — see ``lavf_options`` for the explanation.
|
||||
"""
|
||||
kwargs: dict[str, object] = {
|
||||
"vo": "libmpv",
|
||||
"hwdec": "auto",
|
||||
"keep_open": "yes",
|
||||
"ao": "pulse,wasapi,",
|
||||
"audio_client_name": "booru-viewer",
|
||||
"input_default_bindings": False,
|
||||
"input_vo_keyboard": False,
|
||||
"osc": False,
|
||||
"vd_lavc_fast": "yes",
|
||||
"vd_lavc_skiploopfilter": "nonkey",
|
||||
"cache": "yes",
|
||||
"cache_pause": "no",
|
||||
"demuxer_max_bytes": "50MiB",
|
||||
"demuxer_readahead_secs": "20",
|
||||
"network_timeout": "10",
|
||||
"ytdl": "no",
|
||||
"load_scripts": "no",
|
||||
}
|
||||
if not is_windows:
|
||||
kwargs["input_conf"] = "/dev/null"
|
||||
return kwargs
|
||||
11
booru_viewer/gui/media/constants.py
Normal file
11
booru_viewer/gui/media/constants.py
Normal file
@ -0,0 +1,11 @@
|
||||
"""Shared constants and predicates for media files."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
VIDEO_EXTENSIONS = (".mp4", ".webm", ".mkv", ".avi", ".mov")
|
||||
|
||||
|
||||
def _is_video(path: str) -> bool:
|
||||
return Path(path).suffix.lower() in VIDEO_EXTENSIONS
|
||||
178
booru_viewer/gui/media/image_viewer.py
Normal file
178
booru_viewer/gui/media/image_viewer.py
Normal file
@ -0,0 +1,178 @@
|
||||
"""Zoom/pan image viewer used by both the embedded preview and the popout."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from PySide6.QtCore import Qt, QPointF, Signal
|
||||
from PySide6.QtGui import QPixmap, QPainter, QWheelEvent, QMouseEvent, QKeyEvent, QMovie
|
||||
from PySide6.QtWidgets import QWidget
|
||||
|
||||
|
||||
# -- Image Viewer (zoom/pan) --
|
||||
|
||||
class ImageViewer(QWidget):
|
||||
"""Zoomable, pannable image viewer."""
|
||||
|
||||
close_requested = Signal()
|
||||
|
||||
def __init__(self, parent: QWidget | None = None) -> None:
|
||||
super().__init__(parent)
|
||||
self._pixmap: QPixmap | None = None
|
||||
self._movie: QMovie | None = None
|
||||
self._zoom = 1.0
|
||||
self._offset = QPointF(0, 0)
|
||||
self._drag_start: QPointF | None = None
|
||||
self._drag_offset = QPointF(0, 0)
|
||||
self._zoom_scroll_accum = 0
|
||||
self.setMouseTracking(True)
|
||||
self.setFocusPolicy(Qt.FocusPolicy.StrongFocus)
|
||||
self._info_text = ""
|
||||
|
||||
def set_image(self, pixmap: QPixmap, info: str = "") -> None:
|
||||
self._stop_movie()
|
||||
self._pixmap = pixmap
|
||||
self._zoom = 1.0
|
||||
self._offset = QPointF(0, 0)
|
||||
self._info_text = info
|
||||
self._fit_to_view()
|
||||
self.update()
|
||||
|
||||
def set_gif(self, path: str, info: str = "") -> None:
|
||||
self._stop_movie()
|
||||
self._movie = QMovie(path)
|
||||
self._movie.frameChanged.connect(self._on_gif_frame)
|
||||
self._movie.start()
|
||||
self._info_text = info
|
||||
# Set initial pixmap from first frame
|
||||
self._pixmap = self._movie.currentPixmap()
|
||||
self._zoom = 1.0
|
||||
self._offset = QPointF(0, 0)
|
||||
self._fit_to_view()
|
||||
self.update()
|
||||
|
||||
def _on_gif_frame(self) -> None:
|
||||
if self._movie:
|
||||
self._pixmap = self._movie.currentPixmap()
|
||||
self.update()
|
||||
|
||||
def _stop_movie(self) -> None:
|
||||
if self._movie:
|
||||
self._movie.stop()
|
||||
self._movie = None
|
||||
|
||||
def clear(self) -> None:
|
||||
self._stop_movie()
|
||||
self._pixmap = None
|
||||
self._info_text = ""
|
||||
self.update()
|
||||
|
||||
def _fit_to_view(self) -> None:
|
||||
if not self._pixmap:
|
||||
return
|
||||
vw, vh = self.width(), self.height()
|
||||
pw, ph = self._pixmap.width(), self._pixmap.height()
|
||||
if pw == 0 or ph == 0:
|
||||
return
|
||||
scale_w = vw / pw
|
||||
scale_h = vh / ph
|
||||
# No 1.0 cap — scale up to fill the available view, matching how
|
||||
# the video player fills its widget. In the popout the window is
|
||||
# already aspect-locked to the image's aspect, so scaling up
|
||||
# produces a clean fill with no letterbox. In the embedded
|
||||
# preview the user can drag the splitter past the image's native
|
||||
# size; letting it scale up there fills the pane the same way
|
||||
# the popout does.
|
||||
self._zoom = min(scale_w, scale_h)
|
||||
self._offset = QPointF(
|
||||
(vw - pw * self._zoom) / 2,
|
||||
(vh - ph * self._zoom) / 2,
|
||||
)
|
||||
|
||||
def paintEvent(self, event) -> None:
|
||||
p = QPainter(self)
|
||||
pal = self.palette()
|
||||
p.fillRect(self.rect(), pal.color(pal.ColorRole.Window))
|
||||
if self._pixmap:
|
||||
p.setRenderHint(QPainter.RenderHint.SmoothPixmapTransform)
|
||||
p.translate(self._offset)
|
||||
p.scale(self._zoom, self._zoom)
|
||||
p.drawPixmap(0, 0, self._pixmap)
|
||||
p.resetTransform()
|
||||
p.end()
|
||||
|
||||
def wheelEvent(self, event: QWheelEvent) -> None:
|
||||
if not self._pixmap:
|
||||
return
|
||||
delta = event.angleDelta().y()
|
||||
if delta == 0:
|
||||
# Pure horizontal tilt — let parent handle (navigation)
|
||||
event.ignore()
|
||||
return
|
||||
self._zoom_scroll_accum += delta
|
||||
steps = self._zoom_scroll_accum // 120
|
||||
if not steps:
|
||||
return
|
||||
self._zoom_scroll_accum -= steps * 120
|
||||
mouse_pos = event.position()
|
||||
old_zoom = self._zoom
|
||||
factor = 1.15 ** steps
|
||||
self._zoom = max(0.1, min(self._zoom * factor, 20.0))
|
||||
ratio = self._zoom / old_zoom
|
||||
self._offset = mouse_pos - ratio * (mouse_pos - self._offset)
|
||||
self.update()
|
||||
|
||||
def mousePressEvent(self, event: QMouseEvent) -> None:
|
||||
if event.button() == Qt.MouseButton.MiddleButton:
|
||||
self._fit_to_view()
|
||||
self.update()
|
||||
elif event.button() == Qt.MouseButton.LeftButton:
|
||||
self._drag_start = event.position()
|
||||
self._drag_offset = QPointF(self._offset)
|
||||
self.setCursor(Qt.CursorShape.ClosedHandCursor)
|
||||
|
||||
def mouseMoveEvent(self, event: QMouseEvent) -> None:
|
||||
if self._drag_start is not None:
|
||||
delta = event.position() - self._drag_start
|
||||
self._offset = self._drag_offset + delta
|
||||
self.update()
|
||||
|
||||
def mouseReleaseEvent(self, event: QMouseEvent) -> None:
|
||||
self._drag_start = None
|
||||
self.setCursor(Qt.CursorShape.ArrowCursor)
|
||||
|
||||
def keyPressEvent(self, event: QKeyEvent) -> None:
|
||||
if event.key() in (Qt.Key.Key_Escape, Qt.Key.Key_Q):
|
||||
self.close_requested.emit()
|
||||
elif event.key() == Qt.Key.Key_0:
|
||||
self._fit_to_view()
|
||||
self.update()
|
||||
elif event.key() in (Qt.Key.Key_Plus, Qt.Key.Key_Equal):
|
||||
self._zoom = min(self._zoom * 1.2, 20.0)
|
||||
self.update()
|
||||
elif event.key() == Qt.Key.Key_Minus:
|
||||
self._zoom = max(self._zoom / 1.2, 0.1)
|
||||
self.update()
|
||||
else:
|
||||
event.ignore()
|
||||
|
||||
def resizeEvent(self, event) -> None:
|
||||
if not self._pixmap:
|
||||
return
|
||||
pw, ph = self._pixmap.width(), self._pixmap.height()
|
||||
if pw == 0 or ph == 0:
|
||||
return
|
||||
# Only re-fit if the user was at fit-to-view at the *previous*
|
||||
# size. If they had explicitly zoomed/panned, leave _zoom and
|
||||
# _offset alone — clobbering them on every resize (F11 toggle,
|
||||
# manual window drag, splitter move) loses their state. Use
|
||||
# event.oldSize() to compute the prior fit-to-view zoom and
|
||||
# compare to current _zoom; the 0.001 epsilon absorbs float
|
||||
# drift but is tighter than any wheel/key zoom step (±20%).
|
||||
old = event.oldSize()
|
||||
if old.isValid() and old.width() > 0 and old.height() > 0:
|
||||
old_fit = min(old.width() / pw, old.height() / ph)
|
||||
if abs(self._zoom - old_fit) < 0.001:
|
||||
self._fit_to_view()
|
||||
else:
|
||||
# First resize (no valid old size) — default to fit.
|
||||
self._fit_to_view()
|
||||
self.update()
|
||||
161
booru_viewer/gui/media/mpv_gl.py
Normal file
161
booru_viewer/gui/media/mpv_gl.py
Normal file
@ -0,0 +1,161 @@
|
||||
"""mpv OpenGL render context host widgets."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from PySide6.QtCore import Signal
|
||||
from PySide6.QtOpenGLWidgets import QOpenGLWidget as _QOpenGLWidget
|
||||
from PySide6.QtWidgets import QWidget, QVBoxLayout
|
||||
|
||||
import mpv as mpvlib
|
||||
|
||||
from ._mpv_options import build_mpv_kwargs, lavf_options
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _MpvGLWidget(QWidget):
|
||||
"""OpenGL widget that hosts mpv rendering via the render API.
|
||||
|
||||
Subclasses QOpenGLWidget so initializeGL/paintGL are dispatched
|
||||
correctly by Qt's C++ virtual method mechanism.
|
||||
Works on both X11 and Wayland.
|
||||
"""
|
||||
|
||||
_frame_ready = Signal() # mpv thread → main thread repaint trigger
|
||||
|
||||
def __init__(self, parent: QWidget | None = None) -> None:
|
||||
super().__init__(parent)
|
||||
self._gl: _MpvOpenGLSurface = _MpvOpenGLSurface(self)
|
||||
layout = QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.addWidget(self._gl)
|
||||
self._ctx: mpvlib.MpvRenderContext | None = None
|
||||
self._gl_inited = False
|
||||
self._proc_addr_fn = None
|
||||
self._frame_ready.connect(self._gl.update)
|
||||
# Create mpv eagerly on the main thread.
|
||||
#
|
||||
# Options come from `build_mpv_kwargs` (see `_mpv_options.py`
|
||||
# for the full rationale). Summary: Discord screen-share audio
|
||||
# fix via `ao=pulse`, fast-load vd-lavc options, network cache
|
||||
# tuning for the uncached-video fast path, and the SECURITY
|
||||
# hardening from audit #2 (ytdl=no, load_scripts=no, POSIX
|
||||
# input_conf null).
|
||||
self._mpv = mpvlib.MPV(
|
||||
**build_mpv_kwargs(is_windows=sys.platform == "win32"),
|
||||
)
|
||||
# The ffmpeg lavf demuxer protocol whitelist (also audit #2)
|
||||
# has to be applied via the property API, not as an init
|
||||
# kwarg — python-mpv's init path goes through
|
||||
# mpv_set_option_string which trips on the comma-laden value.
|
||||
# The property API uses the node API and accepts dict values.
|
||||
for key, value in lavf_options().items():
|
||||
self._mpv["demuxer-lavf-o"] = {key: value}
|
||||
# Wire up the GL surface's callbacks to us
|
||||
self._gl._owner = self
|
||||
|
||||
def _init_gl(self) -> None:
|
||||
if self._gl_inited or self._mpv is None:
|
||||
return
|
||||
from PySide6.QtGui import QOpenGLContext
|
||||
ctx = QOpenGLContext.currentContext()
|
||||
if not ctx:
|
||||
return
|
||||
|
||||
def _get_proc_address(_ctx, name):
|
||||
if isinstance(name, bytes):
|
||||
name_str = name
|
||||
else:
|
||||
name_str = name.encode('utf-8')
|
||||
addr = ctx.getProcAddress(name_str)
|
||||
if addr is not None:
|
||||
return int(addr)
|
||||
return 0
|
||||
|
||||
self._proc_addr_fn = mpvlib.MpvGlGetProcAddressFn(_get_proc_address)
|
||||
self._ctx = mpvlib.MpvRenderContext(
|
||||
self._mpv, 'opengl',
|
||||
opengl_init_params={'get_proc_address': self._proc_addr_fn},
|
||||
)
|
||||
self._ctx.update_cb = self._on_mpv_frame
|
||||
self._gl_inited = True
|
||||
|
||||
def _on_mpv_frame(self) -> None:
|
||||
"""Called from mpv thread when a new frame is ready."""
|
||||
self._frame_ready.emit()
|
||||
|
||||
def _paint_gl(self) -> None:
|
||||
if self._ctx is None:
|
||||
self._init_gl()
|
||||
if self._ctx is None:
|
||||
return
|
||||
ratio = self._gl.devicePixelRatioF()
|
||||
w = int(self._gl.width() * ratio)
|
||||
h = int(self._gl.height() * ratio)
|
||||
self._ctx.render(
|
||||
opengl_fbo={'w': w, 'h': h, 'fbo': self._gl.defaultFramebufferObject()},
|
||||
flip_y=True,
|
||||
)
|
||||
|
||||
def ensure_gl_init(self) -> None:
|
||||
"""Force GL context creation and render context setup.
|
||||
|
||||
Needed when the widget is hidden (e.g. inside a QStackedWidget)
|
||||
but mpv needs a render context before loadfile().
|
||||
"""
|
||||
if not self._gl_inited:
|
||||
log.debug("GL render context init (first-time for widget %s)", id(self))
|
||||
self._gl.makeCurrent()
|
||||
self._init_gl()
|
||||
|
||||
def release_render_context(self) -> None:
|
||||
"""Free the GL render context without terminating mpv.
|
||||
|
||||
Releases all GPU-side textures and FBOs that the render context
|
||||
holds. The next ``ensure_gl_init()`` call (from ``play_file``)
|
||||
recreates the context cheaply (~5ms). This is the difference
|
||||
between "mpv is idle but holding VRAM" and "mpv is idle and
|
||||
clean."
|
||||
|
||||
Safe to call when mpv has no active file (after
|
||||
``mpv.command('stop')``). After this, ``_paint_gl`` is a no-op
|
||||
(``_ctx is None`` guard) and mpv won't fire frame-ready
|
||||
callbacks because there's no render context to trigger them.
|
||||
"""
|
||||
if self._ctx:
|
||||
# GL context must be current so mpv can release its textures
|
||||
# and FBOs on the correct context. Without this, drivers that
|
||||
# enforce per-context resource ownership (not NVIDIA, but
|
||||
# Mesa/Intel) leak the GPU objects.
|
||||
self._gl.makeCurrent()
|
||||
try:
|
||||
self._ctx.free()
|
||||
finally:
|
||||
self._gl.doneCurrent()
|
||||
self._ctx = None
|
||||
self._gl_inited = False
|
||||
|
||||
def cleanup(self) -> None:
|
||||
self.release_render_context()
|
||||
if self._mpv:
|
||||
self._mpv.terminate()
|
||||
self._mpv = None
|
||||
|
||||
|
||||
class _MpvOpenGLSurface(_QOpenGLWidget):
|
||||
"""QOpenGLWidget subclass — delegates initializeGL/paintGL to _MpvGLWidget."""
|
||||
|
||||
def __init__(self, parent: QWidget | None = None) -> None:
|
||||
super().__init__(parent)
|
||||
self._owner: _MpvGLWidget | None = None
|
||||
|
||||
def initializeGL(self) -> None:
|
||||
if self._owner:
|
||||
self._owner._init_gl()
|
||||
|
||||
def paintGL(self) -> None:
|
||||
if self._owner:
|
||||
self._owner._paint_gl()
|
||||
695
booru_viewer/gui/media/video_player.py
Normal file
695
booru_viewer/gui/media/video_player.py
Normal file
@ -0,0 +1,695 @@
|
||||
"""mpv-backed video player widget with transport controls."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
from PySide6.QtCore import Qt, QTimer, Signal, Property, QPoint
|
||||
from PySide6.QtGui import QColor, QIcon, QPixmap, QPainter, QPen, QPolygon, QPainterPath, QFont
|
||||
from PySide6.QtWidgets import (
|
||||
QWidget, QVBoxLayout, QHBoxLayout, QLabel, QPushButton, QSlider, QStyle,
|
||||
)
|
||||
|
||||
|
||||
def _paint_icon(shape: str, color: QColor, size: int = 16) -> QIcon:
|
||||
"""Paint a media control icon using the given color."""
|
||||
pix = QPixmap(size, size)
|
||||
pix.fill(Qt.GlobalColor.transparent)
|
||||
p = QPainter(pix)
|
||||
p.setRenderHint(QPainter.RenderHint.Antialiasing)
|
||||
p.setPen(Qt.PenStyle.NoPen)
|
||||
p.setBrush(color)
|
||||
s = size
|
||||
|
||||
if shape == "play":
|
||||
p.drawPolygon(QPolygon([QPoint(3, 2), QPoint(3, s - 2), QPoint(s - 2, s // 2)]))
|
||||
|
||||
elif shape == "pause":
|
||||
w = max(2, s // 4)
|
||||
p.drawRect(2, 2, w, s - 4)
|
||||
p.drawRect(s - 2 - w, 2, w, s - 4)
|
||||
|
||||
elif shape == "volume":
|
||||
# Speaker cone
|
||||
p.drawPolygon(QPolygon([
|
||||
QPoint(1, s // 2 - 2), QPoint(4, s // 2 - 2),
|
||||
QPoint(8, 2), QPoint(8, s - 2),
|
||||
QPoint(4, s // 2 + 2), QPoint(1, s // 2 + 2),
|
||||
]))
|
||||
# Sound waves
|
||||
p.setPen(QPen(color, 1.5))
|
||||
p.setBrush(Qt.BrushStyle.NoBrush)
|
||||
path = QPainterPath()
|
||||
path.arcMoveTo(8, 3, 6, s - 6, 45)
|
||||
path.arcTo(8, 3, 6, s - 6, 45, -90)
|
||||
p.drawPath(path)
|
||||
|
||||
elif shape == "muted":
|
||||
p.drawPolygon(QPolygon([
|
||||
QPoint(1, s // 2 - 2), QPoint(4, s // 2 - 2),
|
||||
QPoint(8, 2), QPoint(8, s - 2),
|
||||
QPoint(4, s // 2 + 2), QPoint(1, s // 2 + 2),
|
||||
]))
|
||||
p.setPen(QPen(color, 2))
|
||||
p.drawLine(10, 4, s - 2, s - 4)
|
||||
p.drawLine(10, s - 4, s - 2, 4)
|
||||
|
||||
elif shape == "loop":
|
||||
p.setPen(QPen(color, 1.5))
|
||||
p.setBrush(Qt.BrushStyle.NoBrush)
|
||||
path = QPainterPath()
|
||||
path.arcMoveTo(2, 2, s - 4, s - 4, 30)
|
||||
path.arcTo(2, 2, s - 4, s - 4, 30, 300)
|
||||
p.drawPath(path)
|
||||
# Arrowhead
|
||||
p.setPen(Qt.PenStyle.NoPen)
|
||||
p.setBrush(color)
|
||||
end = path.currentPosition().toPoint()
|
||||
p.drawPolygon(QPolygon([
|
||||
end, QPoint(end.x() - 4, end.y() - 3), QPoint(end.x() + 1, end.y() - 4),
|
||||
]))
|
||||
|
||||
elif shape == "once":
|
||||
p.setPen(QPen(color, 1))
|
||||
f = QFont()
|
||||
f.setPixelSize(s - 2)
|
||||
f.setBold(True)
|
||||
p.setFont(f)
|
||||
p.drawText(pix.rect(), Qt.AlignmentFlag.AlignCenter, "1\u00D7")
|
||||
|
||||
elif shape == "next":
|
||||
p.drawPolygon(QPolygon([QPoint(2, 2), QPoint(2, s - 2), QPoint(s - 5, s // 2)]))
|
||||
p.drawRect(s - 4, 2, 2, s - 4)
|
||||
|
||||
elif shape == "auto":
|
||||
mid = s // 2
|
||||
p.drawPolygon(QPolygon([QPoint(1, 3), QPoint(1, s - 3), QPoint(mid - 1, s // 2)]))
|
||||
p.drawPolygon(QPolygon([QPoint(mid, 3), QPoint(mid, s - 3), QPoint(s - 2, s // 2)]))
|
||||
|
||||
p.end()
|
||||
return QIcon(pix)
|
||||
|
||||
import mpv as mpvlib
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
from .mpv_gl import _MpvGLWidget
|
||||
|
||||
|
||||
class _ClickSeekSlider(QSlider):
|
||||
"""Slider that jumps to the clicked position instead of page-stepping."""
|
||||
clicked_position = Signal(int)
|
||||
|
||||
def mousePressEvent(self, event):
|
||||
if event.button() == Qt.MouseButton.LeftButton:
|
||||
val = QStyle.sliderValueFromPosition(
|
||||
self.minimum(), self.maximum(), int(event.position().x()), self.width()
|
||||
)
|
||||
self.setValue(val)
|
||||
self.clicked_position.emit(val)
|
||||
super().mousePressEvent(event)
|
||||
|
||||
|
||||
# -- Video Player (mpv backend via OpenGL render API) --
|
||||
|
||||
|
||||
class VideoPlayer(QWidget):
|
||||
"""Video player with transport controls, powered by mpv."""
|
||||
|
||||
play_next = Signal() # emitted when video ends in "Next" mode
|
||||
media_ready = Signal() # emitted when media is loaded and duration is known
|
||||
video_size = Signal(int, int) # (width, height) emitted when video dimensions are known
|
||||
# Emitted whenever mpv fires its `playback-restart` event. This event
|
||||
# arrives once after each loadfile (when playback actually starts
|
||||
# producing frames) and once after each completed seek. The popout's
|
||||
# state machine adapter listens to this signal and dispatches either
|
||||
# VideoStarted or SeekCompleted depending on which state it's in
|
||||
# (LoadingVideo vs SeekingVideo). The pre-state-machine code did not
|
||||
# need this signal because it used a 500ms timestamp window to fake
|
||||
# a seek-done edge; the state machine refactor replaces that window
|
||||
# with this real event. Probe results in docs/POPOUT_REFACTOR_PLAN.md
|
||||
# confirm exactly one event per load and one per seek.
|
||||
playback_restart = Signal()
|
||||
|
||||
# QSS-controllable letterbox / pillarbox color. mpv paints the area
|
||||
# around the video frame in this color instead of the default black,
|
||||
# so portrait videos in a landscape preview slot (or vice versa) blend
|
||||
# into the panel theme instead of sitting in a hard black box.
|
||||
# Set via `VideoPlayer { qproperty-letterboxColor: ${bg}; }` in a theme.
|
||||
# The class default below is just a fallback; __init__ replaces it
|
||||
# with the current palette's Window color so systems without a custom
|
||||
# QSS (e.g. Windows dark/light mode driven entirely by QPalette) get
|
||||
# a letterbox that automatically matches the OS background.
|
||||
_letterbox_color = QColor("#000000")
|
||||
|
||||
def _get_letterbox_color(self): return self._letterbox_color
|
||||
def _set_letterbox_color(self, c):
|
||||
self._letterbox_color = QColor(c) if isinstance(c, str) else c
|
||||
self._apply_letterbox_color()
|
||||
letterboxColor = Property(QColor, _get_letterbox_color, _set_letterbox_color)
|
||||
|
||||
def _apply_letterbox_color(self) -> None:
|
||||
"""Push the current letterbox color into mpv. No-op if mpv hasn't
|
||||
been initialized yet — _ensure_mpv() calls this after creating the
|
||||
instance so a QSS-set property still takes effect on first use."""
|
||||
if self._mpv is None:
|
||||
return
|
||||
try:
|
||||
self._mpv['background'] = 'color'
|
||||
self._mpv['background-color'] = self._letterbox_color.name()
|
||||
except Exception:
|
||||
# mpv not fully initialized or torn down; letterbox color
|
||||
# is a cosmetic fallback so a property-write refusal just
|
||||
# leaves the default black until next set.
|
||||
pass
|
||||
|
||||
def __init__(self, parent: QWidget | None = None, embed_controls: bool = True) -> None:
|
||||
"""
|
||||
embed_controls: When True (default), the transport controls bar is
|
||||
added to this VideoPlayer's own layout below the video — used by the
|
||||
popout window which then reparents the bar to its overlay layer.
|
||||
When False, the controls bar is constructed but never inserted into
|
||||
any layout, leaving the embedded preview a clean video surface with
|
||||
no transport controls visible. Use the popout for playback control.
|
||||
"""
|
||||
super().__init__(parent)
|
||||
# Initialize the letterbox color from the current palette's Window
|
||||
# role so dark/light mode (or any system without a custom QSS)
|
||||
# gets a sensible default that matches the surrounding panel.
|
||||
# The QSS qproperty-letterboxColor on the bundled themes still
|
||||
# overrides this — Qt calls the setter during widget polish,
|
||||
# which happens AFTER __init__ when the widget is shown.
|
||||
from PySide6.QtGui import QPalette
|
||||
self._letterbox_color = self.palette().color(QPalette.ColorRole.Window)
|
||||
|
||||
layout = QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.setSpacing(0)
|
||||
|
||||
# Video surface — mpv renders via OpenGL render API
|
||||
self._gl_widget = _MpvGLWidget()
|
||||
layout.addWidget(self._gl_widget, stretch=1)
|
||||
|
||||
# mpv reference (set by _ensure_mpv)
|
||||
self._mpv: mpvlib.MPV | None = None
|
||||
|
||||
# Controls bar — in preview panel this sits in the layout normally;
|
||||
# in slideshow mode, FullscreenPreview reparents it as a floating overlay.
|
||||
self._controls_bar = QWidget(self)
|
||||
controls = QHBoxLayout(self._controls_bar)
|
||||
controls.setContentsMargins(4, 2, 4, 2)
|
||||
|
||||
_btn_sz = 24
|
||||
_fg = self.palette().buttonText().color()
|
||||
|
||||
def _icon_btn(shape: str, name: str, tip: str) -> QPushButton:
|
||||
btn = QPushButton()
|
||||
btn.setObjectName(name)
|
||||
btn.setIcon(_paint_icon(shape, _fg))
|
||||
btn.setFixedSize(_btn_sz, _btn_sz)
|
||||
btn.setToolTip(tip)
|
||||
return btn
|
||||
|
||||
self._icon_fg = _fg
|
||||
self._play_icon = _paint_icon("play", _fg)
|
||||
self._pause_icon = _paint_icon("pause", _fg)
|
||||
|
||||
self._play_btn = _icon_btn("play", "_ctrl_play", "Play / Pause (Space)")
|
||||
self._play_btn.clicked.connect(self._toggle_play)
|
||||
controls.addWidget(self._play_btn)
|
||||
|
||||
self._time_label = QLabel("0:00")
|
||||
self._time_label.setMaximumWidth(45)
|
||||
controls.addWidget(self._time_label)
|
||||
|
||||
self._seek_slider = _ClickSeekSlider(Qt.Orientation.Horizontal)
|
||||
self._seek_slider.setRange(0, 0)
|
||||
self._seek_slider.sliderMoved.connect(self._seek)
|
||||
self._seek_slider.clicked_position.connect(self._seek)
|
||||
controls.addWidget(self._seek_slider, stretch=1)
|
||||
|
||||
self._duration_label = QLabel("0:00")
|
||||
self._duration_label.setMaximumWidth(45)
|
||||
controls.addWidget(self._duration_label)
|
||||
|
||||
self._vol_slider = QSlider(Qt.Orientation.Horizontal)
|
||||
self._vol_slider.setRange(0, 100)
|
||||
self._vol_slider.setValue(50)
|
||||
self._vol_slider.setFixedWidth(60)
|
||||
self._vol_slider.valueChanged.connect(self._set_volume)
|
||||
controls.addWidget(self._vol_slider)
|
||||
|
||||
self._vol_icon = _paint_icon("volume", _fg)
|
||||
self._muted_icon = _paint_icon("muted", _fg)
|
||||
|
||||
self._mute_btn = _icon_btn("volume", "_ctrl_mute", "Mute / Unmute")
|
||||
self._mute_btn.clicked.connect(self._toggle_mute)
|
||||
controls.addWidget(self._mute_btn)
|
||||
|
||||
self._autoplay = True
|
||||
self._auto_icon = _paint_icon("auto", _fg)
|
||||
self._autoplay_btn = _icon_btn("auto", "_ctrl_autoplay", "Auto-play videos when selected")
|
||||
self._autoplay_btn.setCheckable(True)
|
||||
self._autoplay_btn.setChecked(True)
|
||||
self._autoplay_btn.clicked.connect(self._toggle_autoplay)
|
||||
self._autoplay_btn.hide()
|
||||
controls.addWidget(self._autoplay_btn)
|
||||
|
||||
self._loop_icons = {
|
||||
0: _paint_icon("loop", _fg),
|
||||
1: _paint_icon("once", _fg),
|
||||
2: _paint_icon("next", _fg),
|
||||
}
|
||||
self._loop_state = 0 # 0=Loop, 1=Once, 2=Next
|
||||
self._loop_btn = _icon_btn("loop", "_ctrl_loop", "Loop / Once / Next")
|
||||
self._loop_btn.clicked.connect(self._cycle_loop)
|
||||
controls.addWidget(self._loop_btn)
|
||||
|
||||
# NO styleSheet here. The popout (FullscreenPreview) re-applies its
|
||||
# own `_slideshow_controls` overlay styling after reparenting the
|
||||
# bar to its central widget — see FullscreenPreview.__init__ — so
|
||||
# the popout still gets the floating dark-translucent look. The
|
||||
# embedded preview leaves the bar unstyled so it inherits the
|
||||
# panel theme and visually matches the Bookmark/Save/BL Tag bar
|
||||
# at the top of the panel rather than looking like a stamped-in
|
||||
# overlay box.
|
||||
if embed_controls:
|
||||
layout.addWidget(self._controls_bar)
|
||||
|
||||
# Responsive hiding: watch controls bar resize and hide widgets
|
||||
# that don't fit at narrow widths.
|
||||
self._controls_bar.installEventFilter(self)
|
||||
|
||||
self._eof_pending = False
|
||||
# Stale-eof suppression window. mpv emits `eof-reached=True`
|
||||
# whenever a file ends — including via `command('stop')` —
|
||||
# and the observer fires asynchronously on mpv's event thread.
|
||||
# When set_media swaps to a new file, the previous file's stop
|
||||
# generates an eof event that can race with `play_file`'s
|
||||
# `_eof_pending = False` reset and arrive AFTER it, sticking
|
||||
# the bool back to True. The next `_poll` then runs
|
||||
# `_handle_eof` and emits `play_next` in Loop=Next mode →
|
||||
# auto-advance past the post the user wanted → SKIP.
|
||||
#
|
||||
# Fix: ignore eof events for `_eof_ignore_window_secs` after
|
||||
# each `play_file` call. The race is single-digit ms, so
|
||||
# 250ms is comfortably wide for the suppression and narrow
|
||||
# enough not to mask a real EOF on the shortest possible
|
||||
# videos (booru video clips are always >= 1s).
|
||||
self._eof_ignore_until: float = 0.0
|
||||
self._eof_ignore_window_secs: float = 0.25
|
||||
|
||||
# The legacy 500ms `_seek_pending_until` pin window that lived
|
||||
# here was removed after `609066c` switched the slider seek
|
||||
# to `'absolute+exact'`. With exact seek, mpv lands at the
|
||||
# click position rather than at a keyframe before it, so the
|
||||
# slider doesn't drag back through the missing time when
|
||||
# `_poll` resumes reading `time_pos` after the seek. The pin
|
||||
# was defense in depth for keyframe-rounding latency that no
|
||||
# longer exists.
|
||||
|
||||
# Polling timer for position/duration/pause/eof state
|
||||
self._poll_timer = QTimer(self)
|
||||
self._poll_timer.setInterval(100)
|
||||
self._poll_timer.timeout.connect(self._poll)
|
||||
|
||||
# Pending values from mpv observers (written from mpv thread)
|
||||
self._pending_duration: float | None = None
|
||||
self._media_ready_fired = False
|
||||
self._current_file: str | None = None
|
||||
# Last reported source video size — used to dedupe video-params
|
||||
# observer firings so widget-driven re-emissions don't trigger
|
||||
# repeated _fit_to_content calls (which would loop forever).
|
||||
self._last_video_size: tuple[int, int] | None = None
|
||||
# Pending mute state — survives the lazy mpv creation. The popout's
|
||||
# video player is constructed with no mpv attached (mpv is wired
|
||||
# in _ensure_mpv on first set_media), and main_window's open-popout
|
||||
# state sync writes is_muted before mpv exists. Without a Python-
|
||||
# side fallback the value would be lost — the setter would update
|
||||
# button text but the actual mpv instance (created later) would
|
||||
# spawn unmuted by default. _ensure_mpv replays this on creation.
|
||||
self._pending_mute: bool = False
|
||||
|
||||
def _ensure_mpv(self) -> mpvlib.MPV:
|
||||
"""Set up mpv callbacks on first use. MPV instance is pre-created."""
|
||||
if self._mpv is not None:
|
||||
return self._mpv
|
||||
self._mpv = self._gl_widget._mpv
|
||||
self._mpv['loop-file'] = 'inf' # default to loop mode
|
||||
self._mpv.volume = self._vol_slider.value()
|
||||
self._mpv.mute = self._pending_mute
|
||||
self._mpv.observe_property('duration', self._on_duration_change)
|
||||
self._mpv.observe_property('eof-reached', self._on_eof_reached)
|
||||
self._mpv.observe_property('video-params', self._on_video_params)
|
||||
# Forward mpv's `playback-restart` event to the Qt-side signal so
|
||||
# the popout's state machine adapter can dispatch VideoStarted /
|
||||
# SeekCompleted events on the GUI thread. mpv's event_callback
|
||||
# decorator runs on mpv's event thread; emitting a Qt Signal is
|
||||
# thread-safe and the receiving slot runs on the connection's
|
||||
# target thread (typically the GUI main loop via the default
|
||||
# AutoConnection from the same-thread receiver).
|
||||
@self._mpv.event_callback('playback-restart')
|
||||
def _emit_playback_restart(_event):
|
||||
self.playback_restart.emit()
|
||||
self._pending_video_size: tuple[int, int] | None = None
|
||||
# Push any QSS-set letterbox color into mpv now that the instance
|
||||
# exists. The qproperty-letterboxColor setter is a no-op if mpv
|
||||
# hasn't been initialized yet, so we have to (re)apply on init.
|
||||
self._apply_letterbox_color()
|
||||
return self._mpv
|
||||
|
||||
# -- Public API (used by app.py for state sync) --
|
||||
|
||||
@property
|
||||
def volume(self) -> int:
|
||||
return self._vol_slider.value()
|
||||
|
||||
@volume.setter
|
||||
def volume(self, val: int) -> None:
|
||||
self._vol_slider.setValue(val)
|
||||
|
||||
@property
|
||||
def is_muted(self) -> bool:
|
||||
if self._mpv:
|
||||
return bool(self._mpv.mute)
|
||||
return self._pending_mute
|
||||
|
||||
@is_muted.setter
|
||||
def is_muted(self, val: bool) -> None:
|
||||
self._pending_mute = val
|
||||
if self._mpv:
|
||||
self._mpv.mute = val
|
||||
self._mute_btn.setIcon(self._muted_icon if val else self._vol_icon)
|
||||
|
||||
@property
|
||||
def autoplay(self) -> bool:
|
||||
return self._autoplay
|
||||
|
||||
@autoplay.setter
|
||||
def autoplay(self, val: bool) -> None:
|
||||
self._autoplay = val
|
||||
self._autoplay_btn.setChecked(val)
|
||||
self._autoplay_btn.setIcon(self._auto_icon if val else self._play_icon)
|
||||
self._autoplay_btn.setToolTip("Autoplay on" if val else "Autoplay off")
|
||||
|
||||
@property
|
||||
def loop_state(self) -> int:
|
||||
return self._loop_state
|
||||
|
||||
@loop_state.setter
|
||||
def loop_state(self, val: int) -> None:
|
||||
self._loop_state = val
|
||||
tips = ["Loop: repeat", "Once: stop at end", "Next: advance"]
|
||||
self._loop_btn.setIcon(self._loop_icons[val])
|
||||
self._loop_btn.setToolTip(tips[val])
|
||||
self._autoplay_btn.setVisible(val == 2)
|
||||
self._apply_loop_to_mpv()
|
||||
|
||||
def get_position_ms(self) -> int:
|
||||
if self._mpv and self._mpv.time_pos is not None:
|
||||
return int(self._mpv.time_pos * 1000)
|
||||
return 0
|
||||
|
||||
def seek_to_ms(self, ms: int) -> None:
|
||||
if self._mpv:
|
||||
self._mpv.seek(ms / 1000.0, 'absolute+exact')
|
||||
|
||||
def play_file(self, path: str, info: str = "") -> None:
|
||||
"""Play a file from a local path OR a remote http(s) URL.
|
||||
|
||||
URL playback is the fast path for uncached videos: rather than
|
||||
waiting for `download_image` to finish writing the entire file
|
||||
to disk before mpv touches it, the load flow hands mpv the
|
||||
remote URL and lets mpv stream + buffer + render the first
|
||||
frame in parallel with the cache-populating download. mpv's
|
||||
first frame typically lands in 1-2s instead of waiting for
|
||||
the full multi-MB transfer.
|
||||
|
||||
For URL paths we set the `referrer` per-file option from the
|
||||
booru's hostname so CDNs that gate downloads on Referer don't
|
||||
reject mpv's request — same logic our own httpx client uses
|
||||
in `cache._referer_for`. python-mpv's `loadfile()` accepts
|
||||
per-file `**options` kwargs that become `--key=value` overrides
|
||||
for the duration of that file.
|
||||
"""
|
||||
m = self._ensure_mpv()
|
||||
self._gl_widget.ensure_gl_init()
|
||||
# Re-arm hardware decoder before each load. stop() sets
|
||||
# hwdec=no to release the NVDEC/VAAPI surface pool (the bulk
|
||||
# of mpv's idle VRAM footprint on NVIDIA), so we flip it back
|
||||
# to auto here so the next loadfile picks up hwdec again.
|
||||
# mpv re-inits the decoder context on the next frame — swamped
|
||||
# by the network fetch for uncached videos.
|
||||
try:
|
||||
m['hwdec'] = 'auto'
|
||||
except Exception:
|
||||
# If hwdec re-arm is refused, mpv falls back to software
|
||||
# decode silently — playback still works, just at higher
|
||||
# CPU cost on this file.
|
||||
pass
|
||||
self._current_file = path
|
||||
self._media_ready_fired = False
|
||||
self._pending_duration = None
|
||||
self._eof_pending = False
|
||||
# Open the stale-eof suppression window. Any eof-reached event
|
||||
# arriving from mpv's event thread within the next 250ms is
|
||||
# treated as belonging to the previous file's stop and
|
||||
# ignored — see the long comment at __init__'s
|
||||
# `_eof_ignore_until` definition for the race trace.
|
||||
self._eof_ignore_until = time.monotonic() + self._eof_ignore_window_secs
|
||||
self._last_video_size = None # reset dedupe so new file fires a fit
|
||||
self._apply_loop_to_mpv()
|
||||
|
||||
if path.startswith(("http://", "https://")):
|
||||
from urllib.parse import urlparse
|
||||
from ...core.cache import _referer_for
|
||||
referer = _referer_for(urlparse(path))
|
||||
m.loadfile(path, "replace", referrer=referer)
|
||||
else:
|
||||
m.loadfile(path)
|
||||
if self._autoplay:
|
||||
m.pause = False
|
||||
else:
|
||||
m.pause = True
|
||||
self._play_btn.setIcon(self._pause_icon if not m.pause else self._play_icon)
|
||||
self._poll_timer.start()
|
||||
|
||||
def stop(self) -> None:
|
||||
self._poll_timer.stop()
|
||||
if self._mpv:
|
||||
self._mpv.command('stop')
|
||||
# Drop the hardware decoder surface pool to release VRAM
|
||||
# while idle. On NVIDIA the NVDEC pool is the bulk of mpv's
|
||||
# idle footprint and keep_open=yes + the live GL render
|
||||
# context would otherwise pin it for the widget lifetime.
|
||||
# play_file re-arms hwdec='auto' before the next loadfile.
|
||||
try:
|
||||
self._mpv['hwdec'] = 'no'
|
||||
except Exception:
|
||||
# Best-effort VRAM release on stop; if mpv is mid-
|
||||
# teardown and rejects the write, GL context destruction
|
||||
# still drops the surface pool eventually.
|
||||
pass
|
||||
# Free the GL render context so its internal textures and FBOs
|
||||
# release VRAM while no video is playing. The next play_file()
|
||||
# call recreates the context via ensure_gl_init() (~5ms cost,
|
||||
# swamped by the network fetch for uncached videos).
|
||||
self._gl_widget.release_render_context()
|
||||
self._time_label.setText("0:00")
|
||||
self._duration_label.setText("0:00")
|
||||
self._seek_slider.setRange(0, 0)
|
||||
self._play_btn.setIcon(self._play_icon)
|
||||
|
||||
def pause(self) -> None:
|
||||
if self._mpv:
|
||||
self._mpv.pause = True
|
||||
self._play_btn.setIcon(self._play_icon)
|
||||
|
||||
def resume(self) -> None:
|
||||
if self._mpv:
|
||||
self._mpv.pause = False
|
||||
self._play_btn.setIcon(self._pause_icon)
|
||||
|
||||
# -- Internal controls --
|
||||
|
||||
def eventFilter(self, obj, event):
|
||||
if obj is self._controls_bar and event.type() == event.Type.Resize:
|
||||
self._apply_responsive_layout()
|
||||
return super().eventFilter(obj, event)
|
||||
|
||||
def _apply_responsive_layout(self) -> None:
|
||||
"""Hide/show control elements based on available width."""
|
||||
w = self._controls_bar.width()
|
||||
# Breakpoints — hide wider elements first
|
||||
show_volume = w >= 320
|
||||
show_duration = w >= 240
|
||||
show_time = w >= 200
|
||||
self._vol_slider.setVisible(show_volume)
|
||||
self._duration_label.setVisible(show_duration)
|
||||
self._time_label.setVisible(show_time)
|
||||
|
||||
def _toggle_play(self) -> None:
|
||||
if not self._mpv:
|
||||
return
|
||||
# If paused at end-of-file (Once mode after playback), seek back
|
||||
# to the start so pressing play replays instead of doing nothing.
|
||||
if self._mpv.pause:
|
||||
try:
|
||||
pos = self._mpv.time_pos
|
||||
dur = self._mpv.duration
|
||||
if pos is not None and dur is not None and dur > 0 and pos >= dur - 0.5:
|
||||
self._mpv.command('seek', 0, 'absolute+exact')
|
||||
except Exception:
|
||||
# Replay-on-end is a UX nicety; if mpv refuses the
|
||||
# seek (stream not ready, state mid-transition) just
|
||||
# toggle pause without rewinding.
|
||||
pass
|
||||
self._mpv.pause = not self._mpv.pause
|
||||
self._play_btn.setIcon(self._play_icon if self._mpv.pause else self._pause_icon)
|
||||
|
||||
def _toggle_autoplay(self, checked: bool = True) -> None:
|
||||
self._autoplay = self._autoplay_btn.isChecked()
|
||||
self._autoplay_btn.setIcon(self._auto_icon if self._autoplay else self._play_icon)
|
||||
self._autoplay_btn.setToolTip("Autoplay on" if self._autoplay else "Autoplay off")
|
||||
|
||||
def _cycle_loop(self) -> None:
|
||||
self.loop_state = (self._loop_state + 1) % 3
|
||||
|
||||
def _apply_loop_to_mpv(self) -> None:
|
||||
if not self._mpv:
|
||||
return
|
||||
if self._loop_state == 0: # Loop
|
||||
self._mpv['loop-file'] = 'inf'
|
||||
else: # Once or Next
|
||||
self._mpv['loop-file'] = 'no'
|
||||
|
||||
def _seek(self, pos: int) -> None:
|
||||
"""Seek to position in milliseconds (from slider).
|
||||
|
||||
Uses `'absolute+exact'` (frame-accurate seek) to match the
|
||||
existing `seek_to_ms` and `_seek_relative` methods. mpv
|
||||
decodes from the previous keyframe forward to the exact
|
||||
target position, costing 30-100ms more than keyframe-only
|
||||
seek but landing `time_pos` at the click position exactly.
|
||||
|
||||
See `609066c` for the drag-back race fix that introduced
|
||||
this. The legacy 500ms `_seek_pending_until` pin window that
|
||||
used to wrap this call was removed after the exact-seek
|
||||
change made it redundant.
|
||||
"""
|
||||
if self._mpv:
|
||||
self._mpv.seek(pos / 1000.0, 'absolute+exact')
|
||||
|
||||
def _seek_relative(self, ms: int) -> None:
|
||||
if self._mpv:
|
||||
self._mpv.seek(ms / 1000.0, 'relative+exact')
|
||||
|
||||
def _set_volume(self, val: int) -> None:
|
||||
if self._mpv:
|
||||
self._mpv.volume = val
|
||||
|
||||
def _toggle_mute(self) -> None:
|
||||
if self._mpv:
|
||||
self._mpv.mute = not self._mpv.mute
|
||||
self._pending_mute = bool(self._mpv.mute)
|
||||
self._mute_btn.setIcon(self._muted_icon if self._mpv.mute else self._vol_icon)
|
||||
|
||||
# -- mpv callbacks (called from mpv thread) --
|
||||
|
||||
def _on_video_params(self, _name: str, value) -> None:
|
||||
"""Called from mpv thread when video dimensions become known."""
|
||||
if isinstance(value, dict) and value.get('w') and value.get('h'):
|
||||
new_size = (value['w'], value['h'])
|
||||
# mpv re-fires video-params on output-area changes too. Dedupe
|
||||
# against the source dimensions we last reported so resizing the
|
||||
# popout doesn't kick off a fit→resize→fit feedback loop.
|
||||
if new_size != self._last_video_size:
|
||||
self._last_video_size = new_size
|
||||
self._pending_video_size = new_size
|
||||
|
||||
def _on_eof_reached(self, _name: str, value) -> None:
|
||||
"""Called from mpv thread when eof-reached changes.
|
||||
|
||||
Suppresses eof events that arrive within the post-play_file
|
||||
ignore window — those are stale events from the previous
|
||||
file's stop and would otherwise race the `_eof_pending=False`
|
||||
reset and trigger a spurious play_next auto-advance.
|
||||
"""
|
||||
if value is True:
|
||||
if time.monotonic() < self._eof_ignore_until:
|
||||
# Stale eof from a previous file's stop. Drop it.
|
||||
return
|
||||
self._eof_pending = True
|
||||
|
||||
def _on_duration_change(self, _name: str, value) -> None:
|
||||
if value is not None and value > 0:
|
||||
self._pending_duration = value
|
||||
|
||||
# -- Main-thread polling --
|
||||
|
||||
def _poll(self) -> None:
|
||||
if not self._mpv:
|
||||
return
|
||||
# Position. After the `609066c` exact-seek fix and the
|
||||
# subsequent removal of the `_seek_pending_until` pin window,
|
||||
# this is just a straight read-and-write — `mpv.time_pos`
|
||||
# equals the click position immediately after a slider seek
|
||||
# because mpv decodes from the previous keyframe forward to
|
||||
# the exact target before reporting it.
|
||||
pos = self._mpv.time_pos
|
||||
if pos is not None:
|
||||
pos_ms = int(pos * 1000)
|
||||
if not self._seek_slider.isSliderDown():
|
||||
self._seek_slider.setValue(pos_ms)
|
||||
self._time_label.setText(self._fmt(pos_ms))
|
||||
|
||||
# Duration (from observer)
|
||||
dur = self._pending_duration
|
||||
if dur is not None:
|
||||
dur_ms = int(dur * 1000)
|
||||
if self._seek_slider.maximum() != dur_ms:
|
||||
self._seek_slider.setRange(0, dur_ms)
|
||||
self._duration_label.setText(self._fmt(dur_ms))
|
||||
if not self._media_ready_fired:
|
||||
self._media_ready_fired = True
|
||||
self.media_ready.emit()
|
||||
|
||||
# Pause state
|
||||
paused = self._mpv.pause
|
||||
expected_icon = self._play_icon if paused else self._pause_icon
|
||||
if self._play_btn.icon().cacheKey() != expected_icon.cacheKey():
|
||||
self._play_btn.setIcon(expected_icon)
|
||||
|
||||
# Video size (set by observer on mpv thread, emitted here on main thread)
|
||||
if self._pending_video_size is not None:
|
||||
w, h = self._pending_video_size
|
||||
self._pending_video_size = None
|
||||
self.video_size.emit(w, h)
|
||||
|
||||
# EOF (set by observer on mpv thread, handled here on main thread)
|
||||
if self._eof_pending:
|
||||
self._handle_eof()
|
||||
|
||||
def _handle_eof(self) -> None:
|
||||
"""Handle end-of-file on the main thread."""
|
||||
if not self._eof_pending:
|
||||
return
|
||||
self._eof_pending = False
|
||||
if self._loop_state == 1: # Once
|
||||
self.pause()
|
||||
elif self._loop_state == 2: # Next
|
||||
self.pause()
|
||||
self.play_next.emit()
|
||||
|
||||
@staticmethod
|
||||
def _fmt(ms: int) -> str:
|
||||
s = ms // 1000
|
||||
m = s // 60
|
||||
return f"{m}:{s % 60:02d}"
|
||||
|
||||
def destroy(self, *args, **kwargs) -> None:
|
||||
self._poll_timer.stop()
|
||||
self._gl_widget.cleanup()
|
||||
self._mpv = None
|
||||
super().destroy(*args, **kwargs)
|
||||
322
booru_viewer/gui/media_controller.py
Normal file
322
booru_viewer/gui/media_controller.py
Normal file
@ -0,0 +1,322 @@
|
||||
"""Image/video loading, prefetch, download progress, and cache eviction."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..core.cache import download_image, cache_size_bytes, evict_oldest, evict_oldest_thumbnails
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
# -- Pure functions (tested in tests/gui/test_media_controller.py) --
|
||||
|
||||
|
||||
def compute_prefetch_order(
    index: int, total: int, columns: int, mode: str,
) -> list[int]:
    """Return an ordered list of indices to prefetch around *index*.

    *mode* is ``"Nearby"`` (the 4 cardinal neighbours) or ``"Aggressive"``
    (expanding rings, capped at ~3 rows of radius).
    """
    if total == 0:
        return []

    def in_range(i: int) -> bool:
        return 0 <= i < total

    if mode == "Nearby":
        # Right, left, below, above — in that priority order.
        candidates = (index + d for d in (1, -1, columns, -columns))
        return [i for i in candidates if in_range(i)]

    # Aggressive: expand outward ring by ring.
    max_radius = 3
    cap = columns * max_radius * 2 + columns
    visited = {index}
    result: list[int] = []
    for dist in range(1, max_radius + 1):
        ring: set[int] = set()
        for dy in (-dist, 0, dist):
            for dx in (-dist, 0, dist):
                if dy == 0 and dx == 0:
                    continue
                cand = index + dy * columns + dx
                if in_range(cand) and cand not in visited:
                    ring.add(cand)
        # Also take the direct horizontal neighbours at this distance.
        for cand in (index + dist, index - dist):
            if in_range(cand) and cand not in visited:
                ring.add(cand)
        for cand in sorted(ring):
            visited.add(cand)
            result.append(cand)
        if len(result) >= cap:
            break
    return result
|
||||
|
||||
|
||||
# -- Controller --
|
||||
|
||||
|
||||
class MediaController:
    """Owns image/video loading, prefetch, download progress, and cache eviction."""

    def __init__(self, app: BooruApp) -> None:
        self._app = app
        # Set while prefetch may run; cleared to pause background prefetch
        # for the duration of a foreground download.
        self._prefetch_pause = asyncio.Event()
        self._prefetch_pause.set()  # not paused
        self._last_evict_check = 0.0  # monotonic timestamp of last eviction pass
        self._prefetch_gen = 0  # bumped on each prefetch_adjacent call
|
||||
|
||||
# -- Post activation (media load) --
|
||||
|
||||
def on_post_activated(self, index: int) -> None:
|
||||
if 0 <= index < len(self._app._posts):
|
||||
post = self._app._posts[index]
|
||||
log.info(f"Preview: #{post.id} -> {post.file_url}")
|
||||
try:
|
||||
if self._app._popout_ctrl.window:
|
||||
self._app._popout_ctrl.window.force_mpv_pause()
|
||||
pmpv = self._app._preview._video_player._mpv
|
||||
if pmpv is not None:
|
||||
pmpv.pause = True
|
||||
except Exception:
|
||||
pass
|
||||
self._app._preview._current_post = post
|
||||
self._app._preview._current_site_id = self._app._site_combo.currentData()
|
||||
self._app._preview.set_post_tags(post.tag_categories, post.tag_list)
|
||||
self._app._ensure_post_categories_async(post)
|
||||
site_id = self._app._preview._current_site_id
|
||||
self._app._preview.update_bookmark_state(
|
||||
bool(site_id and self._app._db.is_bookmarked(site_id, post.id))
|
||||
)
|
||||
self._app._preview.update_save_state(self._app._post_actions.is_post_saved(post.id))
|
||||
self._app._status.showMessage(f"Loading #{post.id}...")
|
||||
preview_hidden = not (
|
||||
self._app._preview.isVisible() and self._app._preview.width() > 0
|
||||
)
|
||||
if preview_hidden:
|
||||
self._app._signals.prefetch_progress.emit(index, 0.0)
|
||||
else:
|
||||
self._app._dl_progress.show()
|
||||
self._app._dl_progress.setRange(0, 0)
|
||||
|
||||
def _progress(downloaded, total):
|
||||
self._app._signals.download_progress.emit(downloaded, total)
|
||||
if preview_hidden and total > 0:
|
||||
self._app._signals.prefetch_progress.emit(
|
||||
index, downloaded / total
|
||||
)
|
||||
|
||||
info = (f"#{post.id} {post.width}x{post.height} score:{post.score} [{post.rating}] {Path(post.file_url.split('?')[0]).suffix.lstrip('.').upper() if post.file_url else ''}"
|
||||
+ (f" {post.created_at}" if post.created_at else ""))
|
||||
|
||||
from ..core.cache import is_cached
|
||||
from .media.constants import VIDEO_EXTENSIONS
|
||||
is_video = bool(
|
||||
post.file_url
|
||||
and Path(post.file_url.split('?')[0]).suffix.lower() in VIDEO_EXTENSIONS
|
||||
)
|
||||
streaming = is_video and post.file_url and not is_cached(post.file_url)
|
||||
if streaming:
|
||||
self._app._signals.video_stream.emit(
|
||||
post.file_url, info, post.width, post.height
|
||||
)
|
||||
|
||||
async def _load():
|
||||
self._prefetch_pause.clear()
|
||||
try:
|
||||
path = await download_image(post.file_url, progress_callback=_progress)
|
||||
self._app._signals.image_done.emit(str(path), info)
|
||||
except Exception as e:
|
||||
log.error(f"Image download failed: {e}")
|
||||
self._app._signals.image_error.emit(str(e))
|
||||
finally:
|
||||
self._prefetch_pause.set()
|
||||
if preview_hidden:
|
||||
self._app._signals.prefetch_progress.emit(index, -1)
|
||||
|
||||
self._app._run_async(_load)
|
||||
|
||||
if self._app._db.get_setting("prefetch_mode") in ("Nearby", "Aggressive"):
|
||||
self.prefetch_adjacent(index)
|
||||
|
||||
# -- Image/video result handlers --
|
||||
|
||||
def on_image_done(self, path: str, info: str) -> None:
|
||||
self._app._dl_progress.hide()
|
||||
# If the preview is already streaming this video from URL,
|
||||
# just update path references so copy/paste works — don't
|
||||
# restart playback.
|
||||
current = self._app._preview._current_path
|
||||
if current and current.startswith(("http://", "https://")):
|
||||
from ..core.cache import cached_path_for
|
||||
if Path(path) == cached_path_for(current):
|
||||
self._app._preview._current_path = path
|
||||
idx = self._app._grid.selected_index
|
||||
if 0 <= idx < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[idx]._cached_path = path
|
||||
cn = self._app._search_ctrl._cached_names
|
||||
if cn is not None:
|
||||
cn.add(Path(path).name)
|
||||
self._app._status.showMessage(info)
|
||||
self.auto_evict_cache()
|
||||
return
|
||||
if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
|
||||
self._app._preview._info_label.setText(info)
|
||||
self._app._preview._current_path = path
|
||||
else:
|
||||
self.set_preview_media(path, info)
|
||||
self._app._status.showMessage(info)
|
||||
idx = self._app._grid.selected_index
|
||||
if 0 <= idx < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[idx]._cached_path = path
|
||||
# Keep the search controller's cached-names set current so
|
||||
# subsequent _drain_append_queue calls see newly downloaded files
|
||||
# without a full directory rescan.
|
||||
cn = self._app._search_ctrl._cached_names
|
||||
if cn is not None:
|
||||
from pathlib import Path as _P
|
||||
cn.add(_P(path).name)
|
||||
self._app._popout_ctrl.update_media(path, info)
|
||||
self.auto_evict_cache()
|
||||
|
||||
def on_video_stream(self, url: str, info: str, width: int, height: int) -> None:
|
||||
if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
|
||||
self._app._preview._info_label.setText(info)
|
||||
self._app._preview._current_path = url
|
||||
self._app._popout_ctrl.window.set_media(url, info, width=width, height=height)
|
||||
self._app._popout_ctrl.update_state()
|
||||
else:
|
||||
self._app._preview._video_player.stop()
|
||||
self._app._preview.set_media(url, info)
|
||||
# Pre-set the expected cache path on the thumbnail immediately.
|
||||
# The parallel httpx download will also set it via on_image_done
|
||||
# when it completes, but this makes it available for drag-to-copy
|
||||
# from the moment streaming starts.
|
||||
from ..core.cache import cached_path_for
|
||||
idx = self._app._grid.selected_index
|
||||
if 0 <= idx < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[idx]._cached_path = str(cached_path_for(url))
|
||||
self._app._status.showMessage(f"Streaming #{Path(url.split('?')[0]).name}...")
|
||||
|
||||
def on_download_progress(self, downloaded: int, total: int) -> None:
|
||||
popout_open = bool(self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible())
|
||||
if total > 0:
|
||||
if not popout_open:
|
||||
self._app._dl_progress.setRange(0, total)
|
||||
self._app._dl_progress.setValue(downloaded)
|
||||
self._app._dl_progress.show()
|
||||
mb = downloaded / (1024 * 1024)
|
||||
total_mb = total / (1024 * 1024)
|
||||
self._app._status.showMessage(f"Downloading... {mb:.1f}/{total_mb:.1f} MB")
|
||||
if downloaded >= total and not popout_open:
|
||||
self._app._dl_progress.hide()
|
||||
elif not popout_open:
|
||||
self._app._dl_progress.setRange(0, 0)
|
||||
self._app._dl_progress.show()
|
||||
|
||||
def set_preview_media(self, path: str, info: str) -> None:
|
||||
"""Set media on preview or just info if popout is open."""
|
||||
if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
|
||||
self._app._preview._info_label.setText(info)
|
||||
self._app._preview._current_path = path
|
||||
else:
|
||||
self._app._preview.set_media(path, info)
|
||||
|
||||
# -- Prefetch --
|
||||
|
||||
def on_prefetch_progress(self, index: int, progress: float) -> None:
|
||||
if 0 <= index < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[index].set_prefetch_progress(progress)
|
||||
|
||||
def prefetch_adjacent(self, index: int) -> None:
|
||||
"""Prefetch posts around the given index.
|
||||
|
||||
Bumps a generation counter so any previously running spiral
|
||||
exits at its next iteration instead of continuing to download
|
||||
stale adjacencies.
|
||||
"""
|
||||
total = len(self._app._posts)
|
||||
if total == 0:
|
||||
return
|
||||
cols = self._app._grid._flow.columns
|
||||
mode = self._app._db.get_setting("prefetch_mode")
|
||||
order = compute_prefetch_order(index, total, cols, mode)
|
||||
|
||||
self._prefetch_gen += 1
|
||||
gen = self._prefetch_gen
|
||||
|
||||
async def _prefetch_spiral():
|
||||
for adj in order:
|
||||
if self._prefetch_gen != gen:
|
||||
return # superseded by a newer prefetch
|
||||
await self._prefetch_pause.wait()
|
||||
if self._prefetch_gen != gen:
|
||||
return
|
||||
if 0 <= adj < len(self._app._posts) and self._app._posts[adj].file_url:
|
||||
self._app._signals.prefetch_progress.emit(adj, 0.0)
|
||||
try:
|
||||
def _progress(dl, total_bytes, idx=adj):
|
||||
if total_bytes > 0:
|
||||
self._app._signals.prefetch_progress.emit(idx, dl / total_bytes)
|
||||
await download_image(self._app._posts[adj].file_url, progress_callback=_progress)
|
||||
except Exception as e:
|
||||
log.warning(f"Operation failed: {e}")
|
||||
self._app._signals.prefetch_progress.emit(adj, -1)
|
||||
await asyncio.sleep(0.2)
|
||||
self._app._run_async(_prefetch_spiral)
|
||||
|
||||
# -- Cache eviction --
|
||||
|
||||
def auto_evict_cache(self) -> None:
|
||||
import time
|
||||
now = time.monotonic()
|
||||
if now - self._last_evict_check < 30:
|
||||
return
|
||||
self._last_evict_check = now
|
||||
if not self._app._db.get_setting_bool("auto_evict"):
|
||||
return
|
||||
max_mb = self._app._db.get_setting_int("max_cache_mb")
|
||||
if max_mb <= 0:
|
||||
return
|
||||
max_bytes = max_mb * 1024 * 1024
|
||||
current = cache_size_bytes(include_thumbnails=False)
|
||||
if current > max_bytes:
|
||||
protected = set()
|
||||
for fav in self._app._db.get_bookmarks(limit=999999):
|
||||
if fav.cached_path:
|
||||
protected.add(fav.cached_path)
|
||||
evicted = evict_oldest(max_bytes, protected, current_bytes=current)
|
||||
if evicted:
|
||||
log.info(f"Auto-evicted {evicted} cached files")
|
||||
max_thumb_mb = self._app._db.get_setting_int("max_thumb_cache_mb") or 500
|
||||
max_thumb_bytes = max_thumb_mb * 1024 * 1024
|
||||
evicted_thumbs = evict_oldest_thumbnails(max_thumb_bytes)
|
||||
if evicted_thumbs:
|
||||
log.info(f"Auto-evicted {evicted_thumbs} thumbnails")
|
||||
|
||||
# -- Utility --
|
||||
|
||||
@staticmethod
|
||||
def image_dimensions(path: str) -> tuple[int, int]:
|
||||
"""Read image width/height from a local file without decoding pixels."""
|
||||
from .media.constants import _is_video
|
||||
if _is_video(path):
|
||||
return 0, 0
|
||||
try:
|
||||
from PySide6.QtGui import QImageReader
|
||||
reader = QImageReader(path)
|
||||
size = reader.size()
|
||||
if size.isValid():
|
||||
return size.width(), size.height()
|
||||
except Exception:
|
||||
pass
|
||||
return 0, 0
|
||||
0
booru_viewer/gui/popout/__init__.py
Normal file
0
booru_viewer/gui/popout/__init__.py
Normal file
201
booru_viewer/gui/popout/effects.py
Normal file
201
booru_viewer/gui/popout/effects.py
Normal file
@ -0,0 +1,201 @@
|
||||
"""Effect descriptors for the popout state machine.
|
||||
|
||||
Pure-Python frozen dataclasses describing what the Qt-side adapter
|
||||
should do in response to a state machine dispatch. The state machine
|
||||
in `popout/state.py` returns a list of these from each `dispatch()`
|
||||
call; the adapter pattern-matches by type and applies them in order.
|
||||
|
||||
**Hard constraint**: this module MUST NOT import anything from
|
||||
PySide6, mpv, httpx, subprocess, or any module that does. Same purity
|
||||
gate as `state.py` — the test suite imports both directly without
|
||||
standing up a QApplication.
|
||||
|
||||
The effect types are documented in detail in
|
||||
`docs/POPOUT_ARCHITECTURE.md` "Effects" section.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Union
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Media-control effects
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class LoadImage:
    """Display a static image or animated GIF. The adapter routes on
    `is_gif`: True -> ImageViewer.set_gif, False -> set_image.
    """

    path: str
    is_gif: bool


@dataclass(frozen=True)
class LoadVideo:
    """Hand a path or URL to mpv via `VideoPlayer.play_file`. When
    `referer` is set, the adapter forwards it as play_file's per-file
    referrer option (current behavior at media/video_player.py:343-347).
    """

    path: str
    info: str
    referer: Optional[str] = None


@dataclass(frozen=True)
class StopMedia:
    """Clear both surfaces (image viewer and video player). Emitted on
    navigation away from the current media and on close.
    """


@dataclass(frozen=True)
class ApplyMute:
    """Push `state.mute` to mpv. The adapter assigns
    `self._video.is_muted = value`, going through VideoPlayer's setter
    (which already covers the lazy-mpv case via _pending_mute as
    defense in depth).
    """

    value: bool


@dataclass(frozen=True)
class ApplyVolume:
    """Push `state.volume` to mpv via the existing
    `VideoPlayer.volume = value` setter (which writes through the
    slider widget — the persistent storage).
    """

    value: int


@dataclass(frozen=True)
class ApplyLoopMode:
    """Push `state.loop_mode` to mpv via the existing
    `VideoPlayer.loop_state = value` setter.
    """

    value: int  # LoopMode.value, kept as int for cross-process portability


@dataclass(frozen=True)
class SeekVideoTo:
    """The adapter calls `mpv.seek(target_ms / 1000.0, 'absolute')`.
    Note the plain 'absolute' (keyframe seek), not 'absolute+exact' —
    this matches the current slider behavior at video_player.py:405.
    Seek pinning is independent: while in SeekingVideo the slider shows
    `state.seek_target_ms`, regardless of mpv's keyframe-rounded
    actual position.
    """

    target_ms: int


@dataclass(frozen=True)
class TogglePlay:
    """Toggle mpv's `pause` property. The adapter calls
    `VideoPlayer._toggle_play()`.
    """
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Window/geometry effects
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class FitWindowToContent:
    """Compute the new window rect for the given content aspect using
    `state.viewport` and dispatch it to Hyprland (or `setGeometry()` on
    non-Hyprland). The adapter delegates the rect math + dispatch to the
    helpers in `popout/hyprland.py`.
    """

    content_w: int
    content_h: int


@dataclass(frozen=True)
class EnterFullscreen:
    """Adapter calls `self.showFullScreen()`."""


@dataclass(frozen=True)
class ExitFullscreen:
    """Adapter calls `self.showNormal()` then defers a
    FitWindowToContent on the next event-loop tick (matching the current
    `QTimer.singleShot(0, ...)` pattern at popout/window.py:1023).
    """
|
||||
# ----------------------------------------------------------------------
# Outbound signal effects
# ----------------------------------------------------------------------


@dataclass(frozen=True)
class EmitNavigate:
    """Ask main_window to move to the next/previous post; adapter
    emits `self.navigate.emit(direction)`.
    """

    direction: int


@dataclass(frozen=True)
class EmitPlayNextRequested:
    """Report to main_window that the video ended while in Loop=Next
    mode; adapter emits `self.play_next_requested.emit()`.
    """


@dataclass(frozen=True)
class EmitClosed:
    """Report to main_window that the popout is closing. Fired on
    entry to the Closing state; adapter emits `self.closed.emit()`.
    """
|
||||
|
||||
|
||||
# Type alias for the union of all effects the reducer can emit; the Qt
# adapter executes each one (see the per-class docstrings for what the
# adapter does with it).
Effect = Union[
    LoadImage,
    LoadVideo,
    StopMedia,
    ApplyMute,
    ApplyVolume,
    ApplyLoopMode,
    SeekVideoTo,
    TogglePlay,
    FitWindowToContent,
    EnterFullscreen,
    ExitFullscreen,
    EmitNavigate,
    EmitPlayNextRequested,
    EmitClosed,
]
|
||||
|
||||
|
||||
# Explicit export list: every effect dataclass plus the `Effect` union.
__all__ = [
    "LoadImage",
    "LoadVideo",
    "StopMedia",
    "ApplyMute",
    "ApplyVolume",
    "ApplyLoopMode",
    "SeekVideoTo",
    "TogglePlay",
    "FitWindowToContent",
    "EnterFullscreen",
    "ExitFullscreen",
    "EmitNavigate",
    "EmitPlayNextRequested",
    "EmitClosed",
    "Effect",
]
|
||||
245
booru_viewer/gui/popout/hyprland.py
Normal file
245
booru_viewer/gui/popout/hyprland.py
Normal file
@ -0,0 +1,245 @@
|
||||
"""Hyprland IPC helpers for the popout window.
|
||||
|
||||
Module-level functions that wrap `hyprctl` for window state queries
|
||||
and dispatches. Extracted from `popout/window.py` so the popout's Qt
|
||||
adapter can call them through a clean import surface and so the state
|
||||
machine refactor's `FitWindowToContent` effect handler has a single
|
||||
place to find them.
|
||||
|
||||
This module DOES touch `subprocess` and `os.environ`, so it's gated
|
||||
behind the same `HYPRLAND_INSTANCE_SIGNATURE` env var check the
|
||||
legacy code used. Off-Hyprland systems no-op or return None at every
|
||||
entry point.
|
||||
|
||||
The popout adapter calls these helpers directly; there are no
|
||||
`FullscreenPreview._hyprctl_*` shims anymore. Every env-var gate
|
||||
for opt-out (`BOORU_VIEWER_NO_HYPR_RULES`, popout-specific aspect
|
||||
lock) is implemented inside these functions so every call site
|
||||
gets the same behavior.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
from ...core.config import hypr_rules_enabled, popout_aspect_lock_enabled
|
||||
|
||||
|
||||
def _on_hyprland() -> bool:
|
||||
"""True if running under Hyprland (env signature present)."""
|
||||
return bool(os.environ.get("HYPRLAND_INSTANCE_SIGNATURE"))
|
||||
|
||||
|
||||
def get_window(window_title: str) -> dict | None:
    """Find the Hyprland client whose `title` equals *window_title*.

    Queries `hyprctl clients -j` and scans the JSON list. Returns None
    when not on Hyprland, when the query fails or times out, or when no
    client carries that title. (The legacy
    `FullscreenPreview._hyprctl_get_window` was a 1-line shim around
    this.)
    """
    if not _on_hyprland():
        return None
    try:
        proc = subprocess.run(
            ["hyprctl", "clients", "-j"],
            capture_output=True, text=True, timeout=1,
        )
        for client in json.loads(proc.stdout):
            if client.get("title") == window_title:
                return client
    except Exception:
        # hyprctl missing, timeout, or malformed JSON — treat as "not found".
        pass
    return None
|
||||
|
||||
|
||||
def resize(window_title: str, w: int, h: int, animate: bool = False) -> None:
    """Resize the popout through Hyprland and (re)assert its aspect lock.

    No-op off-Hyprland. Tiled windows never get the pixel resize (it
    would fight the tiling layout) but still receive the
    keep_aspect_ratio setprop when the aspect lock is enabled.

    Two independent env-var gates (see core/config.py):
      - BOORU_VIEWER_NO_HYPR_RULES skips the resize and no_anim parts
      - BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK skips the keep_aspect_ratio
        setprops

    Either, both, or neither may be set. The aspect-ratio carve-out
    lets a ricer opt out of in-code window management while keeping
    mpv playback at the right shape — or the reverse.
    """
    if not _on_hyprland():
        return
    rules_on = hypr_rules_enabled()
    aspect_on = popout_aspect_lock_enabled()
    if not (rules_on or aspect_on):
        # Both gates off — nothing to dispatch.
        return
    win = get_window(window_title)
    if not win:
        return
    addr = win.get("address")
    if not addr:
        return
    cmds: list[str] = []
    # no_anim comes first in both branches, so hoist it.
    if rules_on and not animate:
        cmds.append(f"dispatch setprop address:{addr} no_anim 1")
    if win.get("floating"):
        # Drop the aspect lock, resize, then re-engage the lock.
        if aspect_on:
            cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 0")
        if rules_on:
            cmds.append(f"dispatch resizewindowpixel exact {w} {h},address:{addr}")
        if aspect_on:
            cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 1")
    else:
        # Tiled — skip the resize, only (optionally) assert the lock.
        if aspect_on:
            cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 1")
    if cmds:
        _dispatch_batch(cmds)
|
||||
|
||||
|
||||
def resize_and_move(
    window_title: str,
    w: int,
    h: int,
    x: int,
    y: int,
    win: dict | None = None,
    animate: bool = False,
) -> None:
    """Resize and move the popout atomically in one hyprctl batch.

    Only floating windows are touched; off-Hyprland this is a no-op.
    BOORU_VIEWER_NO_HYPR_RULES gates the resize/move/no_anim commands,
    BOORU_VIEWER_NO_POPOUT_ASPECT_LOCK gates the keep_aspect_ratio
    commands.

    Callers may pass *win* (a client dict from `get_window`) to skip
    the extra `hyprctl clients` subprocess — only the address is read
    from it. Threading it through cuts the per-fit subprocess count
    from three to one and removes ~6ms of GUI-thread blocking every
    time the popout fits to new content, preserving the optimization
    the legacy `FullscreenPreview._hyprctl_resize_and_move` had.
    """
    if not _on_hyprland():
        return
    rules_on = hypr_rules_enabled()
    aspect_on = popout_aspect_lock_enabled()
    if not (rules_on or aspect_on):
        return
    target = win if win is not None else get_window(window_title)
    if not target or not target.get("floating"):
        return
    addr = target.get("address")
    if not addr:
        return
    cmds: list[str] = []
    if rules_on and not animate:
        cmds.append(f"dispatch setprop address:{addr} no_anim 1")
    # Unlock aspect, resize+move, re-lock aspect — in that order.
    if aspect_on:
        cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 0")
    if rules_on:
        cmds.append(f"dispatch resizewindowpixel exact {w} {h},address:{addr}")
        cmds.append(f"dispatch movewindowpixel exact {x} {y},address:{addr}")
    if aspect_on:
        cmds.append(f"dispatch setprop address:{addr} keep_aspect_ratio 1")
    if cmds:
        _dispatch_batch(cmds)
|
||||
|
||||
|
||||
def _dispatch_batch(cmds: list[str]) -> None:
    """Fire-and-forget `hyprctl --batch` with the given commands.

    `subprocess.Popen` (not `run`) so the call returns immediately
    without waiting on hyprctl — the same fire-and-forget pattern the
    previous popout code used to keep fit dispatches off the GUI
    thread's critical path.
    """
    batch = " ; ".join(cmds)
    try:
        subprocess.Popen(
            ["hyprctl", "--batch", batch],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    except FileNotFoundError:
        # hyprctl not installed — silently skip.
        pass
|
||||
|
||||
|
||||
def get_monitor_available_rect(monitor_id: int | None = None) -> tuple[int, int, int, int] | None:
    """Return (x, y, w, h) of a monitor's usable area.

    "Usable" is the monitor rect minus Hyprland's exclusive zones
    (Waybar, etc.), reported in the ``reserved`` field as
    [left, top, right, bottom]. Falls back to the first monitor when
    *monitor_id* is None or not found. Returns None off-Hyprland or
    when the hyprctl query fails.
    """
    if not _on_hyprland():
        return None
    try:
        proc = subprocess.run(
            ["hyprctl", "monitors", "-j"],
            capture_output=True, text=True, timeout=1,
        )
        monitors = json.loads(proc.stdout)
        if not monitors:
            return None
        mon = None
        if monitor_id is not None:
            mon = next((m for m in monitors if m.get("id") == monitor_id), None)
        if mon is None:
            mon = monitors[0]
        # reserved: [left, top, right, bottom]
        left, top, right, bottom = mon.get("reserved", [0, 0, 0, 0])[:4]
        x = mon.get("x", 0) + left
        y = mon.get("y", 0) + top
        w = mon.get("width", 0) - left - right
        h = mon.get("height", 0) - top - bottom
        return (x, y, w, h)
    except Exception:
        return None
|
||||
|
||||
|
||||
def settiled(window_title: str) -> None:
    """Push a floating popout back into Hyprland's tiling layout.

    Used on reopen when the popout was tiled at close: the windowrule
    opens it floating, so we dispatch `settiled` to re-tile it.
    No-op off-Hyprland, when BOORU_VIEWER_NO_HYPR_RULES is set (so
    ricers with their own rules keep control), or when the window is
    already tiled.
    """
    if not (_on_hyprland() and hypr_rules_enabled()):
        return
    win = get_window(window_title)
    if not win:
        return
    addr = win.get("address")
    if not addr or not win.get("floating"):
        return
    _dispatch_batch([f"dispatch settiled address:{addr}"])
|
||||
|
||||
|
||||
# Public surface of the module — keeps star-imports and API tooling
# limited to the real entry points (the `_`-prefixed helpers stay private).
__all__ = [
    "get_window",
    "get_monitor_available_rect",
    "resize",
    "resize_and_move",
    "settiled",
]
|
||||
1038
booru_viewer/gui/popout/state.py
Normal file
1038
booru_viewer/gui/popout/state.py
Normal file
File diff suppressed because it is too large
Load Diff
62
booru_viewer/gui/popout/viewport.py
Normal file
62
booru_viewer/gui/popout/viewport.py
Normal file
@ -0,0 +1,62 @@
|
||||
"""Popout viewport math: persistent intent + drift tolerance."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import NamedTuple
|
||||
|
||||
|
||||
class Viewport(NamedTuple):
    """The user's persistent intent for popout content placement and size.

    Three numbers plus an anchor mode — deliberately no aspect ratio.
    Aspect belongs to the currently-displayed post and is recomputed
    from actual content on every navigation; the viewport itself stays
    put, and the window rect is the derived projection
    (Viewport, content_aspect) -> (x, y, w, h).

    `long_side` is the binding edge length: it becomes width for
    landscape content and height for portrait. That symmetry across
    orientations is what breaks the width-anchor ratchet the previous
    `_fit_to_content` suffered from.

    `anchor` selects the point that stays fixed across navigations as
    the window resizes with aspect: ``"center"`` (default) pins the
    window center; ``"tl"``/``"tr"``/``"bl"``/``"br"`` pin that corner,
    so the window grows/shrinks away from it. The user may still drag
    the window anywhere — the anchor shapes resize direction only, not
    screen position.

    `center_x`/`center_y` hold the anchor point's coordinates: the
    window center in center mode, the pinned corner in corner modes.
    """
    center_x: float
    center_y: float
    long_side: float
    anchor: str = "center"
|
||||
|
||||
|
||||
def anchor_point(x: float, y: float, w: float, h: float, anchor: str) -> tuple[float, float]:
    """Return the coordinates of *anchor* on the rect (x, y, w, h).

    Corner modes "tl"/"tr"/"bl"/"br" yield that corner; any other
    value — including the default "center" — yields the rect center.
    """
    corners = {
        "tl": (x, y),
        "tr": (x + w, y),
        "bl": (x, y + h),
        "br": (x + w, y + h),
    }
    return corners.get(anchor, (x + w / 2, y + h / 2))
|
||||
|
||||
|
||||
# Maximum drift (in pixels) between our last-dispatched window rect and
# the current Hyprland-reported rect that we still treat as "no user
# action happened." Anything within this tolerance is absorbed
# (Hyprland gap rounding, subpixel accumulation, decoration
# accounting). Anything beyond it is treated as "the user dragged or
# resized the window externally" and the persistent viewport gets
# updated from current state.
#
# 2px is small enough not to false-positive on real user drags (which
# are always tens of pixels minimum) and large enough to absorb the
# 1-2px per-nav drift that compounds across many navigations.
_DRIFT_TOLERANCE = 2
|
||||
1800
booru_viewer/gui/popout/window.py
Normal file
1800
booru_viewer/gui/popout/window.py
Normal file
File diff suppressed because it is too large
Load Diff
212
booru_viewer/gui/popout_controller.py
Normal file
212
booru_viewer/gui/popout_controller.py
Normal file
@ -0,0 +1,212 @@
|
||||
"""Popout (fullscreen preview) lifecycle, state sync, and geometry persistence."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
# -- Pure functions (tested in tests/gui/test_popout_controller.py) --
|
||||
|
||||
|
||||
def build_video_sync_dict(
    volume: int,
    mute: bool,
    autoplay: bool,
    loop_state: int,
    position_ms: int,
) -> dict:
    """Build the video-state transfer dict used on popout open/close."""
    keys = ("volume", "mute", "autoplay", "loop_state", "position_ms")
    values = (volume, mute, autoplay, loop_state, position_ms)
    return dict(zip(keys, values))
|
||||
|
||||
|
||||
# -- Controller --
|
||||
|
||||
|
||||
class PopoutController:
    """Owns popout lifecycle, state sync, and geometry persistence.

    Mediates between `BooruApp` (embedded preview, grid, info panel,
    DB) and the detached `FullscreenPreview` popout window: opening
    and closing the popout, carrying video playback state across in
    both directions, and saving/restoring window geometry via DB
    settings.
    """

    def __init__(self, app: BooruApp) -> None:
        self._app = app
        # Live FullscreenPreview instance, or None while closed.
        self._fullscreen_window = None
        self._popout_active = False
        # Main-window UI state snapshotted in open() so on_closed()
        # can restore the layout exactly.
        self._info_was_visible = False
        self._right_splitter_sizes: list[int] = []

    @property
    def window(self):
        # Current FullscreenPreview, or None when no popout is open.
        return self._fullscreen_window

    @property
    def is_active(self) -> bool:
        return self._popout_active

    # -- Open --

    def open(self) -> None:
        """Open the popout for the media currently in the embedded preview.

        Snapshots the preview's state (path, info text, video
        position), hides the embedded preview, constructs a
        `FullscreenPreview` wired to the app's controllers, and hands
        the media over — resuming video playback at the captured
        position once the popout's media is ready.
        """
        path = self._app._preview._current_path
        if not path:
            return  # nothing is being previewed
        info = self._app._preview._info_label.text()
        video_pos = 0
        # NOTE(review): stack page 1 appears to be the video page
        # (a video position is read there) — confirm against the
        # preview widget's stack layout.
        if self._app._preview._stack.currentIndex() == 1:
            video_pos = self._app._preview._video_player.get_position_ms()
        self._popout_active = True
        # Snapshot layout, then collapse the embedded preview while the
        # popout owns the media.
        self._info_was_visible = self._app._info_panel.isVisible()
        self._right_splitter_sizes = self._app._right_splitter.sizes()
        self._app._preview.clear()
        self._app._preview.hide()
        self._app._info_panel.show()
        self._app._right_splitter.setSizes([0, 0, 1000])
        # clear() wiped the path; put it back so on_closed() can hand
        # the media back to the embedded preview.
        self._app._preview._current_path = path
        idx = self._app._grid.selected_index
        if 0 <= idx < len(self._app._posts):
            self._app._info_panel.set_post(self._app._posts[idx])
        from .popout.window import FullscreenPreview
        # Load persisted popout geometry/fullscreen/tiled state into
        # FullscreenPreview's class attributes before constructing it.
        saved_geo = self._app._db.get_setting("slideshow_geometry")
        saved_fs = self._app._db.get_setting_bool("slideshow_fullscreen")
        saved_tiled = self._app._db.get_setting_bool("slideshow_tiled")
        if saved_geo:
            parts = saved_geo.split(",")
            if len(parts) == 4:
                from PySide6.QtCore import QRect
                FullscreenPreview._saved_geometry = QRect(*[int(p) for p in parts])
                FullscreenPreview._saved_fullscreen = saved_fs
                FullscreenPreview._saved_tiled = saved_tiled
            else:
                # Malformed geometry setting — fall back to fullscreen
                # defaults.
                FullscreenPreview._saved_geometry = None
                FullscreenPreview._saved_fullscreen = True
                FullscreenPreview._saved_tiled = False
        else:
            FullscreenPreview._saved_fullscreen = True
            FullscreenPreview._saved_tiled = saved_tiled
        cols = self._app._grid._flow.columns
        # NOTE(review): stack index 2 looks like the library tab, where
        # site-side actions (bookmark/blacklist) are hidden — confirm.
        show_actions = self._app._stack.currentIndex() != 2
        monitor = self._app._db.get_setting("slideshow_monitor")
        anchor = self._app._db.get_setting("popout_anchor") or "center"
        self._fullscreen_window = FullscreenPreview(grid_cols=cols, show_actions=show_actions, monitor=monitor, anchor=anchor, parent=self._app)
        # Wire popout signals to the app's controllers.
        self._fullscreen_window.navigate.connect(self.navigate)
        self._fullscreen_window.play_next_requested.connect(self._app._on_video_end_next)
        from ..core.config import library_folders
        self._fullscreen_window.set_folders_callback(library_folders)
        self._fullscreen_window.save_to_folder.connect(self._app._post_actions.save_from_preview)
        self._fullscreen_window.unsave_requested.connect(self._app._post_actions.unsave_from_preview)
        self._fullscreen_window.toggle_save_requested.connect(self._app._post_actions.toggle_save_from_preview)
        if show_actions:
            # Bookmark wiring only when site-side actions are shown.
            self._fullscreen_window.bookmark_requested.connect(self._app._post_actions.bookmark_from_preview)
            self._fullscreen_window.set_bookmark_folders_callback(self._app._db.get_folders)
            self._fullscreen_window.bookmark_to_folder.connect(self._app._post_actions.bookmark_to_folder_from_preview)
        self._fullscreen_window.blacklist_tag_requested.connect(self._app._post_actions.blacklist_tag_from_popout)
        self._fullscreen_window.blacklist_post_requested.connect(self._app._post_actions.blacklist_post_from_popout)
        self._fullscreen_window.open_in_default.connect(self._app._open_preview_in_default)
        self._fullscreen_window.open_in_browser.connect(self._app._open_preview_in_browser)
        self._fullscreen_window.closed.connect(self.on_closed)
        self._fullscreen_window.privacy_requested.connect(self._app._privacy.toggle)
        post = self._app._preview._current_post
        if post:
            self._fullscreen_window.set_post_tags(post.tag_categories, post.tag_list)
        # Carry current playback settings over into the popout.
        pv = self._app._preview._video_player
        self._fullscreen_window.sync_video_state(
            volume=pv.volume,
            mute=pv.is_muted,
            autoplay=pv.autoplay,
            loop_state=pv.loop_state,
        )
        if video_pos > 0:
            # One-shot: seek to the captured position once the popout's
            # media is loaded.
            self._fullscreen_window.connect_media_ready_once(
                lambda: self._fullscreen_window.seek_video_to(video_pos)
            )
        pre_w = post.width if post else 0
        pre_h = post.height if post else 0
        self._fullscreen_window.set_media(path, info, width=pre_w, height=pre_h)
        self.update_state()

    # -- Close --

    def on_closed(self) -> None:
        """Tear down the popout and hand state back to the main window.

        Persists the popout's final geometry/fullscreen/tiled state,
        restores the layout snapshotted in open(), copies video
        settings and position back from the popout, and reloads the
        media in the embedded preview (seeking to the popout's
        position once the player is ready).
        """
        if self._fullscreen_window:
            from .popout.window import FullscreenPreview
            # The popout left its final state in these class attrs;
            # persist them for the next session.
            fs = FullscreenPreview._saved_fullscreen
            geo = FullscreenPreview._saved_geometry
            tiled = FullscreenPreview._saved_tiled
            self._app._db.set_setting("slideshow_fullscreen", "1" if fs else "0")
            self._app._db.set_setting("slideshow_tiled", "1" if tiled else "0")
            if geo:
                self._app._db.set_setting("slideshow_geometry", f"{geo.x()},{geo.y()},{geo.width()},{geo.height()}")
        # Restore the layout snapshotted in open().
        self._app._preview.show()
        if not self._info_was_visible:
            self._app._info_panel.hide()
        if self._right_splitter_sizes:
            self._app._right_splitter.setSizes(self._right_splitter_sizes)
        self._popout_active = False
        video_pos = 0
        if self._fullscreen_window:
            # Copy playback settings/position back to the embedded player.
            vstate = self._fullscreen_window.get_video_state()
            pv = self._app._preview._video_player
            pv.volume = vstate["volume"]
            pv.is_muted = vstate["mute"]
            pv.autoplay = vstate["autoplay"]
            pv.loop_state = vstate["loop_state"]
            video_pos = vstate["position_ms"]
        path = self._app._preview._current_path
        info = self._app._preview._info_label.text()
        self._fullscreen_window = None
        if path:
            if video_pos > 0:
                # One-shot media_ready handler: seek, then disconnect
                # itself so later loads start from zero.
                def _seek_preview():
                    self._app._preview._video_player.seek_to_ms(video_pos)
                    try:
                        self._app._preview._video_player.media_ready.disconnect(_seek_preview)
                    except RuntimeError:
                        # Already disconnected — Qt raises RuntimeError.
                        pass
                self._app._preview._video_player.media_ready.connect(_seek_preview)
            self._app._preview.set_media(path, info)

    # -- Navigation --

    def navigate(self, direction: int) -> None:
        # Delegate next/previous navigation to the app.
        # NOTE(review): direction sign convention not visible here —
        # see BooruApp._navigate_preview.
        self._app._navigate_preview(direction)

    # -- State sync --

    def update_media(self, path: str, info: str) -> None:
        """Sync the popout with new media from browse/bookmark/library."""
        if self._fullscreen_window and self._fullscreen_window.isVisible():
            # Stop the hidden embedded player so audio doesn't double up.
            self._app._preview._video_player.stop()
            cp = self._app._preview._current_post
            w = cp.width if cp else 0
            h = cp.height if cp else 0
            self._fullscreen_window.set_media(path, info, width=w, height=h)
            show_full = self._app._stack.currentIndex() != 2
            self._fullscreen_window.set_toolbar_visibility(
                bookmark=show_full,
                save=True,
                bl_tag=show_full,
                bl_post=show_full,
            )
            self.update_state()

    def update_state(self) -> None:
        """Update popout button states by mirroring the embedded preview."""
        if not self._fullscreen_window:
            return
        self._fullscreen_window.update_state(
            self._app._preview._is_bookmarked,
            self._app._preview._is_saved,
        )
        post = self._app._preview._current_post
        if post is not None:
            self._fullscreen_window.set_post_tags(
                post.tag_categories or {}, post.tag_list
            )
|
||||
606
booru_viewer/gui/post_actions.py
Normal file
606
booru_viewer/gui/post_actions.py
Normal file
@ -0,0 +1,606 @@
|
||||
"""Bookmark, save/library, batch download, and blacklist operations."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..core.cache import download_image
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
# Pure functions

def is_batch_message(msg: str) -> bool:
    """Detect batch progress messages like 'Saved 3/10 to Unfiled'.

    Heuristic: there is a '/' and one of the last two characters
    before it is a digit.
    """
    if "/" not in msg:
        return False
    prefix = msg.split("/")[0]
    return any(ch.isdigit() for ch in prefix[-2:])
|
||||
|
||||
def is_in_library(path: Path, saved_root: Path) -> bool:
    """True when *path* lives under the library root *saved_root*."""
    return path.is_relative_to(saved_root)
|
||||
|
||||
|
||||
class PostActionsController:
|
||||
def __init__(self, app: BooruApp) -> None:
|
||||
self._app = app
|
||||
self._batch_dest: Path | None = None
|
||||
|
||||
def on_bookmark_error(self, e: str) -> None:
|
||||
self._app._status.showMessage(f"Error: {e}")
|
||||
|
||||
def is_post_saved(self, post_id: int) -> bool:
|
||||
return self._app._db.is_post_in_library(post_id)
|
||||
|
||||
def _maybe_unbookmark(self, post) -> None:
|
||||
"""Remove the bookmark for *post* if the unbookmark-on-save setting is on.
|
||||
|
||||
Handles DB removal, grid thumbnail dot, preview state, bookmarks
|
||||
tab refresh, and popout sync in one place so every save path
|
||||
(single, bulk, Save As, batch download) can call it.
|
||||
"""
|
||||
if not self._app._db.get_setting_bool("unbookmark_on_save"):
|
||||
return
|
||||
site_id = (
|
||||
self._app._preview._current_site_id
|
||||
or self._app._site_combo.currentData()
|
||||
)
|
||||
if not site_id or not self._app._db.is_bookmarked(site_id, post.id):
|
||||
return
|
||||
self._app._db.remove_bookmark(site_id, post.id)
|
||||
# Update grid thumbnail bookmark dot
|
||||
for i, p in enumerate(self._app._posts):
|
||||
if p.id == post.id and i < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[i].set_bookmarked(False)
|
||||
break
|
||||
# Update preview and popout
|
||||
if (self._app._preview._current_post
|
||||
and self._app._preview._current_post.id == post.id):
|
||||
self._app._preview.update_bookmark_state(False)
|
||||
self._app._popout_ctrl.update_state()
|
||||
# Refresh bookmarks tab if visible
|
||||
if self._app._stack.currentIndex() == 1:
|
||||
self._app._bookmarks_view.refresh()
|
||||
|
||||
def get_preview_post(self):
|
||||
idx = self._app._grid.selected_index
|
||||
if 0 <= idx < len(self._app._posts):
|
||||
return self._app._posts[idx], idx
|
||||
if self._app._preview._current_post:
|
||||
return self._app._preview._current_post, -1
|
||||
return None, -1
|
||||
|
||||
def bookmark_from_preview(self) -> None:
|
||||
post, idx = self.get_preview_post()
|
||||
if not post:
|
||||
return
|
||||
site_id = self._app._preview._current_site_id or self._app._site_combo.currentData()
|
||||
if not site_id:
|
||||
return
|
||||
if idx >= 0:
|
||||
self.toggle_bookmark(idx)
|
||||
else:
|
||||
if self._app._db.is_bookmarked(site_id, post.id):
|
||||
self._app._db.remove_bookmark(site_id, post.id)
|
||||
else:
|
||||
from ..core.cache import cached_path_for
|
||||
cached = cached_path_for(post.file_url)
|
||||
self._app._db.add_bookmark(
|
||||
site_id=site_id, post_id=post.id,
|
||||
file_url=post.file_url, preview_url=post.preview_url or "",
|
||||
tags=post.tags, rating=post.rating, score=post.score,
|
||||
source=post.source, cached_path=str(cached) if cached.exists() else None,
|
||||
tag_categories=post.tag_categories,
|
||||
)
|
||||
bookmarked = bool(self._app._db.is_bookmarked(site_id, post.id))
|
||||
self._app._preview.update_bookmark_state(bookmarked)
|
||||
self._app._popout_ctrl.update_state()
|
||||
if self._app._stack.currentIndex() == 1:
|
||||
self._app._bookmarks_view.refresh()
|
||||
|
||||
def bookmark_to_folder_from_preview(self, folder: str) -> None:
|
||||
"""Bookmark the current preview post into a specific bookmark folder.
|
||||
|
||||
Triggered by the toolbar Bookmark-as submenu, which only shows
|
||||
when the post is not yet bookmarked -- so this method only handles
|
||||
the create path, never the move/remove paths. Empty string means
|
||||
Unfiled. Brand-new folder names get added to the DB folder list
|
||||
first so the bookmarks tab combo immediately shows them.
|
||||
"""
|
||||
post, idx = self.get_preview_post()
|
||||
if not post:
|
||||
return
|
||||
site_id = self._app._preview._current_site_id or self._app._site_combo.currentData()
|
||||
if not site_id:
|
||||
return
|
||||
target = folder if folder else None
|
||||
if target and target not in self._app._db.get_folders():
|
||||
try:
|
||||
self._app._db.add_folder(target)
|
||||
except ValueError as e:
|
||||
self._app._status.showMessage(f"Invalid folder name: {e}")
|
||||
return
|
||||
if idx >= 0:
|
||||
# In the grid -- go through toggle_bookmark so the grid
|
||||
# thumbnail's bookmark badge updates via on_bookmark_done.
|
||||
self.toggle_bookmark(idx, target)
|
||||
else:
|
||||
# Preview-only post (e.g. opened from the bookmarks tab while
|
||||
# browse is empty). Inline the add -- no grid index to update.
|
||||
from ..core.cache import cached_path_for
|
||||
cached = cached_path_for(post.file_url)
|
||||
self._app._db.add_bookmark(
|
||||
site_id=site_id, post_id=post.id,
|
||||
file_url=post.file_url, preview_url=post.preview_url or "",
|
||||
tags=post.tags, rating=post.rating, score=post.score,
|
||||
source=post.source,
|
||||
cached_path=str(cached) if cached.exists() else None,
|
||||
folder=target,
|
||||
tag_categories=post.tag_categories,
|
||||
)
|
||||
where = target or "Unfiled"
|
||||
self._app._status.showMessage(f"Bookmarked #{post.id} to {where}")
|
||||
self._app._preview.update_bookmark_state(True)
|
||||
self._app._popout_ctrl.update_state()
|
||||
# Refresh bookmarks tab if visible so the new entry appears.
|
||||
if self._app._stack.currentIndex() == 1:
|
||||
self._app._bookmarks_view.refresh()
|
||||
|
||||
def save_from_preview(self, folder: str) -> None:
|
||||
post, idx = self.get_preview_post()
|
||||
if post:
|
||||
target = folder if folder else None
|
||||
self.save_to_library(post, target)
|
||||
|
||||
def toggle_save_from_preview(self) -> None:
|
||||
"""Toggle library save: unsave if already saved, save to Unfiled otherwise."""
|
||||
post, _ = self.get_preview_post()
|
||||
if not post:
|
||||
return
|
||||
if self.is_post_saved(post.id):
|
||||
self.unsave_from_preview()
|
||||
else:
|
||||
self.save_from_preview("")
|
||||
|
||||
def unsave_from_preview(self) -> None:
|
||||
post, idx = self.get_preview_post()
|
||||
if not post:
|
||||
return
|
||||
# delete_from_library walks every library folder by post id and
|
||||
# deletes every match in one call -- no folder hint needed. Pass
|
||||
# db so templated filenames also get unlinked AND the meta row
|
||||
# gets cleaned up.
|
||||
from ..core.cache import delete_from_library
|
||||
deleted = delete_from_library(post.id, db=self._app._db)
|
||||
if deleted:
|
||||
self._app._status.showMessage(f"Removed #{post.id} from library")
|
||||
self._app._preview.update_save_state(False)
|
||||
# Update browse grid thumbnail saved dot
|
||||
for i, p in enumerate(self._app._posts):
|
||||
if p.id == post.id and i < len(self._app._grid._thumbs):
|
||||
self._app._grid._thumbs[i].set_saved_locally(False)
|
||||
break
|
||||
# Update bookmarks grid thumbnail
|
||||
bm_grid = self._app._bookmarks_view._grid
|
||||
for i, fav in enumerate(self._app._bookmarks_view._bookmarks):
|
||||
if fav.post_id == post.id and i < len(bm_grid._thumbs):
|
||||
bm_grid._thumbs[i].set_saved_locally(False)
|
||||
break
|
||||
# Refresh the active tab's grid so the unsaved post disappears
|
||||
# from library or loses its saved dot on bookmarks.
|
||||
if self._app._stack.currentIndex() == 2:
|
||||
self._app._library_view.refresh()
|
||||
elif self._app._stack.currentIndex() == 1:
|
||||
self._app._bookmarks_view.refresh()
|
||||
else:
|
||||
self._app._status.showMessage(f"#{post.id} not in library")
|
||||
self._app._popout_ctrl.update_state()
|
||||
|
||||
def blacklist_tag_from_popout(self, tag: str) -> None:
|
||||
from PySide6.QtWidgets import QMessageBox
|
||||
reply = QMessageBox.question(
|
||||
self._app, "Blacklist Tag",
|
||||
f"Blacklist tag \"{tag}\"?\nPosts with this tag will be hidden.",
|
||||
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
|
||||
)
|
||||
if reply != QMessageBox.StandardButton.Yes:
|
||||
return
|
||||
self._app._db.add_blacklisted_tag(tag)
|
||||
self._app._db.set_setting("blacklist_enabled", "1")
|
||||
self._app._status.showMessage(f"Blacklisted: {tag}")
|
||||
self._app._search_ctrl.remove_blacklisted_from_grid(tag=tag)
|
||||
|
||||
def blacklist_post_from_popout(self) -> None:
    """Confirm with the user, then blacklist the previewed post by file URL."""
    post, _idx = self.get_preview_post()
    if not post:
        return
    from PySide6.QtWidgets import QMessageBox
    answer = QMessageBox.question(
        self._app, "Blacklist Post",
        f"Blacklist post #{post.id}?\nThis post will be hidden from results.",
        QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
    )
    if answer != QMessageBox.StandardButton.Yes:
        return
    self._app._db.add_blacklisted_post(post.file_url)
    self._app._status.showMessage(f"Post #{post.id} blacklisted")
    self._app._search_ctrl.remove_blacklisted_from_grid(post_url=post.file_url)
|
||||
|
||||
def toggle_bookmark(self, index: int, folder: str | None = None) -> None:
    """Flip the bookmark state of the post at `index`.

    When adding, the new bookmark is filed under `folder` (None means
    Unfiled). When removing, `folder` is ignored -- folder membership
    disappears together with the bookmark itself.
    """
    post = self._app._posts[index]
    site_id = self._app._site_combo.currentData()
    if not site_id:
        return

    db = self._app._db
    if db.is_bookmarked(site_id, post.id):
        # Removal is synchronous: a DB delete plus local UI updates.
        db.remove_bookmark(site_id, post.id)
        self._app._search_ctrl.invalidate_lookup_caches()
        self._app._status.showMessage(f"Unbookmarked #{post.id}")
        thumbs = self._app._grid._thumbs
        if 0 <= index < len(thumbs):
            thumbs[index].set_bookmarked(False)
        return

    # Adding downloads the full image first, so it runs asynchronously.
    self._app._status.showMessage(f"Bookmarking #{post.id}...")

    async def _add_bookmark():
        try:
            path = await download_image(post.file_url)
            db.add_bookmark(
                site_id=site_id,
                post_id=post.id,
                file_url=post.file_url,
                preview_url=post.preview_url,
                tags=post.tags,
                rating=post.rating,
                score=post.score,
                source=post.source,
                cached_path=str(path),
                folder=folder,
                tag_categories=post.tag_categories,
            )
            where = folder or "Unfiled"
            self._app._signals.bookmark_done.emit(index, f"Bookmarked #{post.id} to {where}")
        except Exception as e:
            self._app._signals.bookmark_error.emit(str(e))

    self._app._run_async(_add_bookmark)
|
||||
|
||||
def bulk_bookmark(self, indices: list[int], posts: list) -> None:
    """Bookmark every (index, post) pair, skipping already-bookmarked posts."""
    site_id = self._app._site_combo.currentData()
    if not site_id:
        return
    total = len(posts)
    self._app._status.showMessage(f"Bookmarking {total}...")

    async def _worker():
        for n, (idx, post) in enumerate(zip(indices, posts), start=1):
            if self._app._db.is_bookmarked(site_id, post.id):
                continue
            try:
                path = await download_image(post.file_url)
                self._app._db.add_bookmark(
                    site_id=site_id, post_id=post.id,
                    file_url=post.file_url, preview_url=post.preview_url,
                    tags=post.tags, rating=post.rating, score=post.score,
                    source=post.source, cached_path=str(path),
                    tag_categories=post.tag_categories,
                )
                self._app._signals.bookmark_done.emit(idx, f"Bookmarked {n}/{total}")
            except Exception as e:
                log.warning(f"Operation failed: {e}")
        self._app._signals.batch_done.emit(f"Bookmarked {total} posts")

    self._app._run_async(_worker)
|
||||
|
||||
def bulk_save(self, indices: list[int], posts: list, folder: str | None) -> None:
    """Bulk-save the selected posts into the library (optionally a subfolder).

    Each iteration goes through save_post_file with one shared in_flight
    set, so batches whose filename template collides (e.g. %artist% on a
    page full of one artist's posts) receive sequential _1, _2, _3
    suffixes instead of clobbering each other.
    """
    from ..core.config import saved_dir, saved_folder_dir
    from ..core.library_save import save_post_file

    where = folder or "Unfiled"
    total = len(posts)
    self._app._status.showMessage(f"Saving {total} to {where}...")
    try:
        dest_dir = saved_folder_dir(folder) if folder else saved_dir()
    except ValueError as e:
        self._app._status.showMessage(f"Invalid folder name: {e}")
        return

    in_flight: set[str] = set()

    async def _worker():
        fetcher = self._app._get_category_fetcher()
        for n, (idx, post) in enumerate(zip(indices, posts), start=1):
            try:
                src = Path(await download_image(post.file_url))
                await save_post_file(src, post, dest_dir, self._app._db, in_flight, category_fetcher=fetcher)
                self.copy_library_thumb(post)
                self._app._signals.bookmark_done.emit(idx, f"Saved {n}/{total} to {where}")
                self._maybe_unbookmark(post)
            except Exception as e:
                log.warning(f"Bulk save #{post.id} failed: {e}")
        self._app._signals.batch_done.emit(f"Saved {total} to {where}")

    self._app._run_async(_worker)
|
||||
|
||||
def bulk_unsave(self, indices: list[int], posts: list) -> None:
    """Bulk-remove the selected posts from the library.

    Synchronous mirror of `bulk_save`: delete_from_library is a pure
    filesystem/DB operation, no httpx round-trip. Only the library is
    touched; bookmarks are a separate DB-backed concept and stay intact.
    The saved-locally dot clears for every selected slot whether or not
    the file was actually present -- the user's intent is "make these
    not-saved", and a missing file already is.
    """
    from ..core.cache import delete_from_library

    for post in posts:
        delete_from_library(post.id, db=self._app._db)
    grid = self._app._grid
    for idx in indices:
        if 0 <= idx < len(grid._thumbs):
            grid._thumbs[idx].set_saved_locally(False)
    grid._clear_multi()
    self._app._status.showMessage(f"Removed {len(posts)} from library")
    if self._app._stack.currentIndex() == 2:
        self._app._library_view.refresh()
    self._app._popout_ctrl.update_state()
|
||||
|
||||
def ensure_bookmarked(self, post) -> None:
    """Bookmark `post` unless it already is (best-effort, asynchronous)."""
    site_id = self._app._site_combo.currentData()
    if not site_id or self._app._db.is_bookmarked(site_id, post.id):
        return

    async def _add():
        try:
            path = await download_image(post.file_url)
            self._app._db.add_bookmark(
                site_id=site_id,
                post_id=post.id,
                file_url=post.file_url,
                preview_url=post.preview_url,
                tags=post.tags,
                rating=post.rating,
                score=post.score,
                source=post.source,
                cached_path=str(path),
            )
        except Exception as e:
            # Best-effort: failures are logged, never surfaced to the UI.
            log.warning(f"Operation failed: {e}")

    self._app._run_async(_add)
|
||||
|
||||
def batch_download_posts(self, posts: list, dest: str) -> None:
    """Multi-select "Download All" entry point.

    Thin wrapper over batch_download_to so the in_flight set, the
    library_meta write, and the saved-dots refresh share one code path.
    """
    self.batch_download_to(posts, Path(dest))
|
||||
|
||||
def batch_download_to(self, posts: list, dest_dir: Path) -> None:
    """Download `posts` into `dest_dir` through the unified save flow.

    A single shared in_flight set gives collision-prone filename
    templates sequential _1, _2 suffixes within the batch. `dest_dir`
    is stashed on `self._batch_dest` so on_batch_progress and
    on_batch_done can decide whether the destination lies inside the
    library and the saved-dots need refreshing. save_post_file writes
    library_meta automatically when dest_dir is inside saved_dir() --
    this closed the v0.2.3 latent bug where batch downloads into a
    library folder left files unregistered.
    """
    from ..core.library_save import save_post_file

    self._batch_dest = dest_dir
    total = len(posts)
    self._app._status.showMessage(f"Downloading {total} images...")
    in_flight: set[str] = set()

    async def _worker():
        fetcher = self._app._get_category_fetcher()
        for n, post in enumerate(posts, start=1):
            try:
                src = Path(await download_image(post.file_url))
                await save_post_file(src, post, dest_dir, self._app._db, in_flight, category_fetcher=fetcher)
                self._app._signals.batch_progress.emit(n, total, post.id)
                self._maybe_unbookmark(post)
            except Exception as e:
                log.warning(f"Batch #{post.id} failed: {e}")
        self._app._signals.batch_done.emit(f"Downloaded {total} images to {dest_dir}")

    self._app._run_async(_worker)
|
||||
|
||||
def batch_download(self) -> None:
    """Prompt for a destination folder, then download every current post."""
    if not self._app._posts:
        self._app._status.showMessage("No posts to download")
        return
    from .dialogs import select_directory
    dest = select_directory(self._app, "Download to folder")
    if not dest:
        # User cancelled the folder picker.
        return
    self.batch_download_to(list(self._app._posts), Path(dest))
|
||||
|
||||
def is_current_bookmarked(self, index: int) -> bool:
    """True when the post at `index` is bookmarked on the current site."""
    site_id = self._app._site_combo.currentData()
    if not site_id:
        return False
    if not (0 <= index < len(self._app._posts)):
        return False
    return self._app._db.is_bookmarked(site_id, self._app._posts[index].id)
|
||||
|
||||
def copy_library_thumb(self, post) -> None:
    """Copy a post's browse thumbnail into the library thumbnail cache.

    Lets the Library tab paint the thumb without re-downloading. No-op
    when the post has no preview_url, when the source thumb isn't
    cached, or when the library copy already exists.
    """
    if not post.preview_url:
        return
    import shutil
    from ..core.cache import cached_path_for
    from ..core.config import thumbnails_dir
    thumb_src = cached_path_for(post.preview_url, thumbnails_dir())
    if not thumb_src.exists():
        return
    lib_thumb_dir = thumbnails_dir() / "library"
    lib_thumb_dir.mkdir(parents=True, exist_ok=True)
    lib_thumb = lib_thumb_dir / f"{post.id}.jpg"
    if not lib_thumb.exists():
        shutil.copy2(thumb_src, lib_thumb)
|
||||
|
||||
def save_to_library(self, post, folder: str | None) -> None:
    """Save one post into the library, optionally inside a subfolder.

    Routed through the unified save_post_file flow so the filename
    template, sequential collision suffixes, same-post idempotency, and
    the library_meta write all live in one place. Re-saving the same
    post into the same folder is a no-op; a different folder produces a
    second copy without touching the first.
    """
    from ..core.config import saved_dir, saved_folder_dir
    from ..core.library_save import save_post_file

    self._app._status.showMessage(f"Saving #{post.id} to library...")
    try:
        dest_dir = saved_folder_dir(folder) if folder else saved_dir()
    except ValueError as e:
        self._app._status.showMessage(f"Invalid folder name: {e}")
        return

    where = folder or "Unfiled"

    async def _do_save():
        try:
            src = Path(await download_image(post.file_url))
            await save_post_file(src, post, dest_dir, self._app._db, category_fetcher=self._app._get_category_fetcher())
            self.copy_library_thumb(post)
            self._app._signals.bookmark_done.emit(
                self._app._grid.selected_index,
                f"Saved #{post.id} to {where}",
            )
            self._maybe_unbookmark(post)
        except Exception as e:
            self._app._signals.bookmark_error.emit(str(e))

    self._app._run_async(_do_save)
|
||||
|
||||
def save_as(self, post) -> None:
    """Open a Save As dialog for one post and write through save_post_file.

    The dialog's default filename comes from rendering the user's
    library_filename_template against the post; the user may edit it
    before confirming. If the chosen destination lands inside
    saved_dir(), save_post_file registers a library_meta row -- a
    deliberate change from v0.2.3, where Save As never wrote meta
    regardless of destination.
    """
    from ..core.cache import cached_path_for
    from ..core.config import render_filename_template
    from ..core.library_save import save_post_file
    from .dialogs import save_file

    src = cached_path_for(post.file_url)
    if not src.exists():
        self._app._status.showMessage("Image not cached — double-click to download first")
        return
    ext = src.suffix
    template = self._app._db.get_setting("library_filename_template")
    default_name = render_filename_template(template, post, ext)
    dest = save_file(self._app, "Save Image", default_name, f"Images (*{ext})")
    if not dest:
        return
    dest_path = Path(dest)

    async def _write():
        try:
            actual = await save_post_file(
                src, post, dest_path.parent, self._app._db,
                explicit_name=dest_path.name,
                category_fetcher=self._app._get_category_fetcher(),
            )
            self._app._signals.bookmark_done.emit(
                self._app._grid.selected_index,
                f"Saved to {actual}",
            )
            self._maybe_unbookmark(post)
        except Exception as e:
            self._app._signals.bookmark_error.emit(f"Save failed: {e}")

    self._app._run_async(_write)
|
||||
|
||||
def on_bookmark_done(self, index: int, msg: str) -> None:
    """Qt slot: a bookmark/save finished -- update status bar, dots, buttons.

    Batch progress messages (e.g. "Saved 3/10 to Unfiled") skip the
    heavier preview/view updates; only the per-thumb dot flips.
    """
    self._app._status.showMessage(f"{len(self._app._posts)} results — {msg}")
    self._app._search_ctrl.invalidate_lookup_caches()
    # Detect batch operations -- skip heavy per-item updates for those.
    is_batch = is_batch_message(msg)
    saved = "Saved" in msg
    bookmarked = "Bookmarked" in msg
    thumbs = self._app._grid._thumbs
    if 0 <= index < len(thumbs):
        if saved:
            thumbs[index].set_saved_locally(True)
        if bookmarked:
            thumbs[index].set_bookmarked(True)
    if not is_batch:
        if bookmarked:
            self._app._preview.update_bookmark_state(True)
        if saved:
            self._app._preview.update_save_state(True)
            if self._app._stack.currentIndex() == 1:
                bm_grid = self._app._bookmarks_view._grid
                bm_idx = bm_grid.selected_index
                if 0 <= bm_idx < len(bm_grid._thumbs):
                    bm_grid._thumbs[bm_idx].set_saved_locally(True)
        if self._app._stack.currentIndex() == 2:
            self._app._library_view.refresh()
    self._app._popout_ctrl.update_state()
|
||||
|
||||
def on_batch_progress(self, current: int, total: int, post_id: int) -> None:
    """Qt slot: one batch file landed -- show progress, maybe light its dot.

    Only lights the browse saved-dot when the batch destination is
    inside the library. Runs per-post on the main thread (Qt slot), so
    dots appear as files land rather than all at once at the end.
    """
    self._app._status.showMessage(f"Downloading {current}/{total}...")
    dest = self._batch_dest
    if dest is None:
        return
    from ..core.config import saved_dir
    if not is_in_library(dest, saved_dir()):
        return
    thumbs = self._app._grid._thumbs
    for i, p in enumerate(self._app._posts):
        if p.id == post_id and i < len(thumbs):
            thumbs[i].set_saved_locally(True)
            break
|
||||
|
||||
def on_batch_done(self, msg: str) -> None:
    """Qt slot: a whole batch finished -- final status plus view refreshes.

    Saved-dot updates already happened incrementally in
    on_batch_progress; here we refresh the visible tab and clear the
    stashed batch destination.
    """
    self._app._status.showMessage(msg)
    self._app._popout_ctrl.update_state()
    current = self._app._stack.currentIndex()
    if current == 1:
        self._app._bookmarks_view.refresh()
    if current == 2:
        self._app._library_view.refresh()
    self._batch_dest = None
|
||||
|
||||
def on_library_files_deleted(self, post_ids: list) -> None:
    """Library deleted files -- clear saved dots on matching browse thumbs.

    Fix: membership was tested against the `post_ids` list for every
    visible post (O(n*m)); a set lookup makes the scan O(n+m) with
    identical behavior.
    """
    targets = set(post_ids)
    thumbs = self._app._grid._thumbs
    for i, p in enumerate(self._app._posts):
        if p.id in targets and i < len(thumbs):
            thumbs[i].set_saved_locally(False)
|
||||
|
||||
def refresh_browse_saved_dots(self) -> None:
    """Bookmarks changed -- rescan saved/bookmarked state for all visible
    browse grid posts.

    Fix: the site combo lookup is loop-invariant, so resolve it once
    instead of calling `currentData()` on every iteration.
    """
    site_id = self._app._site_combo.currentData()
    thumbs = self._app._grid._thumbs
    for i, p in enumerate(self._app._posts):
        if i < len(thumbs):
            thumbs[i].set_saved_locally(self.is_post_saved(p.id))
            thumbs[i].set_bookmarked(
                bool(site_id and self._app._db.is_bookmarked(site_id, p.id))
            )
|
||||
File diff suppressed because it is too large
Load Diff
444
booru_viewer/gui/preview_pane.py
Normal file
444
booru_viewer/gui/preview_pane.py
Normal file
@ -0,0 +1,444 @@
|
||||
"""Embedded preview pane: image + video, with toolbar and context menu."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from PySide6.QtCore import Qt, Signal
|
||||
from PySide6.QtGui import QPixmap, QMouseEvent, QKeyEvent
|
||||
from PySide6.QtWidgets import (
|
||||
QWidget, QVBoxLayout, QHBoxLayout, QLabel, QStackedWidget,
|
||||
QPushButton, QMenu, QInputDialog,
|
||||
)
|
||||
|
||||
from .media.constants import _is_video
|
||||
from .media.image_viewer import ImageViewer
|
||||
from .media.video_player import VideoPlayer
|
||||
|
||||
|
||||
# -- Combined Preview (image + video) --
|
||||
|
||||
class ImagePreview(QWidget):
    """Combined media preview widget -- auto-switches between image and video."""

    close_requested = Signal()
    open_in_default = Signal()
    open_in_browser = Signal()
    save_to_folder = Signal(str)
    unsave_requested = Signal()
    bookmark_requested = Signal()
    # Bookmark-as: fired when the user picks a bookmark folder from the
    # toolbar Bookmark button's submenu. Empty string means Unfiled.
    # Shaped like save_to_folder so app.py can route both the same way.
    bookmark_to_folder = Signal(str)
    blacklist_tag_requested = Signal(str)
    blacklist_post_requested = Signal()
    navigate = Signal(int)  # -1 = prev, +1 = next
    play_next_requested = Signal()  # video ended in "Next" mode (wrap-aware)
    fullscreen_requested = Signal()
|
||||
|
||||
def __init__(self, parent: QWidget | None = None) -> None:
    """Build the toolbar, the image/video stack, and the info label."""
    super().__init__(parent)
    # Library folder list source, wired via set_folders_callback.
    self._folders_callback = None
    # Bookmark folders are a separate, DB-backed namespace; the toolbar
    # Bookmark-as submenu reads them through this callback so the widget
    # stays decoupled from the Database object.
    self._bookmark_folders_callback = None
    self._current_path: str | None = None
    self._current_post = None  # Post object, set by app.py
    self._current_site_id = None  # site_id for the current post
    self._is_saved = False  # library save state, drives the context menu
    self._is_bookmarked = False  # bookmark state, drives the button submenu
    self._current_tags: dict[str, list[str]] = {}
    self._current_tag_list: list[str] = []
    self._vol_scroll_accum = 0

    root = QVBoxLayout(self)
    root.setContentsMargins(0, 0, 0, 0)
    root.setSpacing(0)

    # Action toolbar sits above the media, inside the layout. The 4px
    # horizontal margins keep the leftmost button (Bookmark) from sitting
    # flush against the preview splitter handle on the left.
    self._toolbar = QWidget()
    bar = QHBoxLayout(self._toolbar)
    bar.setContentsMargins(4, 1, 4, 1)
    bar.setSpacing(4)

    btn_side = 24

    def _icon_btn(text: str, name: str, tip: str) -> QPushButton:
        # Small square toolbar button with an object name for styling.
        b = QPushButton(text)
        b.setObjectName(name)
        b.setFixedSize(btn_side, btn_side)
        b.setToolTip(tip)
        return b

    self._bookmark_btn = _icon_btn("\u2606", "_tb_bookmark", "Bookmark (B)")
    self._bookmark_btn.clicked.connect(self._on_bookmark_clicked)
    bar.addWidget(self._bookmark_btn)

    self._save_btn = _icon_btn("\u2193", "_tb_save", "Save to library (S)")
    self._save_btn.clicked.connect(self._on_save_clicked)
    bar.addWidget(self._save_btn)

    self._bl_tag_btn = _icon_btn("\u2298", "_tb_bl_tag", "Blacklist a tag")
    self._bl_tag_btn.clicked.connect(self._show_bl_tag_menu)
    bar.addWidget(self._bl_tag_btn)

    self._bl_post_btn = _icon_btn("\u2297", "_tb_bl_post", "Blacklist this post")
    self._bl_post_btn.clicked.connect(self.blacklist_post_requested)
    bar.addWidget(self._bl_post_btn)

    bar.addStretch()

    self._popout_btn = _icon_btn("\u29c9", "_tb_popout", "Popout")
    self._popout_btn.clicked.connect(self.fullscreen_requested)
    bar.addWidget(self._popout_btn)

    self._toolbar.hide()  # shown once a post is active
    root.addWidget(self._toolbar)

    self._stack = QStackedWidget()
    root.addWidget(self._stack, stretch=1)

    # Image viewer (stack index 0)
    self._image_viewer = ImageViewer()
    self._image_viewer.setFocusPolicy(Qt.FocusPolicy.NoFocus)
    self._image_viewer.close_requested.connect(self.close_requested)
    self._stack.addWidget(self._image_viewer)

    # Video player (stack index 1). embed_controls=False keeps the
    # transport bar out of the player's own layout; we re-home it below
    # the stack so the controls sit under the media, not over it.
    self._video_player = VideoPlayer(embed_controls=False)
    self._video_player.setFocusPolicy(Qt.FocusPolicy.NoFocus)
    self._video_player.play_next.connect(self.play_next_requested)
    self._stack.addWidget(self._video_player)

    # Adopt the player's controls bar as a direct child, underneath the
    # stack. The bar exists as a child of VideoPlayer but is in no
    # layout (embed_controls=False); re-parenting it here lets it lay
    # out cleanly below the media rather than floating on top. The
    # popout uses its own VideoPlayer instance and overlays that
    # instance's bar on its own central widget.
    self._stack_video_controls = self._video_player._controls_bar
    self._stack_video_controls.setParent(self)
    root.addWidget(self._stack_video_controls)
    # Only visible while the stack shows the video player.
    self._stack_video_controls.hide()
    self._stack.currentChanged.connect(
        lambda idx: self._stack_video_controls.setVisible(idx == 1)
    )

    # Info label under the media.
    self._info_label = QLabel()
    self._info_label.setStyleSheet("padding: 2px 6px;")
    root.addWidget(self._info_label)

    self.setFocusPolicy(Qt.FocusPolicy.NoFocus)
    self.setContextMenuPolicy(Qt.ContextMenuPolicy.CustomContextMenu)
    self.customContextMenuRequested.connect(self._on_context_menu)
|
||||
|
||||
def set_post_tags(self, tag_categories: dict[str, list[str]], tag_list: list[str]) -> None:
    """Record the current post's tags for the blacklist-tag menu."""
    self._current_tags = tag_categories
    self._current_tag_list = tag_list
|
||||
|
||||
def _show_bl_tag_menu(self) -> None:
    """Popup under the blacklist-tag button listing the post's tags.

    Categorised tags get one submenu per category; otherwise a flat
    list. Each menu is capped at 30 entries to stay usable.
    """
    menu = QMenu(self)
    if self._current_tags:
        for category, tags in self._current_tags.items():
            sub = menu.addMenu(category)
            for tag in tags[:30]:
                sub.addAction(tag)
    else:
        for tag in self._current_tag_list[:30]:
            menu.addAction(tag)
    chosen = menu.exec(self._bl_tag_btn.mapToGlobal(self._bl_tag_btn.rect().bottomLeft()))
    if chosen:
        self.blacklist_tag_requested.emit(chosen.text())
|
||||
|
||||
def _on_bookmark_clicked(self) -> None:
    """Toolbar Bookmark button -- one-click filing, same flow as browse.

    Already bookmarked: emit bookmark_requested and let app.py's
    existing toggle remove it. Not bookmarked: pop a folder-picker menu
    and send the chosen name through bookmark_to_folder (empty string
    = Unfiled); app.py adds the folder and creates the bookmark.
    """
    if self._is_bookmarked:
        self.bookmark_requested.emit()
        return
    menu = QMenu(self)
    unfiled = menu.addAction("Unfiled")
    menu.addSeparator()
    by_id: dict[int, str] = {}
    if self._bookmark_folders_callback:
        for folder in self._bookmark_folders_callback():
            act = menu.addAction(folder)
            by_id[id(act)] = folder
        menu.addSeparator()
    new_action = menu.addAction("+ New Folder...")
    chosen = menu.exec(self._bookmark_btn.mapToGlobal(self._bookmark_btn.rect().bottomLeft()))
    if not chosen:
        return
    if chosen == unfiled:
        self.bookmark_to_folder.emit("")
    elif chosen == new_action:
        name, ok = QInputDialog.getText(self, "New Bookmark Folder", "Folder name:")
        if ok and name.strip():
            self.bookmark_to_folder.emit(name.strip())
    elif id(chosen) in by_id:
        self.bookmark_to_folder.emit(by_id[id(chosen)])
|
||||
|
||||
def _on_save_clicked(self) -> None:
    """Toolbar Save button: unsave when saved, else show a folder picker."""
    if self._is_saved:
        self.unsave_requested.emit()
        return
    menu = QMenu(self)
    unsorted = menu.addAction("Unfiled")
    menu.addSeparator()
    by_id = {}
    if self._folders_callback:
        for folder in self._folders_callback():
            act = menu.addAction(folder)
            by_id[id(act)] = folder
        menu.addSeparator()
    new_action = menu.addAction("+ New Folder...")
    chosen = menu.exec(self._save_btn.mapToGlobal(self._save_btn.rect().bottomLeft()))
    if not chosen:
        return
    if chosen == unsorted:
        self.save_to_folder.emit("")
    elif chosen == new_action:
        name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
        if ok and name.strip():
            self.save_to_folder.emit(name.strip())
    elif id(chosen) in by_id:
        self.save_to_folder.emit(by_id[id(chosen)])
|
||||
|
||||
def update_bookmark_state(self, bookmarked: bool) -> None:
    """Sync the toolbar Bookmark button glyph/tooltip with the new state."""
    self._is_bookmarked = bookmarked
    if bookmarked:
        self._bookmark_btn.setText("\u2605")  # ★ filled
        self._bookmark_btn.setToolTip("Unbookmark (B)")
    else:
        self._bookmark_btn.setText("\u2606")  # ☆ hollow
        self._bookmark_btn.setToolTip("Bookmark (B)")
|
||||
|
||||
def update_save_state(self, saved: bool) -> None:
    """Sync the toolbar Save button glyph/tooltip with the new state."""
    self._is_saved = saved
    if saved:
        self._save_btn.setText("\u2715")  # ✕ = unsave
        self._save_btn.setToolTip("Unsave from library")
    else:
        self._save_btn.setText("\u2193")  # ↓ = save
        self._save_btn.setToolTip("Save to library (S)")
|
||||
|
||||
|
||||
|
||||
# Keep these for compatibility with app.py accessing them
|
||||
@property
def _pixmap(self):
    # Compatibility shim: app.py still reads the pixmap off the preview.
    return self._image_viewer._pixmap
|
||||
|
||||
@property
def _info_text(self):
    # Compatibility shim: app.py still reads the info text off the preview.
    return self._image_viewer._info_text
|
||||
|
||||
def set_folders_callback(self, callback) -> None:
    """Wire the library folder list source used by the Save-to submenu."""
    self._folders_callback = callback
|
||||
|
||||
def set_bookmark_folders_callback(self, callback) -> None:
    """Wire the bookmark folder list source (called once from app.py with
    self._db.get_folders).

    Kept separate from set_folders_callback because library and bookmark
    folders are independent name spaces.
    """
    self._bookmark_folders_callback = callback
|
||||
|
||||
def set_image(self, pixmap: QPixmap, info: str = "") -> None:
    """Show an already-loaded QPixmap (no backing file path)."""
    self._video_player.stop()
    self._current_path = None
    self._image_viewer.set_image(pixmap, info)
    self._stack.setCurrentIndex(0)
    self._info_label.setText(info)
    self._toolbar.show()
    self._toolbar.raise_()
|
||||
|
||||
def set_media(self, path: str, info: str = "") -> None:
    """Auto-detect and show image, GIF, or video from `path`.

    Fix: when QPixmap failed to decode the file, the old code silently
    kept whatever media was previously on screen and left the info
    label stale. Now the image viewer is cleared and the info label
    updated on decode failure, so a broken file no longer masquerades
    as the previous post.
    """
    self._current_path = path
    ext = Path(path).suffix.lower()
    if _is_video(path):
        self._image_viewer.clear()
        self._video_player.stop()
        self._video_player.play_file(path, info)
        self._stack.setCurrentIndex(1)
    elif ext == ".gif":
        self._video_player.stop()
        self._image_viewer.set_gif(path, info)
        self._stack.setCurrentIndex(0)
    else:
        self._video_player.stop()
        pix = QPixmap(path)
        if pix.isNull():
            # Decode failed -- drop the stale image rather than keep it.
            self._image_viewer.clear()
        else:
            self._image_viewer.set_image(pix, info)
        self._stack.setCurrentIndex(0)
    self._info_label.setText(info)
    self._toolbar.show()
    self._toolbar.raise_()
|
||||
|
||||
def clear(self) -> None:
    """Reset the pane: stop playback, blank the viewer, hide the toolbar."""
    self._video_player.stop()
    self._image_viewer.clear()
    self._current_path = None
    self._info_label.setText("")
    self._toolbar.hide()
|
||||
|
||||
def _on_context_menu(self, pos) -> None:
    """Build and run the preview context menu, then dispatch the choice."""
    menu = QMenu(self)

    # Bookmark section: a flat "Unbookmark" when already bookmarked,
    # otherwise a "Bookmark as" folder submenu.
    unbookmark = None
    bm_by_id = {}
    bm_new = None
    bm_unfiled = None
    if self._is_bookmarked:
        unbookmark = menu.addAction("Unbookmark")
    else:
        bm_menu = menu.addMenu("Bookmark as")
        bm_unfiled = bm_menu.addAction("Unfiled")
        bm_menu.addSeparator()
        if self._bookmark_folders_callback:
            for folder in self._bookmark_folders_callback():
                act = bm_menu.addAction(folder)
                bm_by_id[id(act)] = folder
            bm_menu.addSeparator()
        bm_new = bm_menu.addAction("+ New Folder...")

    # Library section: a flat "Unsave" when saved, otherwise a
    # "Save to Library" folder submenu.
    save_unsorted = None
    save_new = None
    save_by_id = {}
    unsave = None
    if self._is_saved:
        unsave = menu.addAction("Unsave from Library")
    else:
        save_menu = menu.addMenu("Save to Library")
        save_unsorted = save_menu.addAction("Unfiled")
        save_menu.addSeparator()
        if self._folders_callback:
            for folder in self._folders_callback():
                act = save_menu.addAction(folder)
                save_by_id[id(act)] = folder
            save_menu.addSeparator()
        save_new = save_menu.addAction("+ New Folder...")

    menu.addSeparator()
    copy_image = menu.addAction("Copy File to Clipboard")
    copy_url = menu.addAction("Copy Image URL")
    open_action = menu.addAction("Open in Default App")
    browser_action = menu.addAction("Open in Browser")

    # Image-only: reset zoom/pan.
    reset_action = None
    if self._stack.currentIndex() == 0:
        reset_action = menu.addAction("Reset View")

    popout_action = None
    if self._current_path:
        popout_action = menu.addAction("Popout")
    clear_action = menu.addAction("Clear Preview")

    action = menu.exec(self.mapToGlobal(pos))
    if not action:
        return
    if action == unbookmark:
        self.bookmark_requested.emit()
    elif action == bm_unfiled:
        self.bookmark_to_folder.emit("")
    elif action == bm_new:
        name, ok = QInputDialog.getText(self, "New Bookmark Folder", "Folder name:")
        if ok and name.strip():
            self.bookmark_to_folder.emit(name.strip())
    elif id(action) in bm_by_id:
        self.bookmark_to_folder.emit(bm_by_id[id(action)])
    elif action == save_unsorted:
        self.save_to_folder.emit("")
    elif action == save_new:
        name, ok = QInputDialog.getText(self, "New Folder", "Folder name:")
        if ok and name.strip():
            self.save_to_folder.emit(name.strip())
    elif id(action) in save_by_id:
        self.save_to_folder.emit(save_by_id[id(action)])
    elif action == copy_image:
        from pathlib import Path as _Path
        from PySide6.QtCore import QMimeData, QUrl
        from PySide6.QtWidgets import QApplication
        from PySide6.QtGui import QPixmap as _QP
        cp = self._current_path
        if cp and _Path(cp).exists():
            # File URL plus raw image data so both file managers and
            # image-aware apps can paste it.
            mime = QMimeData()
            mime.setUrls([QUrl.fromLocalFile(str(_Path(cp).resolve()))])
            pix = _QP(cp)
            if not pix.isNull():
                mime.setImageData(pix.toImage())
            QApplication.clipboard().setMimeData(mime)
    elif action == copy_url:
        from PySide6.QtWidgets import QApplication
        if self._current_post and self._current_post.file_url:
            QApplication.clipboard().setText(self._current_post.file_url)
    elif action == open_action:
        self.open_in_default.emit()
    elif action == browser_action:
        self.open_in_browser.emit()
    elif action == reset_action:
        self._image_viewer._fit_to_view()
        self._image_viewer.update()
    elif action == unsave:
        self.unsave_requested.emit()
    elif action == popout_action:
        self.fullscreen_requested.emit()
    elif action == clear_action:
        self.close_requested.emit()
|
||||
|
||||
def mousePressEvent(self, event: QMouseEvent) -> None:
    """Forward all presses except right-clicks, which are left for the context menu."""
    if event.button() != Qt.MouseButton.RightButton:
        super().mousePressEvent(event)
        return
    event.ignore()
|
||||
|
||||
def wheelEvent(self, event) -> None:
    """Horizontal tilt navigates between posts; vertical scroll on the
    video page (stack index 1) adjusts playback volume."""
    horizontal = event.angleDelta().x()
    if horizontal > 30:
        self.navigate.emit(-1)
    elif horizontal < -30:
        self.navigate.emit(1)
    elif self._stack.currentIndex() == 1:
        # Accumulate wheel units; one detent notch is 120 units.
        self._vol_scroll_accum += event.angleDelta().y()
        notches = self._vol_scroll_accum // 120
        if notches:
            self._vol_scroll_accum -= notches * 120
            # 5 volume points per notch, clamped to [0, 100].
            target = self._video_player.volume + 5 * notches
            self._video_player.volume = max(0, min(100, target))
    else:
        super().wheelEvent(event)
|
||||
|
||||
def keyPressEvent(self, event: QKeyEvent) -> None:
    """Keyboard routing: the image page delegates wholesale to the image
    viewer; the video page gets playback and navigation shortcuts."""
    if self._stack.currentIndex() == 0:
        self._image_viewer.keyPressEvent(event)
        return
    key = event.key()
    if key == Qt.Key.Key_Space:
        self._video_player._toggle_play()
    elif key == Qt.Key.Key_Period:
        # Frame-ish step forward (1800 ms).
        self._video_player._seek_relative(1800)
    elif key == Qt.Key.Key_Comma:
        self._video_player._seek_relative(-1800)
    elif key in (Qt.Key.Key_Left, Qt.Key.Key_H):
        self.navigate.emit(-1)
    elif key in (Qt.Key.Key_Right, Qt.Key.Key_L):
        self.navigate.emit(1)
|
||||
|
||||
def resizeEvent(self, event) -> None:
|
||||
super().resizeEvent(event)
|
||||
68
booru_viewer/gui/privacy.py
Normal file
68
booru_viewer/gui/privacy.py
Normal file
@ -0,0 +1,68 @@
|
||||
"""Privacy-screen overlay for the main window."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from PySide6.QtWidgets import QWidget
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
|
||||
class PrivacyController:
    """Owns the privacy overlay toggle and popout coordination."""

    def __init__(self, app: BooruApp) -> None:
        self._app = app
        self._on = False  # whether the privacy screen is currently active
        self._overlay: QWidget | None = None  # lazily-created black cover widget
        self._popout_was_visible = False  # popout state captured when hiding
        self._preview_was_playing = False  # embedded video state captured when hiding

    @property
    def is_active(self) -> bool:
        # Read-only view of the toggle state.
        return self._on

    def resize_overlay(self) -> None:
        """Re-fit the overlay to the main window's current rect."""
        if self._overlay is not None and self._on:
            self._overlay.setGeometry(self._app.rect())

    def toggle(self) -> None:
        """Flip the privacy screen on/off.

        On: cover the window with a black overlay, neutralize the window
        title, pause the embedded preview video (remembering whether it was
        playing), and hide the popout window if visible.
        Off: undo all of the above, resuming only what was active before.
        """
        if self._overlay is None:
            # Created on first use; parented to the app so it is destroyed
            # with the window.
            self._overlay = QWidget(self._app)
            self._overlay.setStyleSheet("background: black;")
            self._overlay.hide()

        self._on = not self._on
        if self._on:
            self._overlay.setGeometry(self._app.rect())
            self._overlay.raise_()
            self._overlay.show()
            self._app.setWindowTitle("booru-viewer")
            # Pause preview video, remembering whether it was playing
            self._preview_was_playing = False
            if self._app._preview._stack.currentIndex() == 1:
                mpv = self._app._preview._video_player._mpv
                self._preview_was_playing = mpv is not None and not mpv.pause
                self._app._preview._video_player.pause()
            # Delegate popout hide-and-pause to FullscreenPreview so it
            # can capture its own geometry for restore.
            self._popout_was_visible = bool(
                self._app._popout_ctrl.window
                and self._app._popout_ctrl.window.isVisible()
            )
            if self._popout_was_visible:
                self._app._popout_ctrl.window.privacy_hide()
        else:
            self._overlay.hide()
            # Resume embedded preview video only if it was playing before
            if self._preview_was_playing and self._app._preview._stack.currentIndex() == 1:
                self._app._preview._video_player.resume()
            # Restore the popout via its own privacy_show method, which
            # also re-dispatches the captured geometry to Hyprland (Qt
            # show() alone doesn't preserve position on Wayland) and
            # resumes its video.
            if self._popout_was_visible and self._app._popout_ctrl.window:
                self._app._popout_ctrl.window.privacy_show()
|
||||
@ -17,6 +17,29 @@ from PySide6.QtWidgets import (
|
||||
from ..core.db import Database
|
||||
|
||||
|
||||
class _TagCompleter(QCompleter):
    """Completer that matches only the final space-separated tag.

    Typing "blue_sky tre" completes against "tre", so the popup shows
    suggestions for just that fragment. Accepting a suggestion swaps out
    only the last tag, leaving everything before the final space intact.
    """

    def splitPath(self, path: str) -> list[str]:
        # Match against the trailing fragment only.
        fragments = path.split()
        return [fragments[-1] if fragments else ""]

    def pathFromIndex(self, index) -> str:
        chosen = super().pathFromIndex(index)
        existing = self.widget().text().split()
        if existing:
            existing[-1] = chosen
        else:
            existing = [chosen]
        # Trailing space primes the field for the next tag.
        return " ".join(existing) + " "
|
||||
|
||||
|
||||
class SearchBar(QWidget):
|
||||
"""Tag search bar with autocomplete, history dropdown, and saved searches."""
|
||||
|
||||
@ -63,9 +86,10 @@ class SearchBar(QWidget):
|
||||
self._btn.clicked.connect(self._do_search)
|
||||
layout.addWidget(self._btn)
|
||||
|
||||
# Autocomplete
|
||||
# Autocomplete — _TagCompleter only completes the last tag,
|
||||
# preserving previous tags in multi-tag queries.
|
||||
self._completer_model = QStringListModel()
|
||||
self._completer = QCompleter(self._completer_model)
|
||||
self._completer = _TagCompleter(self._completer_model)
|
||||
self._completer.setCaseSensitivity(Qt.CaseSensitivity.CaseInsensitive)
|
||||
self._completer.setCompletionMode(QCompleter.CompletionMode.PopupCompletion)
|
||||
self._input.setCompleter(self._completer)
|
||||
@ -78,6 +102,9 @@ class SearchBar(QWidget):
|
||||
self._input.textChanged.connect(self._on_text_changed)
|
||||
|
||||
def _on_text_changed(self, text: str) -> None:
|
||||
if text.endswith(" "):
|
||||
self._completer_model.setStringList([])
|
||||
return
|
||||
self._ac_timer.start()
|
||||
|
||||
def _request_autocomplete(self) -> None:
|
||||
@ -94,7 +121,7 @@ class SearchBar(QWidget):
|
||||
|
||||
def _do_search(self) -> None:
|
||||
query = self._input.text().strip()
|
||||
if self._db and query:
|
||||
if self._db and query and self._db.get_setting_bool("search_history_enabled"):
|
||||
self._db.add_search_history(query)
|
||||
self.search_requested.emit(query)
|
||||
|
||||
@ -116,8 +143,8 @@ class SearchBar(QWidget):
|
||||
saved_actions[id(a)] = (sid, query)
|
||||
menu.addSeparator()
|
||||
|
||||
# History
|
||||
history = self._db.get_search_history()
|
||||
# History (only shown when the setting is on)
|
||||
history = self._db.get_search_history() if self._db.get_setting_bool("search_history_enabled") else []
|
||||
if history:
|
||||
hist_header = menu.addAction("-- Recent --")
|
||||
hist_header.setEnabled(False)
|
||||
|
||||
601
booru_viewer/gui/search_controller.py
Normal file
601
booru_viewer/gui/search_controller.py
Normal file
@ -0,0 +1,601 @@
|
||||
"""Search orchestration, infinite scroll, tag building, and blacklist filtering."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .search_state import SearchState
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
# -- Pure functions (tested in tests/gui/test_search_controller.py) --
|
||||
|
||||
|
||||
def build_search_tags(
    tags: str,
    rating: str,
    api_type: str | None,
    min_score: int,
    media_filter: str,
) -> str:
    """Build the full search tag string from individual filter values.

    Args:
        tags: raw user-entered tag string ("" for none).
        rating: "all" or one of general/sensitive/questionable/explicit.
        api_type: site API flavor ("danbooru", "gelbooru", "e621"; any
            other value uses the moebooru-style names). None disables
            rating filtering entirely.
        min_score: minimum score; values <= 0 add no score filter.
        media_filter: "Animated", "Video", "GIF", "Audio", or anything
            else for no media restriction.

    Returns:
        Space-joined tag query suitable for the site's search API.
    """
    # Per-API spelling of each rating level; unlisted api_types fall back
    # to the moebooru-style names.
    rating_by_api = {
        "danbooru": {"general": "g", "sensitive": "s",
                     "questionable": "q", "explicit": "e"},
        "gelbooru": {"general": "general", "sensitive": "sensitive",
                     "questionable": "questionable", "explicit": "explicit"},
        "e621": {"general": "s", "sensitive": "s",
                 "questionable": "q", "explicit": "e"},
    }
    moebooru_ratings = {"general": "safe", "sensitive": "safe",
                        "questionable": "questionable", "explicit": "explicit"}
    media_tags = {"Animated": "animated", "Video": "video",
                  "GIF": "animated_gif", "Audio": "audio"}

    parts: list[str] = []
    if tags:
        parts.append(tags)

    if rating != "all" and api_type:
        mapped = rating_by_api.get(api_type, moebooru_ratings).get(rating)
        if mapped is not None:
            parts.append(f"rating:{mapped}")

    if min_score > 0:
        parts.append(f"score:>={min_score}")

    if media_filter in media_tags:
        parts.append(media_tags[media_filter])

    return " ".join(parts)
|
||||
|
||||
|
||||
def filter_posts(
    posts: list,
    bl_tags: set,
    bl_posts: set,
    seen_ids: set,
) -> tuple[list, dict]:
    """Filter posts by blacklisted tags/URLs and dedup against *seen_ids*.

    Posts must expose ``tag_list``, ``file_url``, and ``id``.
    Mutates *seen_ids* in place (adds surviving post IDs).
    Returns ``(filtered_posts, drop_counts)`` where *drop_counts* has keys
    ``bl_tags``, ``bl_posts``, ``dedup``.
    """
    drops = {"bl_tags": 0, "bl_posts": 0, "dedup": 0}

    # Stage 1: drop posts carrying any blacklisted tag.
    before = len(posts)
    if bl_tags:
        posts = [p for p in posts if not bl_tags.intersection(p.tag_list)]
    drops["bl_tags"] = before - len(posts)

    # Stage 2: drop posts whose file URL is blacklisted.
    before = len(posts)
    if bl_posts:
        posts = [p for p in posts if p.file_url not in bl_posts]
    drops["bl_posts"] = before - len(posts)

    # Stage 3: drop posts already shown this search.
    before = len(posts)
    posts = [p for p in posts if p.id not in seen_ids]
    drops["dedup"] = before - len(posts)

    seen_ids.update(p.id for p in posts)
    return posts, drops
|
||||
|
||||
|
||||
def should_backfill(collected_count: int, limit: int, last_batch_size: int) -> bool:
    """Return True if another backfill page should be fetched.

    Backfill continues while the page is still short of *limit* posts AND
    the API has not signalled exhaustion by returning a short batch.
    """
    page_still_short = collected_count < limit
    api_may_have_more = last_batch_size >= limit
    return page_still_short and api_may_have_more
|
||||
|
||||
|
||||
# -- Controller --
|
||||
|
||||
|
||||
class SearchController:
    """Owns search orchestration, pagination, infinite scroll, and blacklist."""

    def __init__(self, app: BooruApp) -> None:
        self._app = app
        self._current_page = 1
        self._current_tags = ""
        self._current_rating = "all"
        self._min_score = 0
        self._loading = False  # guard: suppresses overlapping fetches
        self._search = SearchState()
        self._last_scroll_page = 0
        self._infinite_scroll = app._db.get_setting_bool("infinite_scroll")
        # Cached lookup sets — rebuilt once per search, reused in
        # _drain_append_queue to avoid repeated DB queries and directory
        # listings on every infinite-scroll append.
        self._cached_names: set[str] | None = None
        self._bookmarked_ids: set[int] | None = None
        self._saved_ids: set[int] | None = None

    def reset(self) -> None:
        """Reset search state for a site change."""
        self._search.shown_post_ids.clear()
        self._search.page_cache.clear()
        self._cached_names = None
        self._bookmarked_ids = None
        self._saved_ids = None

    def invalidate_lookup_caches(self) -> None:
        """Clear cached bookmark/saved/cache-dir sets.

        Call after a bookmark or save operation so the next
        ``_drain_append_queue`` picks up the change.
        """
        self._bookmarked_ids = None
        self._saved_ids = None

    def clear_loading(self) -> None:
        # Timer callback: releases the loading guard shortly after results land.
        self._loading = False

    # -- Search entry points --

    def on_search(self, tags: str) -> None:
        """Start a fresh search: reset paging/state, then fetch page 1."""
        self._current_tags = tags
        self._app._page_spin.setValue(1)
        self._current_page = 1
        self._search = SearchState()
        self._cached_names = None
        self._bookmarked_ids = None
        self._saved_ids = None
        self._min_score = self._app._score_spin.value()
        self._app._preview.clear()
        self._app._next_page_btn.setVisible(True)
        self._app._prev_page_btn.setVisible(False)
        self.do_search()

    def on_search_error(self, e: str) -> None:
        """Surface a search failure in the status bar and release the guard."""
        self._loading = False
        self._app._status.showMessage(f"Error: {e}")

    # -- Pagination --

    def prev_page(self) -> None:
        """Go back one page, serving from the page cache when possible."""
        if self._current_page > 1:
            self._current_page -= 1
            if self._current_page in self._search.page_cache:
                self._app._signals.search_done.emit(self._search.page_cache[self._current_page])
            else:
                self.do_search()

    def next_page(self) -> None:
        """Advance one page, serving from the page cache when possible."""
        if self._loading:
            return
        self._current_page += 1
        if self._current_page in self._search.page_cache:
            self._app._signals.search_done.emit(self._search.page_cache[self._current_page])
            return
        self.do_search()

    def on_nav_past_end(self) -> None:
        # Keyboard navigation walked past the last post: turn the page and
        # select the first post of the new page (paged mode only).
        if self._infinite_scroll:
            return
        self._search.nav_page_turn = "first"
        self.next_page()

    def on_nav_before_start(self) -> None:
        # Keyboard navigation walked before the first post: go back a page
        # and select its last post (paged mode only).
        if self._infinite_scroll:
            return
        if self._current_page > 1:
            self._search.nav_page_turn = "last"
            self.prev_page()

    def scroll_next_page(self) -> None:
        # Explicit page advance without consulting the page cache.
        if self._loading:
            return
        self._current_page += 1
        self.do_search()

    def scroll_prev_page(self) -> None:
        # Explicit page retreat without consulting the page cache.
        if self._loading or self._current_page <= 1:
            return
        self._current_page -= 1
        self.do_search()

    # -- Tag building --

    def _build_search_tags(self) -> str:
        """Combine current tags/rating/score/media filters into one query string."""
        api_type = self._app._current_site.api_type if self._app._current_site else None
        return build_search_tags(
            self._current_tags,
            self._current_rating,
            api_type,
            self._min_score,
            self._app._media_filter.currentText(),
        )

    # -- Core search --

    def do_search(self) -> None:
        """Fetch the current page asynchronously.

        Blacklist filtering and dedup can leave a page short, so up to 9
        additional pages are fetched (rate-limited) to backfill. Results
        are delivered via the ``search_done`` signal; failures via
        ``search_error``.
        """
        if not self._app._current_site:
            self._app._status.showMessage("No site selected")
            return
        self._loading = True
        self._app._page_label.setText(f"Page {self._current_page}")
        self._app._status.showMessage("Searching...")

        search_tags = self._build_search_tags()
        log.info(f"Search: tags='{search_tags}' rating={self._current_rating}")
        page = self._current_page
        limit = self._app._db.get_setting_int("page_size") or 40

        bl_tags = set()
        if self._app._db.get_setting_bool("blacklist_enabled"):
            bl_tags = set(self._app._db.get_blacklisted_tags())
        bl_posts = self._app._db.get_blacklisted_posts()
        # Snapshot shown IDs so the async task dedups against a stable set.
        shown_ids = self._search.shown_post_ids.copy()
        seen = shown_ids.copy()

        total_drops = {"bl_tags": 0, "bl_posts": 0, "dedup": 0}

        async def _search():
            client = self._app._make_client()
            try:
                collected = []
                raw_total = 0
                current_page = page
                batch = await client.search(tags=search_tags, page=current_page, limit=limit)
                raw_total += len(batch)
                filtered, batch_drops = filter_posts(batch, bl_tags, bl_posts, seen)
                for k in total_drops:
                    total_drops[k] += batch_drops[k]
                collected.extend(filtered)
                # Backfill: keep fetching (with a 0.3 s rate-limit sleep)
                # until the page is full or the API returns a short batch.
                if should_backfill(len(collected), limit, len(batch)):
                    for _ in range(9):
                        await asyncio.sleep(0.3)
                        current_page += 1
                        batch = await client.search(tags=search_tags, page=current_page, limit=limit)
                        raw_total += len(batch)
                        filtered, batch_drops = filter_posts(batch, bl_tags, bl_posts, seen)
                        for k in total_drops:
                            total_drops[k] += batch_drops[k]
                        collected.extend(filtered)
                        log.debug(f"Backfill: page={current_page} batch={len(batch)} filtered={len(filtered)} total={len(collected)}/{limit}")
                        if not should_backfill(len(collected), limit, len(batch)):
                            break
                log.debug(
                    f"do_search: limit={limit} api_returned_total={raw_total} kept={len(collected[:limit])} "
                    f"drops_bl_tags={total_drops['bl_tags']} drops_bl_posts={total_drops['bl_posts']} drops_dedup={total_drops['dedup']} "
                    f"last_batch_size={len(batch)} api_short_signal={len(batch) < limit}"
                )
                self._app._signals.search_done.emit(collected[:limit])
            except Exception as e:
                self._app._signals.search_error.emit(str(e))
            finally:
                await client.close()

        self._app._run_async(_search)

    # -- Search results --

    def on_search_done(self, posts: list) -> None:
        """Display a page of results: populate the grid, update paging UI,
        rebuild the bookmark/saved/cache lookup sets, and kick off
        thumbnail downloads."""
        self._app._page_label.setText(f"Page {self._current_page}")
        self._app._posts = posts
        ss = self._search
        ss.shown_post_ids.update(p.id for p in posts)
        ss.page_cache[self._current_page] = posts
        # Bound the page cache in paged mode by evicting the oldest page.
        if not self._infinite_scroll and len(ss.page_cache) > 10:
            oldest = min(ss.page_cache.keys())
            del ss.page_cache[oldest]
        limit = self._app._db.get_setting_int("page_size") or 40
        # A short page signals the end of results.
        at_end = len(posts) < limit
        log.debug(f"on_search_done: displayed_count={len(posts)} limit={limit} at_end={at_end}")
        if at_end:
            self._app._status.showMessage(f"{len(posts)} results (end)")
        else:
            self._app._status.showMessage(f"{len(posts)} results")
        self._app._prev_page_btn.setVisible(self._current_page > 1)
        self._app._next_page_btn.setVisible(not at_end)
        thumbs = self._app._grid.set_posts(len(posts))
        self._app._grid.scroll_to_top()
        from PySide6.QtCore import QTimer
        # Release the loading guard slightly after the grid settles.
        QTimer.singleShot(100, self.clear_loading)

        from ..core.cache import cached_path_for, cache_dir
        site_id = self._app._site_combo.currentData()

        # Rebuild the per-search lookup sets (reused by _drain_append_queue).
        self._saved_ids = self._app._db.get_saved_post_ids()

        _favs = self._app._db.get_bookmarks(site_id=site_id) if site_id else []
        self._bookmarked_ids = {f.post_id for f in _favs}

        _cd = cache_dir()
        self._cached_names = set()
        if _cd.exists():
            self._cached_names = {f.name for f in _cd.iterdir() if f.is_file()}

        for i, (post, thumb) in enumerate(zip(posts, thumbs)):
            if post.id in self._bookmarked_ids:
                thumb.set_bookmarked(True)
            thumb.set_saved_locally(post.id in self._saved_ids)
            cached = cached_path_for(post.file_url)
            if cached.name in self._cached_names:
                thumb._cached_path = str(cached)

            if post.preview_url:
                self.fetch_thumbnail(i, post.preview_url)

        # Honor a pending keyboard page-turn: select first/last post.
        turn = self._search.nav_page_turn
        if turn and posts:
            self._search.nav_page_turn = None
            if turn == "first":
                idx = 0
            else:
                idx = len(posts) - 1
            self._app._grid._select(idx)
            self._app._media_ctrl.on_post_activated(idx)

        self._app._grid.setFocus()

        if self._app._db.get_setting("prefetch_mode") in ("Nearby", "Aggressive") and posts:
            self._app._media_ctrl.prefetch_adjacent(0)

        # In infinite-scroll mode, make sure the viewport is actually full.
        if self._infinite_scroll and posts:
            QTimer.singleShot(200, self.check_viewport_fill)

    # -- Infinite scroll --

    def on_reached_bottom(self) -> None:
        """Fetch the next page for infinite scroll, with backfill.

        Unlike ``do_search``, results go through the ``search_append``
        signal and exhaustion state is tracked in SearchState.
        """
        if not self._infinite_scroll or self._loading or self._search.infinite_exhausted:
            return
        self._loading = True
        self._current_page += 1

        search_tags = self._build_search_tags()
        page = self._current_page
        limit = self._app._db.get_setting_int("page_size") or 40

        bl_tags = set()
        if self._app._db.get_setting_bool("blacklist_enabled"):
            bl_tags = set(self._app._db.get_blacklisted_tags())
        bl_posts = self._app._db.get_blacklisted_posts()
        shown_ids = self._search.shown_post_ids.copy()
        seen = shown_ids.copy()

        total_drops = {"bl_tags": 0, "bl_posts": 0, "dedup": 0}

        async def _search():
            client = self._app._make_client()
            collected = []
            raw_total = 0
            last_page = page
            api_exhausted = False
            try:
                current_page = page
                batch = await client.search(tags=search_tags, page=current_page, limit=limit)
                raw_total += len(batch)
                last_page = current_page
                filtered, batch_drops = filter_posts(batch, bl_tags, bl_posts, seen)
                for k in total_drops:
                    total_drops[k] += batch_drops[k]
                collected.extend(filtered)
                if len(batch) < limit:
                    # Short batch from the API means no more results.
                    api_exhausted = True
                elif len(collected) < limit:
                    # Backfill loop (rate-limited) until full or exhausted.
                    for _ in range(9):
                        await asyncio.sleep(0.3)
                        current_page += 1
                        batch = await client.search(tags=search_tags, page=current_page, limit=limit)
                        raw_total += len(batch)
                        last_page = current_page
                        filtered, batch_drops = filter_posts(batch, bl_tags, bl_posts, seen)
                        for k in total_drops:
                            total_drops[k] += batch_drops[k]
                        collected.extend(filtered)
                        if len(batch) < limit:
                            api_exhausted = True
                            break
                        if len(collected) >= limit:
                            break
            except Exception as e:
                log.warning(f"Infinite scroll fetch failed: {e}")
            finally:
                # Always record progress and emit (possibly empty) results so
                # on_search_append can release the loading guard.
                self._search.infinite_last_page = last_page
                self._search.infinite_api_exhausted = api_exhausted
                log.debug(
                    f"on_reached_bottom: limit={limit} api_returned_total={raw_total} kept={len(collected[:limit])} "
                    f"drops_bl_tags={total_drops['bl_tags']} drops_bl_posts={total_drops['bl_posts']} drops_dedup={total_drops['dedup']} "
                    f"api_exhausted={api_exhausted} last_page={last_page}"
                )
                self._app._signals.search_append.emit(collected[:limit])
                await client.close()

        self._app._run_async(_search)

    def on_scroll_range_changed(self, _min: int, max_val: int) -> None:
        """Scrollbar range changed (resize/splitter) -- check if viewport needs filling."""
        if max_val == 0 and self._infinite_scroll and self._app._posts:
            from PySide6.QtCore import QTimer
            QTimer.singleShot(100, self.check_viewport_fill)

    def check_viewport_fill(self) -> None:
        """If content doesn't fill the viewport, trigger infinite scroll."""
        if not self._infinite_scroll or self._loading or self._search.infinite_exhausted:
            return
        self._app._grid.widget().updateGeometry()
        from PySide6.QtWidgets import QApplication
        # Force a layout pass so the scrollbar maximum is current.
        QApplication.processEvents()
        sb = self._app._grid.verticalScrollBar()
        if sb.maximum() == 0 and self._app._posts:
            self.on_reached_bottom()

    def on_search_append(self, posts: list) -> None:
        """Queue posts and add them one at a time as thumbnails arrive."""
        ss = self._search

        if not posts:
            # Empty append: sync page number, mark exhaustion, or retry fill.
            if ss.infinite_api_exhausted and ss.infinite_last_page > self._current_page:
                self._current_page = ss.infinite_last_page
            self._loading = False
            if ss.infinite_api_exhausted:
                ss.infinite_exhausted = True
                self._app._status.showMessage(f"{len(self._app._posts)} results (end)")
            else:
                from PySide6.QtCore import QTimer
                QTimer.singleShot(100, self.check_viewport_fill)
            return
        # Keep the visible page number in sync with backfill progress.
        if ss.infinite_last_page > self._current_page:
            self._current_page = ss.infinite_last_page
        ss.shown_post_ids.update(p.id for p in posts)
        ss.append_queue.extend(posts)
        self._drain_append_queue()

    def _drain_append_queue(self) -> None:
        """Add all queued posts to the grid at once, thumbnails load async."""
        ss = self._search
        if not ss.append_queue:
            self._loading = False
            return

        from ..core.cache import cached_path_for

        # Reuse the lookup sets built in on_search_done. They stay valid
        # within an infinite-scroll session — bookmarks/saves don't change
        # during passive scrolling, and the cache directory only grows.
        if self._saved_ids is None:
            self._saved_ids = self._app._db.get_saved_post_ids()
        if self._bookmarked_ids is None:
            site_id = self._app._site_combo.currentData()
            _favs = self._app._db.get_bookmarks(site_id=site_id) if site_id else []
            self._bookmarked_ids = {f.post_id for f in _favs}
        if self._cached_names is None:
            from ..core.cache import cache_dir
            _cd = cache_dir()
            self._cached_names = set()
            if _cd.exists():
                self._cached_names = {f.name for f in _cd.iterdir() if f.is_file()}

        posts = ss.append_queue[:]
        ss.append_queue.clear()
        start_idx = len(self._app._posts)
        self._app._posts.extend(posts)
        thumbs = self._app._grid.append_posts(len(posts))

        for i, (post, thumb) in enumerate(zip(posts, thumbs)):
            idx = start_idx + i
            if post.id in self._bookmarked_ids:
                thumb.set_bookmarked(True)
            thumb.set_saved_locally(post.id in self._saved_ids)
            cached = cached_path_for(post.file_url)
            if cached.name in self._cached_names:
                thumb._cached_path = str(cached)
            if post.preview_url:
                self.fetch_thumbnail(idx, post.preview_url)

        self._app._status.showMessage(f"{len(self._app._posts)} results")

        self._loading = False
        self._app._media_ctrl.auto_evict_cache()
        # If the user is already near the bottom, immediately fetch more.
        sb = self._app._grid.verticalScrollBar()
        from .grid import THUMB_SIZE, THUMB_SPACING
        threshold = THUMB_SIZE + THUMB_SPACING * 2
        if sb.maximum() == 0 or sb.value() >= sb.maximum() - threshold:
            self.on_reached_bottom()

    # -- Thumbnails --

    def fetch_thumbnail(self, index: int, url: str) -> None:
        """Download a thumbnail asynchronously; delivers via thumb_done signal."""
        from ..core.cache import download_thumbnail

        async def _download():
            try:
                path = await download_thumbnail(url)
                self._app._signals.thumb_done.emit(index, str(path))
            except Exception as e:
                # Best-effort: a failed thumbnail just stays blank.
                log.warning(f"Thumb #{index} failed: {e}")
        self._app._run_async(_download)

    def on_thumb_done(self, index: int, path: str) -> None:
        """Apply a downloaded thumbnail to its grid cell (if still valid)."""
        from PySide6.QtGui import QPixmap
        thumbs = self._app._grid._thumbs
        # Index may be stale if the grid was repopulated meanwhile.
        if 0 <= index < len(thumbs):
            pix = QPixmap(path)
            if not pix.isNull():
                thumbs[index].set_pixmap(pix, path)

    # -- Autocomplete --

    def request_autocomplete(self, query: str) -> None:
        """Fetch tag suggestions for *query* (min 2 chars) asynchronously."""
        if not self._app._current_site or len(query) < 2:
            return

        async def _ac():
            client = self._app._make_client()
            try:
                results = await client.autocomplete(query)
                self._app._signals.autocomplete_done.emit(results)
            except Exception as e:
                log.warning(f"Operation failed: {e}")
            finally:
                await client.close()

        self._app._run_async(_ac)

    def on_autocomplete_done(self, suggestions: list) -> None:
        # Forward suggestions to the search bar's completer model.
        self._app._search_bar.set_suggestions(suggestions)

    # -- Blacklist removal --

    def remove_blacklisted_from_grid(self, tag: str | None = None, post_url: str | None = None) -> None:
        """Remove matching posts from the grid in-place without re-searching."""
        to_remove = []
        for i, post in enumerate(self._app._posts):
            if tag and tag in post.tag_list:
                to_remove.append(i)
            elif post_url and post.file_url == post_url:
                to_remove.append(i)

        if not to_remove:
            return

        # If the currently previewed post is being removed, clear the preview
        # and stop popout playback.
        from ..core.cache import cached_path_for
        for i in to_remove:
            cp = str(cached_path_for(self._app._posts[i].file_url))
            if cp == self._app._preview._current_path:
                self._app._preview.clear()
                if self._app._popout_ctrl.window and self._app._popout_ctrl.window.isVisible():
                    self._app._popout_ctrl.window.stop_media()
                break

        # Pop in reverse so earlier indices stay valid.
        for i in reversed(to_remove):
            self._app._posts.pop(i)

        # Rebuild the grid and re-apply badges/thumbnails.
        thumbs = self._app._grid.set_posts(len(self._app._posts))
        site_id = self._app._site_combo.currentData()
        _saved_ids = self._app._db.get_saved_post_ids()

        for i, (post, thumb) in enumerate(zip(self._app._posts, thumbs)):
            if site_id and self._app._db.is_bookmarked(site_id, post.id):
                thumb.set_bookmarked(True)
            thumb.set_saved_locally(post.id in _saved_ids)
            from ..core.cache import cached_path_for as cpf
            cached = cpf(post.file_url)
            if cached.exists():
                thumb._cached_path = str(cached)
            if post.preview_url:
                self.fetch_thumbnail(i, post.preview_url)

        self._app._status.showMessage(f"{len(self._app._posts)} results — {len(to_remove)} removed")
|
||||
17
booru_viewer/gui/search_state.py
Normal file
17
booru_viewer/gui/search_state.py
Normal file
@ -0,0 +1,17 @@
|
||||
"""Mutable per-search state container."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
|
||||
@dataclass
class SearchState:
    """Mutable state that resets on every new search."""
    # IDs of every post already displayed this search (used for dedup).
    shown_post_ids: set[int] = field(default_factory=set)
    # page number -> list of posts, so back/forward can skip refetching.
    page_cache: dict[int, list] = field(default_factory=dict)
    # True once infinite scroll has confirmed there are no more results.
    infinite_exhausted: bool = False
    # Last page number actually fetched by the infinite-scroll backfill.
    infinite_last_page: int = 0
    # True when the API returned a short batch during an infinite-scroll fetch.
    infinite_api_exhausted: bool = False
    # "first" or "last": which post to select after a keyboard page turn.
    nav_page_turn: str | None = None
    # Posts fetched but not yet added to the grid.
    append_queue: list = field(default_factory=list)
|
||||
@ -21,7 +21,6 @@ from PySide6.QtWidgets import (
|
||||
QListWidget,
|
||||
QMessageBox,
|
||||
QGroupBox,
|
||||
QProgressBar,
|
||||
)
|
||||
|
||||
from ..core.db import Database
|
||||
@ -65,6 +64,10 @@ class SettingsDialog(QDialog):
|
||||
btns = QHBoxLayout()
|
||||
btns.addStretch()
|
||||
|
||||
apply_btn = QPushButton("Apply")
|
||||
apply_btn.clicked.connect(self._apply)
|
||||
btns.addWidget(apply_btn)
|
||||
|
||||
save_btn = QPushButton("Save")
|
||||
save_btn.clicked.connect(self._save_and_close)
|
||||
btns.addWidget(save_btn)
|
||||
@ -136,7 +139,7 @@ class SettingsDialog(QDialog):
|
||||
|
||||
# Thumbnail size
|
||||
self._thumb_size = QSpinBox()
|
||||
self._thumb_size.setRange(100, 400)
|
||||
self._thumb_size.setRange(100, 200)
|
||||
self._thumb_size.setSingleStep(20)
|
||||
self._thumb_size.setValue(self._db.get_setting_int("thumbnail_size"))
|
||||
form.addRow("Thumbnail size (px):", self._spinbox_row(self._thumb_size))
|
||||
@ -187,6 +190,21 @@ class SettingsDialog(QDialog):
|
||||
self._infinite_scroll.setChecked(self._db.get_setting_bool("infinite_scroll"))
|
||||
form.addRow("", self._infinite_scroll)
|
||||
|
||||
# Unbookmark on save
|
||||
self._unbookmark_on_save = QCheckBox("Remove bookmark when saved to library")
|
||||
self._unbookmark_on_save.setChecked(self._db.get_setting_bool("unbookmark_on_save"))
|
||||
form.addRow("", self._unbookmark_on_save)
|
||||
|
||||
# Search history
|
||||
self._search_history = QCheckBox("Record recent searches")
|
||||
self._search_history.setChecked(self._db.get_setting_bool("search_history_enabled"))
|
||||
form.addRow("", self._search_history)
|
||||
|
||||
# Flip layout
|
||||
self._flip_layout = QCheckBox("Preview on left")
|
||||
self._flip_layout.setChecked(self._db.get_setting_bool("flip_layout"))
|
||||
form.addRow("", self._flip_layout)
|
||||
|
||||
# Slideshow monitor
|
||||
from PySide6.QtWidgets import QApplication
|
||||
self._monitor_combo = QComboBox()
|
||||
@ -200,6 +218,16 @@ class SettingsDialog(QDialog):
|
||||
self._monitor_combo.setCurrentIndex(idx)
|
||||
form.addRow("Popout monitor:", self._monitor_combo)
|
||||
|
||||
# Popout anchor — resize pivot point
|
||||
self._popout_anchor = QComboBox()
|
||||
self._popout_anchor.addItems(["Center", "Top-left", "Top-right", "Bottom-left", "Bottom-right"])
|
||||
_anchor_map = {"center": "Center", "tl": "Top-left", "tr": "Top-right", "bl": "Bottom-left", "br": "Bottom-right"}
|
||||
current_anchor = self._db.get_setting("popout_anchor") or "center"
|
||||
idx = self._popout_anchor.findText(_anchor_map.get(current_anchor, "Center"))
|
||||
if idx >= 0:
|
||||
self._popout_anchor.setCurrentIndex(idx)
|
||||
form.addRow("Popout anchor:", self._popout_anchor)
|
||||
|
||||
# File dialog platform (Linux only)
|
||||
self._file_dialog_combo = None
|
||||
if not IS_WINDOWS:
|
||||
@ -285,6 +313,15 @@ class SettingsDialog(QDialog):
|
||||
clear_cache_btn.clicked.connect(self._clear_image_cache)
|
||||
btn_row1.addWidget(clear_cache_btn)
|
||||
|
||||
clear_tags_btn = QPushButton("Clear Tag Cache")
|
||||
clear_tags_btn.setToolTip(
|
||||
"Wipe the per-site tag-type cache (Gelbooru/Moebooru sites). "
|
||||
"Use this if category colors stop appearing correctly — the "
|
||||
"app will re-fetch tag types on the next post view."
|
||||
)
|
||||
clear_tags_btn.clicked.connect(self._clear_tag_cache)
|
||||
btn_row1.addWidget(clear_tags_btn)
|
||||
|
||||
actions_layout.addLayout(btn_row1)
|
||||
|
||||
btn_row2 = QHBoxLayout()
|
||||
@ -430,6 +467,26 @@ class SettingsDialog(QDialog):
|
||||
layout.addWidget(QLabel("Library directory:"))
|
||||
layout.addLayout(lib_row)
|
||||
|
||||
# Library filename template (editable). Applies to every save action
|
||||
# — Save to Library, Save As, batch downloads, multi-select bulk
|
||||
# operations, and bookmark→library copies. Empty = post id.
|
||||
layout.addWidget(QLabel("Library filename template:"))
|
||||
self._library_filename_template = QLineEdit(
|
||||
self._db.get_setting("library_filename_template") or ""
|
||||
)
|
||||
self._library_filename_template.setPlaceholderText("e.g. %artist%_%id% (leave blank for post id)")
|
||||
layout.addWidget(self._library_filename_template)
|
||||
tmpl_help = QLabel(
|
||||
"Tokens: %id% %md5% %ext% %rating% %score% "
|
||||
"%artist% %character% %copyright% %general% %meta% %species%\n"
|
||||
"Applies to every save action: Save to Library, Save As, Batch Download, "
|
||||
"multi-select bulk operations, and bookmark→library copies.\n"
|
||||
"All tokens work on all sites. Category tokens are fetched on demand."
|
||||
)
|
||||
tmpl_help.setWordWrap(True)
|
||||
tmpl_help.setStyleSheet("color: palette(mid); font-size: 10pt;")
|
||||
layout.addWidget(tmpl_help)
|
||||
|
||||
open_btn = QPushButton("Open Data Folder")
|
||||
open_btn.clicked.connect(self._open_data_folder)
|
||||
layout.addWidget(open_btn)
|
||||
@ -495,7 +552,6 @@ class SettingsDialog(QDialog):
|
||||
# -- Network tab --
|
||||
|
||||
def _build_network_tab(self) -> QWidget:
|
||||
from ..core.cache import get_connection_log
|
||||
w = QWidget()
|
||||
layout = QVBoxLayout(w)
|
||||
|
||||
@ -652,6 +708,18 @@ class SettingsDialog(QDialog):
|
||||
QMessageBox.information(self, "Done", f"Evicted {count} files.")
|
||||
self._refresh_stats()
|
||||
|
||||
def _clear_tag_cache(self) -> None:
|
||||
reply = QMessageBox.question(
|
||||
self, "Confirm",
|
||||
"Wipe the tag category cache for every site? This also clears "
|
||||
"the per-site batch-API probe result, so the app will re-probe "
|
||||
"Gelbooru/Moebooru backends on next use.",
|
||||
QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No,
|
||||
)
|
||||
if reply == QMessageBox.StandardButton.Yes:
|
||||
count = self._db.clear_tag_cache()
|
||||
QMessageBox.information(self, "Done", f"Deleted {count} tag-type rows.")
|
||||
|
||||
def _bl_export(self) -> None:
|
||||
from .dialogs import save_file
|
||||
path = save_file(self, "Export Blacklist", "blacklist.txt", "Text (*.txt)")
|
||||
@ -750,7 +818,8 @@ class SettingsDialog(QDialog):
|
||||
|
||||
# -- Save --
|
||||
|
||||
def _save_and_close(self) -> None:
|
||||
def _apply(self) -> None:
|
||||
"""Write all settings to DB and emit settings_changed."""
|
||||
self._db.set_setting("page_size", str(self._page_size.value()))
|
||||
self._db.set_setting("thumbnail_size", str(self._thumb_size.value()))
|
||||
self._db.set_setting("default_rating", self._default_rating.currentText())
|
||||
@ -759,8 +828,14 @@ class SettingsDialog(QDialog):
|
||||
self._db.set_setting("preload_thumbnails", "1" if self._preload.isChecked() else "0")
|
||||
self._db.set_setting("prefetch_mode", self._prefetch_combo.currentText())
|
||||
self._db.set_setting("infinite_scroll", "1" if self._infinite_scroll.isChecked() else "0")
|
||||
self._db.set_setting("unbookmark_on_save", "1" if self._unbookmark_on_save.isChecked() else "0")
|
||||
self._db.set_setting("search_history_enabled", "1" if self._search_history.isChecked() else "0")
|
||||
self._db.set_setting("flip_layout", "1" if self._flip_layout.isChecked() else "0")
|
||||
self._db.set_setting("slideshow_monitor", self._monitor_combo.currentText())
|
||||
_anchor_rmap = {"Center": "center", "Top-left": "tl", "Top-right": "tr", "Bottom-left": "bl", "Bottom-right": "br"}
|
||||
self._db.set_setting("popout_anchor", _anchor_rmap.get(self._popout_anchor.currentText(), "center"))
|
||||
self._db.set_setting("library_dir", self._library_dir.text().strip())
|
||||
self._db.set_setting("library_filename_template", self._library_filename_template.text().strip())
|
||||
self._db.set_setting("max_cache_mb", str(self._max_cache.value()))
|
||||
self._db.set_setting("max_thumb_cache_mb", str(self._max_thumb_cache.value()))
|
||||
self._db.set_setting("auto_evict", "1" if self._auto_evict.isChecked() else "0")
|
||||
@ -775,5 +850,10 @@ class SettingsDialog(QDialog):
|
||||
self._db.add_blacklisted_tag(tag)
|
||||
if self._file_dialog_combo is not None:
|
||||
self._db.set_setting("file_dialog_platform", self._file_dialog_combo.currentText())
|
||||
from .dialogs import reset_gtk_cache
|
||||
reset_gtk_cache()
|
||||
self.settings_changed.emit()
|
||||
|
||||
def _save_and_close(self) -> None:
|
||||
self._apply()
|
||||
self.accept()
|
||||
|
||||
@ -2,10 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import threading
|
||||
|
||||
from PySide6.QtCore import Qt, Signal, QMetaObject, Q_ARG, Qt as QtNS
|
||||
from PySide6.QtCore import Qt, Signal
|
||||
from PySide6.QtWidgets import (
|
||||
QDialog,
|
||||
QVBoxLayout,
|
||||
@ -22,16 +19,34 @@ from PySide6.QtWidgets import (
|
||||
|
||||
from ..core.db import Database, Site
|
||||
from ..core.api.detect import detect_site_type
|
||||
from ..core.concurrency import run_on_app_loop
|
||||
|
||||
|
||||
class SiteDialog(QDialog):
|
||||
"""Dialog to add or edit a booru site."""
|
||||
|
||||
# Internal signals used to marshal worker results back to the GUI thread.
|
||||
# Connected with QueuedConnection so emit() from the asyncio loop thread
|
||||
# is always delivered on the Qt main thread.
|
||||
_detect_done_sig = Signal(object, object) # (result_or_None, error_or_None)
|
||||
_test_done_sig = Signal(bool, str)
|
||||
|
||||
def __init__(self, parent: QWidget | None = None, site: Site | None = None) -> None:
|
||||
super().__init__(parent)
|
||||
self._editing = site is not None
|
||||
self.setWindowTitle("Edit Site" if self._editing else "Add Site")
|
||||
self.setMinimumWidth(400)
|
||||
# Set when the dialog is closed/destroyed so in-flight worker
|
||||
# callbacks can short-circuit instead of poking a dead QObject.
|
||||
self._closed = False
|
||||
# Tracked so we can cancel pending coroutines on close.
|
||||
self._inflight = [] # list[concurrent.futures.Future]
|
||||
self._detect_done_sig.connect(
|
||||
self._detect_finished, Qt.ConnectionType.QueuedConnection
|
||||
)
|
||||
self._test_done_sig.connect(
|
||||
self._test_finished, Qt.ConnectionType.QueuedConnection
|
||||
)
|
||||
|
||||
layout = QVBoxLayout(self)
|
||||
|
||||
@ -102,16 +117,22 @@ class SiteDialog(QDialog):
|
||||
api_key = self._key_input.text().strip() or None
|
||||
api_user = self._user_input.text().strip() or None
|
||||
|
||||
def _run():
|
||||
async def _do_detect():
|
||||
try:
|
||||
result = asyncio.run(detect_site_type(url, api_key=api_key, api_user=api_user))
|
||||
self._detect_finished(result, None)
|
||||
result = await detect_site_type(url, api_key=api_key, api_user=api_user)
|
||||
if not self._closed:
|
||||
self._detect_done_sig.emit(result, None)
|
||||
except Exception as e:
|
||||
self._detect_finished(None, e)
|
||||
if not self._closed:
|
||||
self._detect_done_sig.emit(None, e)
|
||||
|
||||
threading.Thread(target=_run, daemon=True).start()
|
||||
fut = run_on_app_loop(_do_detect())
|
||||
self._inflight.append(fut)
|
||||
fut.add_done_callback(lambda f: self._inflight.remove(f) if f in self._inflight else None)
|
||||
|
||||
def _detect_finished(self, result: str | None, error: Exception | None) -> None:
|
||||
def _detect_finished(self, result, error) -> None:
|
||||
if self._closed:
|
||||
return
|
||||
self._detect_btn.setEnabled(True)
|
||||
if error:
|
||||
self._status_label.setText(f"Error: {error}")
|
||||
@ -132,28 +153,45 @@ class SiteDialog(QDialog):
|
||||
self._status_label.setText("Testing connection...")
|
||||
self._test_btn.setEnabled(False)
|
||||
|
||||
def _run():
|
||||
import asyncio
|
||||
from ..core.api.detect import client_for_type
|
||||
from ..core.api.detect import client_for_type
|
||||
|
||||
async def _do_test():
|
||||
try:
|
||||
client = client_for_type(api_type, url, api_key=api_key, api_user=api_user)
|
||||
ok, detail = asyncio.run(client.test_connection())
|
||||
self._test_finished(ok, detail)
|
||||
ok, detail = await client.test_connection()
|
||||
if not self._closed:
|
||||
self._test_done_sig.emit(ok, detail)
|
||||
except Exception as e:
|
||||
self._test_finished(False, str(e))
|
||||
if not self._closed:
|
||||
self._test_done_sig.emit(False, str(e))
|
||||
|
||||
threading.Thread(target=_run, daemon=True).start()
|
||||
fut = run_on_app_loop(_do_test())
|
||||
self._inflight.append(fut)
|
||||
fut.add_done_callback(lambda f: self._inflight.remove(f) if f in self._inflight else None)
|
||||
|
||||
def _test_finished(self, ok: bool, detail: str) -> None:
|
||||
if self._closed:
|
||||
return
|
||||
self._test_btn.setEnabled(True)
|
||||
if ok:
|
||||
self._status_label.setText(f"Connected! {detail}")
|
||||
else:
|
||||
self._status_label.setText(f"Failed: {detail}")
|
||||
|
||||
def closeEvent(self, event) -> None:
|
||||
# Mark closed first so in-flight callbacks short-circuit, then
|
||||
# cancel anything still pending so we don't tie up the loop.
|
||||
self._closed = True
|
||||
for fut in list(self._inflight):
|
||||
try:
|
||||
fut.cancel()
|
||||
except Exception:
|
||||
pass
|
||||
super().closeEvent(event)
|
||||
|
||||
def _try_parse_url(self, text: str) -> None:
|
||||
"""Strip query params from pasted URLs like https://gelbooru.com/index.php?page=post&s=list&tags=all."""
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
from urllib.parse import urlparse
|
||||
text = text.strip()
|
||||
if "?" not in text:
|
||||
return
|
||||
|
||||
299
booru_viewer/gui/window_state.py
Normal file
299
booru_viewer/gui/window_state.py
Normal file
@ -0,0 +1,299 @@
|
||||
"""Main-window geometry and splitter persistence."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .main_window import BooruApp
|
||||
|
||||
log = logging.getLogger("booru")
|
||||
|
||||
|
||||
# -- Pure functions (tested in tests/gui/test_window_state.py) --
|
||||
|
||||
|
||||
def parse_geometry(s: str) -> tuple[int, int, int, int] | None:
|
||||
"""Parse ``"x,y,w,h"`` into a 4-tuple of ints, or *None* on bad input."""
|
||||
if not s:
|
||||
return None
|
||||
parts = s.split(",")
|
||||
if len(parts) != 4:
|
||||
return None
|
||||
try:
|
||||
vals = tuple(int(p) for p in parts)
|
||||
except ValueError:
|
||||
return None
|
||||
return vals # type: ignore[return-value]
|
||||
|
||||
|
||||
def format_geometry(x: int, y: int, w: int, h: int) -> str:
    """Serialize geometry ints into the ``"x,y,w,h"`` DB string."""
    return ",".join(map(str, (x, y, w, h)))
||||
|
||||
|
||||
def parse_splitter_sizes(s: str, expected: int) -> list[int] | None:
|
||||
"""Parse ``"a,b,..."`` into a list of *expected* non-negative ints.
|
||||
|
||||
Returns *None* when the string is empty, has the wrong count, contains
|
||||
non-numeric values, any value is negative, or every value is zero (an
|
||||
all-zero splitter is a transient state that should not be persisted).
|
||||
"""
|
||||
if not s:
|
||||
return None
|
||||
parts = s.split(",")
|
||||
if len(parts) != expected:
|
||||
return None
|
||||
try:
|
||||
sizes = [int(p) for p in parts]
|
||||
except ValueError:
|
||||
return None
|
||||
if any(v < 0 for v in sizes):
|
||||
return None
|
||||
if all(v == 0 for v in sizes):
|
||||
return None
|
||||
return sizes
|
||||
|
||||
|
||||
def build_hyprctl_restore_cmds(
    addr: str,
    x: int,
    y: int,
    w: int,
    h: int,
    want_floating: bool,
    cur_floating: bool,
) -> list[str]:
    """Build the ``hyprctl --batch`` command list to restore window state.

    When *want_floating* is True, ensures the window is floating then
    resizes/moves. When False, primes Hyprland's per-window floating cache
    by briefly toggling to floating (wrapped in ``no_anim``), then ends on
    tiled so a later mid-session float-toggle picks up the saved dimensions.
    """
    toggle = f"dispatch togglefloating address:{addr}"
    resize = f"dispatch resizewindowpixel exact {w} {h},address:{addr}"
    move = f"dispatch movewindowpixel exact {x} {y},address:{addr}"

    if want_floating:
        # Only toggle when the window is not already floating.
        out = [] if cur_floating else [toggle]
        out += [resize, move]
        return out

    # Tiled target: suppress animations, float briefly to apply geometry,
    # then toggle back to tiled and re-enable animations.
    out = [f"dispatch setprop address:{addr} no_anim 1"]
    if not cur_floating:
        out.append(toggle)
    out += [resize, move, toggle, f"dispatch setprop address:{addr} no_anim 0"]
    return out
|
||||
|
||||
# -- Controller --
|
||||
|
||||
|
||||
class WindowStateController:
    """Owns main-window geometry persistence and Hyprland IPC."""

    def __init__(self, app: BooruApp) -> None:
        # Back-reference to the main window; this controller reads its
        # splitters, DB handle, and Qt geometry through it.
        self._app = app

    # -- Splitter persistence --

    def save_main_splitter_sizes(self) -> None:
        """Persist the main grid/preview splitter sizes (debounced).

        Refuses to save when either side is collapsed (size 0). The user can
        end up with a collapsed right panel transiently -- e.g. while the
        popout is open and the right panel is empty -- and persisting that
        state traps them next launch with no visible preview area until they
        manually drag the splitter back.
        """
        sizes = self._app._splitter.sizes()
        if len(sizes) >= 2 and all(s > 0 for s in sizes):
            self._app._db.set_setting(
                "main_splitter_sizes", ",".join(str(s) for s in sizes)
            )

    def save_right_splitter_sizes(self) -> None:
        """Persist the right splitter sizes (preview / dl_progress / info).

        Skipped while the popout is open -- the popout temporarily collapses
        the preview pane and gives the info panel the full right column,
        and we don't want that transient layout persisted as the user's
        preferred state.
        """
        if self._app._popout_ctrl.is_active:
            return
        sizes = self._app._right_splitter.sizes()
        # Exactly three panes expected; sum > 0 rejects an all-collapsed
        # transient layout.
        if len(sizes) == 3 and sum(sizes) > 0:
            self._app._db.set_setting(
                "right_splitter_sizes", ",".join(str(s) for s in sizes)
            )

    # -- Hyprland IPC --

    def hyprctl_main_window(self) -> dict | None:
        """Look up this main window in hyprctl clients. None off Hyprland.

        Matches by Wayland app_id (Hyprland reports it as ``class``), which is
        set in run() via setDesktopFileName. Title would also work but it
        changes whenever the search bar updates the window title -- class is
        constant for the lifetime of the window.
        """
        if not os.environ.get("HYPRLAND_INSTANCE_SIGNATURE"):
            return None
        try:
            result = subprocess.run(
                ["hyprctl", "clients", "-j"],
                capture_output=True, text=True, timeout=1,
            )
            for c in json.loads(result.stdout):
                cls = c.get("class") or c.get("initialClass")
                if cls == "booru-viewer":
                    # Skip the popout -- it shares our class but has a
                    # distinct title we set explicitly.
                    if (c.get("title") or "").endswith("Popout"):
                        continue
                    return c
        except Exception:
            # hyprctl unavailable (non-Hyprland session), timed out,
            # or produced invalid JSON. Caller treats None as
            # "no Hyprland-visible main window" and falls back to
            # Qt's own geometry tracking.
            pass
        return None

    # -- Window state save / restore --

    def save_main_window_state(self) -> None:
        """Persist the main window's last mode and (separately) the last
        known floating geometry.

        Two settings keys are used:
        - main_window_was_floating ("1" / "0"): the *last* mode the window
          was in (floating or tiled). Updated on every save.
        - main_window_floating_geometry ("x,y,w,h"): the position+size the
          window had the *last time it was actually floating*. Only updated
          when the current state is floating, so a tile->close->reopen->float
          sequence still has the user's old floating dimensions to use.

        This split is important because Hyprland's resizeEvent for a tiled
        window reports the tile slot size -- saving that into the floating
        slot would clobber the user's chosen floating dimensions every time
        they tiled the window.
        """
        try:
            win = self.hyprctl_main_window()
            if win is None:
                # Non-Hyprland fallback: just track Qt's frameGeometry as
                # floating. There's no real tiled concept off-Hyprland.
                g = self._app.frameGeometry()
                self._app._db.set_setting(
                    "main_window_floating_geometry",
                    format_geometry(g.x(), g.y(), g.width(), g.height()),
                )
                self._app._db.set_setting("main_window_was_floating", "1")
                return
            floating = bool(win.get("floating"))
            self._app._db.set_setting(
                "main_window_was_floating", "1" if floating else "0"
            )
            # "at" / "size" are hyprctl's [x, y] and [w, h] pairs.
            if floating and win.get("at") and win.get("size"):
                x, y = win["at"]
                w, h = win["size"]
                self._app._db.set_setting(
                    "main_window_floating_geometry", format_geometry(x, y, w, h)
                )
            # When tiled, intentionally do NOT touch floating_geometry --
            # preserve the last good floating dimensions.
        except Exception:
            # Geometry persistence is best-effort; swallowing here
            # beats crashing closeEvent over a hyprctl timeout or a
            # setting-write race. Next save attempt will retry.
            pass

    def restore_main_window_state(self) -> None:
        """One-shot restore of saved floating geometry and last mode.

        Called from __init__ via QTimer.singleShot(0, ...) so it fires on the
        next event-loop iteration -- by which time the window has been shown
        and (on Hyprland) registered with the compositor.

        Entirely skipped when BOORU_VIEWER_NO_HYPR_RULES is set -- that flag
        means the user wants their own windowrules to handle the main
        window. Even seeding Qt's geometry could fight a ``windowrule = size``,
        so we leave the initial Qt geometry alone too.
        """
        from ..core.config import hypr_rules_enabled
        if not hypr_rules_enabled():
            return
        # Migration: clear obsolete keys from earlier schemas so they can't
        # interfere. main_window_maximized came from a buggy version that
        # used Qt's isMaximized() which lies for Hyprland tiled windows.
        # main_window_geometry was the combined-format key that's now split.
        for stale in ("main_window_maximized", "main_window_geometry"):
            if self._app._db.get_setting(stale):
                self._app._db.set_setting(stale, "")

        floating_geo = self._app._db.get_setting("main_window_floating_geometry")
        was_floating = self._app._db.get_setting_bool("main_window_was_floating")
        if not floating_geo:
            return
        geo = parse_geometry(floating_geo)
        if geo is None:
            return
        x, y, w, h = geo
        # Seed Qt with the floating geometry -- even if we're going to leave
        # the window tiled now, this becomes the xdg-toplevel preferred size,
        # which Hyprland uses when the user later toggles to floating. So
        # mid-session float-toggle picks up the saved dimensions even when
        # the window opened tiled.
        self._app.setGeometry(x, y, w, h)
        if not os.environ.get("HYPRLAND_INSTANCE_SIGNATURE"):
            return
        # Slight delay so the window is registered before we try to find
        # its address. The popout uses the same pattern.
        from PySide6.QtCore import QTimer
        QTimer.singleShot(
            50, lambda: self.hyprctl_apply_main_state(x, y, w, h, was_floating)
        )

    def hyprctl_apply_main_state(
        self, x: int, y: int, w: int, h: int, floating: bool
    ) -> None:
        """Apply saved floating mode + geometry to the main window via hyprctl.

        If floating==True, ensures the window is floating and resizes/moves it
        to the saved dimensions.

        If floating==False, the window is left tiled but we still "prime"
        Hyprland's per-window floating cache by briefly toggling to floating,
        applying the saved geometry, and toggling back. This is wrapped in
        a transient ``no_anim`` so the toggles are instant.

        Skipped entirely when BOORU_VIEWER_NO_HYPR_RULES is set.
        """
        from ..core.config import hypr_rules_enabled
        if not hypr_rules_enabled():
            return
        win = self.hyprctl_main_window()
        if not win:
            return
        addr = win.get("address")
        if not addr:
            return
        cur_floating = bool(win.get("floating"))
        cmds = build_hyprctl_restore_cmds(addr, x, y, w, h, floating, cur_floating)
        if not cmds:
            return
        try:
            # Fire-and-forget: we never wait on hyprctl's exit status.
            subprocess.Popen(
                ["hyprctl", "--batch", " ; ".join(cmds)],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
            )
        except FileNotFoundError:
            pass
||||
@ -25,10 +25,17 @@ def main() -> None:
|
||||
if platform == "gtk":
|
||||
# Use xdg-desktop-portal which routes to GTK portal (Thunar)
|
||||
os.environ.setdefault("QT_QPA_PLATFORMTHEME", "xdgdesktopportal")
|
||||
except Exception:
|
||||
pass
|
||||
except Exception as e:
|
||||
# Surface DB-init failures to stderr — silently swallowing meant
|
||||
# users debugging "why is my file picker the wrong one" had no
|
||||
# signal at all when the DB was missing or corrupt.
|
||||
print(
|
||||
f"booru-viewer: file_dialog_platform DB probe failed: "
|
||||
f"{type(e).__name__}: {e}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
from booru_viewer.gui.app import run
|
||||
from booru_viewer.gui.app_runtime import run
|
||||
run()
|
||||
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
[Setup]
|
||||
AppName=booru-viewer
|
||||
AppVersion=0.2.1
|
||||
AppVersion=0.2.7
|
||||
AppPublisher=pax
|
||||
AppPublisherURL=https://git.pax.moe/pax/booru-viewer
|
||||
DefaultDirName={localappdata}\booru-viewer
|
||||
|
||||
@ -4,14 +4,14 @@ build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "booru-viewer"
|
||||
version = "0.2.1"
|
||||
version = "0.2.7"
|
||||
description = "Local booru image browser with Qt6 GUI"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"httpx[http2]>=0.27",
|
||||
"Pillow>=10.0",
|
||||
"PySide6>=6.6",
|
||||
"python-mpv>=1.0",
|
||||
"httpx>=0.27,<1.0",
|
||||
"Pillow>=10.0,<12.0",
|
||||
"PySide6>=6.6,<7.0",
|
||||
"python-mpv>=1.0,<2.0",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 966 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 972 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 962 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 980 KiB |
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
71
tests/conftest.py
Normal file
71
tests/conftest.py
Normal file
@ -0,0 +1,71 @@
|
||||
"""Shared fixtures for the booru-viewer test suite.
|
||||
|
||||
All fixtures here are pure-Python — no Qt, no mpv, no network. Filesystem
|
||||
writes go through `tmp_path` (or fixtures that wrap it). Module-level globals
|
||||
that the production code mutates (the concurrency loop, the httpx singletons)
|
||||
get reset around each test that touches them.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
def tmp_db(tmp_path):
    """Fresh `Database` instance writing to a temp file. Auto-closes."""
    from booru_viewer.core.db import Database

    database = Database(tmp_path / "test.db")
    yield database
    database.close()
||||
|
||||
|
||||
@pytest.fixture
def tmp_library(tmp_path):
    """Point `saved_dir()` at `tmp_path/saved` for the duration of the test.

    Uses `core.config.set_library_dir` (the official override hook) so the
    redirect goes through the same code path the GUI uses for the
    user-configurable library location. Tear-down restores the previous
    value so tests can run in any order without bleed.
    """
    from booru_viewer.core import config

    lib_dir = tmp_path / "saved"
    lib_dir.mkdir()
    previous = config._library_dir_override
    config.set_library_dir(lib_dir)
    yield lib_dir
    config.set_library_dir(previous)
|
||||
|
||||
|
||||
@pytest.fixture
def reset_app_loop():
    """Reset `concurrency._app_loop` between tests.

    The module global is set once at app startup in production; tests need
    to start from a clean slate to assert the unset-state behavior.
    """
    from booru_viewer.core import concurrency

    saved_loop = concurrency._app_loop
    concurrency._app_loop = None
    yield
    concurrency._app_loop = saved_loop
|
||||
|
||||
|
||||
@pytest.fixture
def reset_shared_clients():
    """Reset both shared httpx singletons (cache module + BooruClient class).

    Both are class/module-level globals; tests that exercise the lazy-init
    + lock pattern need them cleared so the test sees a fresh first-call
    race instead of a leftover instance from a previous test.
    """
    from booru_viewer.core import cache
    from booru_viewer.core.api.base import BooruClient

    saved = (BooruClient._shared_client, cache._shared_client)
    BooruClient._shared_client = None
    cache._shared_client = None
    yield
    BooruClient._shared_client, cache._shared_client = saved
|
||||
0
tests/core/__init__.py
Normal file
0
tests/core/__init__.py
Normal file
0
tests/core/api/__init__.py
Normal file
0
tests/core/api/__init__.py
Normal file
77
tests/core/api/test_base.py
Normal file
77
tests/core/api/test_base.py
Normal file
@ -0,0 +1,77 @@
|
||||
"""Tests for `booru_viewer.core.api.base` — the lazy `_shared_client`
|
||||
singleton on `BooruClient`.
|
||||
|
||||
Locks in the lock-and-recheck pattern at `base.py:90-108`. Without it,
|
||||
two threads racing on first `.client` access would both see
|
||||
`_shared_client is None`, both build an `httpx.AsyncClient`, and one of
|
||||
them would leak (overwritten without aclose).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import threading
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core.api.base import BooruClient
|
||||
|
||||
|
||||
class _StubClient(BooruClient):
    """Concrete subclass so we can instantiate `BooruClient` for the test
    — the base class has abstract `search` / `get_post` methods."""
    api_type = "stub"

    async def search(self, tags="", page=1, limit=40):
        # Inert stub: the singleton test only touches the `.client`
        # property, never a real search.
        return []

    async def get_post(self, post_id):
        # Inert stub — see `search`.
        return None
|
||||
|
||||
|
||||
def test_shared_client_singleton_under_concurrency(reset_shared_clients):
    """N threads racing on first `.client` access must result in exactly
    one `httpx.AsyncClient` constructor call. The threading.Lock guards
    the check-and-set so the second-and-later callers re-read the now-set
    `_shared_client` after acquiring the lock instead of building their
    own."""
    n_threads = 10
    build_count = 0
    count_lock = threading.Lock()

    def _fake_async_client(*args, **kwargs):
        nonlocal build_count
        with count_lock:
            build_count += 1
        stub = MagicMock()
        stub.is_closed = False
        return stub

    # Barrier so all threads hit the property at the same moment
    gate = threading.Barrier(n_threads)
    seen = []
    seen_lock = threading.Lock()

    instance = _StubClient("http://example.test")

    def _race():
        gate.wait()
        shared = instance.client
        with seen_lock:
            seen.append(shared)

    with patch("booru_viewer.core.api.base.httpx.AsyncClient",
               side_effect=_fake_async_client):
        workers = [threading.Thread(target=_race) for _ in range(n_threads)]
        for t in workers:
            t.start()
        for t in workers:
            t.join(timeout=5)

    assert build_count == 1, (
        f"Expected exactly one httpx.AsyncClient construction, "
        f"got {build_count}"
    )
    # All threads got back the same shared instance
    assert len(seen) == n_threads
    assert all(obj is seen[0] for obj in seen)
|
||||
542
tests/core/api/test_category_fetcher.py
Normal file
542
tests/core/api/test_category_fetcher.py
Normal file
@ -0,0 +1,542 @@
|
||||
"""Tests for CategoryFetcher: HTML parser, tag API parser, cache compose,
|
||||
probe persistence, dispatch logic, and canonical ordering.
|
||||
|
||||
All pure Python — no Qt, no network. Uses tmp_db fixture for cache tests
|
||||
and synthetic HTML/JSON/XML for parser tests.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core.api.category_fetcher import (
|
||||
CategoryFetcher,
|
||||
_canonical_order,
|
||||
_parse_post_html,
|
||||
_parse_tag_response,
|
||||
_LABEL_MAP,
|
||||
_GELBOORU_TYPE_MAP,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Synthetic data helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass
class FakePost:
    """Lightweight stand-in for a post: numeric id, space-separated tag
    string, and a mutable tag_categories mapping the fetcher fills in."""
    id: int = 1
    tags: str = ""
    tag_categories: dict = field(default_factory=dict)

    @property
    def tag_list(self) -> list[str]:
        # An empty tag string yields [] rather than [""].
        if not self.tags:
            return []
        return self.tags.split()
|
||||
|
||||
|
||||
class FakeClient:
    """Minimal mock of BooruClient for CategoryFetcher construction."""
    api_key = None
    api_user = None

    def __init__(self, post_view_url=None, tag_api_url=None, api_key=None, api_user=None):
        self._post_url = post_view_url
        self._tag_url = tag_api_url
        self.api_key = api_key
        self.api_user = api_user

    def _post_view_url(self, post):
        # Canned URL, independent of which post is asked about.
        return self._post_url

    def _tag_api_url(self):
        return self._tag_url

    async def _request(self, method, url, params=None):
        # Tests needing network behaviour monkeypatch this attribute.
        raise NotImplementedError("mock _request not configured")
|
||||
|
||||
|
||||
class FakeResponse:
    """Minimal httpx.Response stand-in for parser tests."""

    def __init__(self, text: str, status_code: int = 200):
        self.text = text
        self.status_code = status_code

    def json(self):
        # Decoded lazily so malformed-body tests can trigger the decode error.
        return json.loads(self.text)

    def raise_for_status(self):
        if self.status_code >= 400:
            raise Exception(f"HTTP {self.status_code}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTML parser tests (_parse_post_html)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestParsePostHtml:
    """Drive the two-pass regex HTML parser over synthetic markup samples."""

    def test_rule34_style_two_links(self):
        """Standard Gelbooru-fork layout: ? wiki link + tag search link."""
        html = '''
        <li class="tag-type-character">
        <a href="index.php?page=wiki&s=list&search=hatsune_miku">?</a>
        <a href="index.php?page=post&s=list&tags=hatsune_miku">hatsune miku</a>
        <span class="tag-count">12345</span>
        </li>
        <li class="tag-type-artist">
        <a href="index.php?page=wiki&s=list&search=someartist">?</a>
        <a href="index.php?page=post&s=list&tags=someartist">someartist</a>
        <span class="tag-count">100</span>
        </li>
        <li class="tag-type-general">
        <a href="index.php?page=wiki&s=list&search=1girl">?</a>
        <a href="index.php?page=post&s=list&tags=1girl">1girl</a>
        <span class="tag-count">9999999</span>
        </li>
        '''
        categories, label_map = _parse_post_html(html)
        for expected in ("Character", "Artist", "General"):
            assert expected in categories
        assert categories["Character"] == ["hatsune_miku"]
        assert categories["Artist"] == ["someartist"]
        assert categories["General"] == ["1girl"]
        assert label_map["hatsune_miku"] == "Character"
        assert label_map["someartist"] == "Artist"

    def test_moebooru_style(self):
        """yande.re / Konachan: /post?tags=NAME format."""
        html = '''
        <li class="tag-type-artist">
        <a href="/artist/show?name=anmi">?</a>
        <a href="/post?tags=anmi">anmi</a>
        </li>
        <li class="tag-type-copyright">
        <a href="/wiki/show?title=vocaloid">?</a>
        <a href="/post?tags=vocaloid">vocaloid</a>
        </li>
        '''
        categories, _labels = _parse_post_html(html)
        assert categories["Artist"] == ["anmi"]
        assert categories["Copyright"] == ["vocaloid"]

    def test_combined_class_konachan(self):
        """Konachan uses class="tag-link tag-type-character"."""
        html = '''
        <span class="tag-link tag-type-character">
        <a href="/wiki/show?title=miku">?</a>
        <a href="/post?tags=hatsune_miku">hatsune miku</a>
        </span>
        '''
        categories, _labels = _parse_post_html(html)
        assert categories["Character"] == ["hatsune_miku"]

    def test_gelbooru_proper_returns_empty(self):
        """Gelbooru proper only has ? links with no tags= param."""
        html = '''
        <li class="tag-type-artist">
        <a href="index.php?page=wiki&s=list&search=ooiaooi">?</a>
        </li>
        <li class="tag-type-character">
        <a href="index.php?page=wiki&s=list&search=hatsune_miku">?</a>
        </li>
        '''
        categories, label_map = _parse_post_html(html)
        assert categories == {}
        assert label_map == {}

    def test_metadata_maps_to_meta(self):
        """class="tag-type-metadata" should map to label "Meta"."""
        html = '''
        <li class="tag-type-metadata">
        <a href="?">?</a>
        <a href="index.php?tags=highres">highres</a>
        </li>
        '''
        categories, _labels = _parse_post_html(html)
        assert "Meta" in categories
        assert categories["Meta"] == ["highres"]

    def test_url_encoded_tag_names(self):
        """Tags with special chars get URL-encoded in the href."""
        html = '''
        <li class="tag-type-character">
        <a href="?">?</a>
        <a href="index.php?tags=miku_%28shinkalion%29">miku (shinkalion)</a>
        </li>
        '''
        categories, _labels = _parse_post_html(html)
        assert categories["Character"] == ["miku_(shinkalion)"]

    def test_empty_html(self):
        categories, label_map = _parse_post_html("")
        assert categories == {}
        assert label_map == {}

    def test_no_tag_type_elements(self):
        categories, _labels = _parse_post_html(
            '<div class="content"><p>Hello world</p></div>'
        )
        assert categories == {}

    def test_unknown_type_class_ignored(self):
        """Tag types not in _LABEL_MAP are silently skipped."""
        html = '''
        <li class="tag-type-faults">
        <a href="?">?</a>
        <a href="index.php?tags=broken">broken</a>
        </li>
        '''
        categories, _labels = _parse_post_html(html)
        assert categories == {}

    def test_multiple_tags_same_category(self):
        html = '''
        <li class="tag-type-character">
        <a href="?">?</a>
        <a href="index.php?tags=miku">miku</a>
        </li>
        <li class="tag-type-character">
        <a href="?">?</a>
        <a href="index.php?tags=rin">rin</a>
        </li>
        '''
        categories, _labels = _parse_post_html(html)
        assert categories["Character"] == ["miku", "rin"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tag API response parser tests (_parse_tag_response)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestParseTagResponse:
    """Parse Gelbooru-style tag endpoint payloads (JSON and XML)."""

    def test_json_response(self):
        payload = {
            "@attributes": {"limit": 100, "offset": 0, "count": 2},
            "tag": [
                {"id": 1, "name": "hatsune_miku", "count": 12345, "type": 4, "ambiguous": 0},
                {"id": 2, "name": "1girl", "count": 9999, "type": 0, "ambiguous": 0},
            ],
        }
        pairs = _parse_tag_response(FakeResponse(json.dumps(payload)))
        assert ("hatsune_miku", 4) in pairs
        assert ("1girl", 0) in pairs

    def test_xml_response(self):
        xml = (
            '<?xml version="1.0" encoding="UTF-8"?>'
            '<tags type="array">'
            '<tag type="4" count="12345" name="hatsune_miku" ambiguous="false" id="1"/>'
            '<tag type="0" count="9999" name="1girl" ambiguous="false" id="2"/>'
            '</tags>'
        )
        pairs = _parse_tag_response(FakeResponse(xml))
        assert ("hatsune_miku", 4) in pairs
        assert ("1girl", 0) in pairs

    def test_empty_response(self):
        assert _parse_tag_response(FakeResponse("")) == []

    def test_json_flat_list(self):
        """Some endpoints return a flat list instead of wrapping in {"tag": [...]}."""
        flat = json.dumps([
            {"name": "solo", "type": 0, "count": 5000},
        ])
        assert ("solo", 0) in _parse_tag_response(FakeResponse(flat))

    def test_malformed_xml(self):
        assert _parse_tag_response(FakeResponse("<broken><xml")) == []

    def test_malformed_json(self):
        assert _parse_tag_response(FakeResponse("{not valid json!!!")) == []
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Canonical ordering
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestCanonicalOrder:
    """_canonical_order sorts category keys into the canonical display order."""

    def test_standard_order(self):
        ordered = _canonical_order({
            "General": ["1girl"],
            "Artist": ["anmi"],
            "Meta": ["highres"],
            "Character": ["miku"],
            "Copyright": ["vocaloid"],
        })
        assert list(ordered) == ["Artist", "Character", "Copyright", "General", "Meta"]

    def test_species_position(self):
        # Species (e621-style) slots between Copyright and General.
        ordered = _canonical_order({
            "General": ["1girl"],
            "Species": ["cat_girl"],
            "Artist": ["anmi"],
        })
        assert list(ordered) == ["Artist", "Species", "General"]

    def test_unknown_category_appended(self):
        # Categories outside the canonical list keep a place at the end.
        ordered = _canonical_order({
            "Artist": ["anmi"],
            "Circle": ["some_circle"],
        })
        keys = list(ordered)
        assert "Artist" in keys
        assert "Circle" in keys
        assert keys.index("Artist") < keys.index("Circle")

    def test_empty_dict(self):
        assert _canonical_order({}) == {}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Cache compose (try_compose_from_cache)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestCacheCompose:
    """try_compose_from_cache: build tag_categories purely from cached labels."""

    @staticmethod
    def _fetcher(db, site_id=1):
        # Fresh fetcher over a throwaway FakeClient; no network involved.
        return CategoryFetcher(FakeClient(), db, site_id=site_id)

    def test_full_coverage_returns_true(self, tmp_db):
        fetcher = self._fetcher(tmp_db)
        tmp_db.set_tag_labels(1, {
            "1girl": "General",
            "hatsune_miku": "Character",
            "vocaloid": "Copyright",
        })
        post = FakePost(tags="1girl hatsune_miku vocaloid")
        assert fetcher.try_compose_from_cache(post) is True
        for label in ("Character", "Copyright", "General"):
            assert label in post.tag_categories

    def test_partial_coverage_returns_false_but_populates(self, tmp_db):
        fetcher = self._fetcher(tmp_db)
        tmp_db.set_tag_labels(1, {"hatsune_miku": "Character"})
        post = FakePost(tags="1girl hatsune_miku vocaloid")
        assert fetcher.try_compose_from_cache(post) is False
        # Whatever IS cached still lands on the post.
        assert "Character" in post.tag_categories
        assert post.tag_categories["Character"] == ["hatsune_miku"]

    def test_zero_coverage_returns_false(self, tmp_db):
        fetcher = self._fetcher(tmp_db)
        post = FakePost(tags="1girl hatsune_miku vocaloid")
        assert fetcher.try_compose_from_cache(post) is False
        assert post.tag_categories == {}

    def test_empty_tags_returns_true(self, tmp_db):
        # A post with no tags is trivially fully covered.
        assert self._fetcher(tmp_db).try_compose_from_cache(FakePost(tags="")) is True

    def test_canonical_order_applied(self, tmp_db):
        fetcher = self._fetcher(tmp_db)
        tmp_db.set_tag_labels(1, {
            "1girl": "General",
            "anmi": "Artist",
            "miku": "Character",
        })
        post = FakePost(tags="1girl anmi miku")
        fetcher.try_compose_from_cache(post)
        assert list(post.tag_categories) == ["Artist", "Character", "General"]

    def test_per_site_isolation(self, tmp_db):
        tmp_db.set_tag_labels(1, {"miku": "Character"})
        # Site 2 has nothing cached.
        assert self._fetcher(tmp_db, site_id=1).try_compose_from_cache(
            FakePost(tags="miku")) is True
        assert self._fetcher(tmp_db, site_id=2).try_compose_from_cache(
            FakePost(tags="miku")) is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Probe persistence
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestProbePersistence:
    """_batch_api_works probe results must round-trip through the tag cache."""

    def test_initial_state_none(self, tmp_db):
        assert CategoryFetcher(FakeClient(), tmp_db, site_id=1)._batch_api_works is None

    def test_save_true_persists(self, tmp_db):
        CategoryFetcher(FakeClient(), tmp_db, site_id=1)._save_probe_result(True)
        # A newly constructed fetcher reloads the saved flag.
        assert CategoryFetcher(FakeClient(), tmp_db, site_id=1)._batch_api_works is True

    def test_save_false_persists(self, tmp_db):
        CategoryFetcher(FakeClient(), tmp_db, site_id=1)._save_probe_result(False)
        assert CategoryFetcher(FakeClient(), tmp_db, site_id=1)._batch_api_works is False

    def test_per_site_isolation(self, tmp_db):
        CategoryFetcher(FakeClient(), tmp_db, site_id=1)._save_probe_result(True)
        CategoryFetcher(FakeClient(), tmp_db, site_id=2)._save_probe_result(False)
        assert CategoryFetcher(FakeClient(), tmp_db, site_id=1)._batch_api_works is True
        assert CategoryFetcher(FakeClient(), tmp_db, site_id=2)._batch_api_works is False

    def test_clear_tag_cache_wipes_probe(self, tmp_db):
        CategoryFetcher(FakeClient(), tmp_db, site_id=1)._save_probe_result(True)
        tmp_db.clear_tag_cache(site_id=1)
        assert CategoryFetcher(FakeClient(), tmp_db, site_id=1)._batch_api_works is None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Batch API availability check
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestBatchApiAvailable:
    """_batch_api_available requires both a tag API URL and credentials."""

    def test_available_with_url_and_auth(self, tmp_db):
        client = FakeClient(tag_api_url="http://example.com", api_key="k", api_user="u")
        assert CategoryFetcher(client, tmp_db, site_id=1)._batch_api_available() is True

    def test_not_available_without_url(self, tmp_db):
        client = FakeClient(api_key="k", api_user="u")
        assert CategoryFetcher(client, tmp_db, site_id=1)._batch_api_available() is False

    def test_not_available_without_auth(self, tmp_db):
        client = FakeClient(tag_api_url="http://example.com")
        assert CategoryFetcher(client, tmp_db, site_id=1)._batch_api_available() is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Label map and type map coverage
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestMaps:
    """Coverage checks for the type-name and Gelbooru numeric-type maps."""

    def test_label_map_covers_common_types(self):
        common = ("general", "artist", "character", "copyright",
                  "metadata", "meta", "species")
        for name in common:
            assert name in _LABEL_MAP

    def test_gelbooru_type_map_covers_standard_codes(self):
        expected = {
            0: "General",
            1: "Artist",
            3: "Copyright",
            4: "Character",
            5: "Meta",
        }
        for code, label in expected.items():
            assert _GELBOORU_TYPE_MAP[code] == label
        assert 2 not in _GELBOORU_TYPE_MAP  # Deprecated intentionally omitted
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _do_ensure dispatch — regression cover for transient-error poisoning
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestDoEnsureProbeRouting:
    """When _batch_api_works is None, _do_ensure must route through
    _probe_batch_api so transient errors stay transient. The prior
    implementation called fetch_via_tag_api directly and inferred
    False from empty tag_categories — but fetch_via_tag_api swallows
    per-chunk exceptions, so a network drop silently poisoned the
    probe flag to False for the whole site."""

    @staticmethod
    def _run(coro):
        # asyncio.run() creates AND closes a fresh event loop. The previous
        # asyncio.new_event_loop().run_until_complete(...) pattern leaked
        # one unclosed loop per call (ResourceWarning / fd leak) and never
        # set the loop as current.
        return asyncio.run(coro)

    def test_transient_error_leaves_flag_none(self, tmp_db):
        """All chunks fail → _batch_api_works must stay None,
        not flip to False."""
        client = FakeClient(
            tag_api_url="http://example.com/tags",
            api_key="k",
            api_user="u",
        )

        async def raising_request(method, url, params=None):
            raise RuntimeError("network down")
        client._request = raising_request

        fetcher = CategoryFetcher(client, tmp_db, site_id=1)
        assert fetcher._batch_api_works is None
        post = FakePost(tags="miku 1girl")

        self._run(fetcher._do_ensure(post))

        assert fetcher._batch_api_works is None, (
            "Transient error must not poison the probe flag"
        )
        # Persistence side: nothing was saved
        reloaded = CategoryFetcher(FakeClient(), tmp_db, site_id=1)
        assert reloaded._batch_api_works is None

    def test_clean_200_zero_matches_flips_to_false(self, tmp_db):
        """Clean HTTP 200 + no names matching the request → flips
        the flag to False (structurally broken endpoint)."""
        client = FakeClient(
            tag_api_url="http://example.com/tags",
            api_key="k",
            api_user="u",
        )

        async def empty_ok_request(method, url, params=None):
            # 200 with a valid but empty tag list
            return FakeResponse(
                json.dumps({"@attributes": {"count": 0}, "tag": []}),
                status_code=200,
            )
        client._request = empty_ok_request

        fetcher = CategoryFetcher(client, tmp_db, site_id=1)
        post = FakePost(tags="definitely_not_a_real_tag")

        self._run(fetcher._do_ensure(post))

        assert fetcher._batch_api_works is False, (
            "Clean 200 with zero matches must flip flag to False"
        )
        reloaded = CategoryFetcher(FakeClient(), tmp_db, site_id=1)
        assert reloaded._batch_api_works is False

    def test_non_200_leaves_flag_none(self, tmp_db):
        """500-family responses are transient, must not poison."""
        client = FakeClient(
            tag_api_url="http://example.com/tags",
            api_key="k",
            api_user="u",
        )

        async def five_hundred(method, url, params=None):
            return FakeResponse("", status_code=503)
        client._request = five_hundred

        fetcher = CategoryFetcher(client, tmp_db, site_id=1)
        post = FakePost(tags="miku")

        self._run(fetcher._do_ensure(post))

        assert fetcher._batch_api_works is None
|
||||
217
tests/core/api/test_safety.py
Normal file
217
tests/core/api/test_safety.py
Normal file
@ -0,0 +1,217 @@
|
||||
"""Tests for the shared network-safety helpers (SSRF guard + secret redaction)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import socket
|
||||
from unittest.mock import patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core.api._safety import (
|
||||
SECRET_KEYS,
|
||||
check_public_host,
|
||||
redact_params,
|
||||
redact_url,
|
||||
validate_public_request,
|
||||
)
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# SSRF guard — finding #1
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_public_v4_literal_passes():
    # Well-known public resolvers must pass the guard without raising.
    for ip in ("8.8.8.8", "1.1.1.1"):
        check_public_host(ip)


def test_loopback_v4_rejected():
    for ip in ("127.0.0.1", "127.0.0.53"):
        with pytest.raises(httpx.RequestError):
            check_public_host(ip)


def test_cloud_metadata_ip_rejected():
    """169.254.169.254 — AWS/GCE/Azure metadata service."""
    with pytest.raises(httpx.RequestError):
        check_public_host("169.254.169.254")


def test_rfc1918_rejected():
    # One address from each RFC 1918 private block.
    for ip in ("10.0.0.1", "172.16.5.4", "192.168.1.1"):
        with pytest.raises(httpx.RequestError):
            check_public_host(ip)


def test_cgnat_rejected():
    # 100.64.0.0/10 — carrier-grade NAT shared address space.
    with pytest.raises(httpx.RequestError):
        check_public_host("100.64.0.1")


def test_multicast_v4_rejected():
    with pytest.raises(httpx.RequestError):
        check_public_host("224.0.0.1")


def test_ipv6_loopback_rejected():
    with pytest.raises(httpx.RequestError):
        check_public_host("::1")


def test_ipv6_unique_local_rejected():
    for ip in ("fc00::1", "fd12:3456:789a::1"):
        with pytest.raises(httpx.RequestError):
            check_public_host(ip)


def test_ipv6_link_local_rejected():
    with pytest.raises(httpx.RequestError):
        check_public_host("fe80::1")


def test_ipv6_multicast_rejected():
    with pytest.raises(httpx.RequestError):
        check_public_host("ff02::1")


def test_public_v6_passes():
    # Google DNS
    check_public_host("2001:4860:4860::8888")
|
||||
|
||||
|
||||
def _resolve_to(ip):
    """Return a fake socket.getaddrinfo that always resolves to `ip`."""
    def _fake(*a, **kw):
        return [(socket.AF_INET, 0, 0, "", (ip, 0))]
    return _fake


def test_hostname_dns_failure_raises():
    def _gaierror(*a, **kw):
        raise socket.gaierror(-2, "Name or service not known")
    with patch("socket.getaddrinfo", _gaierror):
        with pytest.raises(httpx.RequestError):
            check_public_host("nonexistent.test.invalid")


def test_hostname_resolving_to_loopback_rejected():
    with patch("socket.getaddrinfo", _resolve_to("127.0.0.1")):
        with pytest.raises(httpx.RequestError, match="blocked request target"):
            check_public_host("mean.example")


def test_hostname_resolving_to_metadata_rejected():
    with patch("socket.getaddrinfo", _resolve_to("169.254.169.254")):
        with pytest.raises(httpx.RequestError):
            check_public_host("stolen.example")


def test_hostname_resolving_to_public_passes():
    with patch("socket.getaddrinfo", _resolve_to("8.8.8.8")):
        check_public_host("dns.google")


def test_hostname_with_mixed_results_rejected_on_any_private():
    """If any resolved address is private, reject — conservative."""
    def _fake(*a, **kw):
        return [
            (socket.AF_INET, 0, 0, "", ("8.8.8.8", 0)),
            (socket.AF_INET, 0, 0, "", ("127.0.0.1", 0)),
        ]
    with patch("socket.getaddrinfo", _fake):
        with pytest.raises(httpx.RequestError):
            check_public_host("dualhomed.example")


def test_empty_host_passes():
    """Edge case: httpx can call us with a relative URL mid-redirect."""
    check_public_host("")
|
||||
|
||||
|
||||
def test_validate_public_request_hook_rejects_metadata():
    """The async hook is invoked via asyncio.run() instead of
    pytest-asyncio so the test runs on CI (which only installs
    httpx + Pillow + pytest)."""
    req = httpx.Request("GET", "http://169.254.169.254/latest/meta-data/")
    with pytest.raises(httpx.RequestError):
        asyncio.run(validate_public_request(req))


def test_validate_public_request_hook_allows_public():
    def _public_only(*a, **kw):
        return [(socket.AF_INET, 0, 0, "", ("8.8.8.8", 0))]
    with patch("socket.getaddrinfo", _public_only):
        req = httpx.Request("GET", "https://example.test/")
        asyncio.run(validate_public_request(req))  # must not raise
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Credential redaction — finding #3
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_secret_keys_covers_all_booru_client_params():
    """Every secret query param used by any booru client must be in SECRET_KEYS."""
    # Danbooru: login + api_key
    # e621: login + api_key
    # Gelbooru: api_key + user_id
    # Moebooru: login + password_hash
    for key in ("login", "api_key", "user_id", "password_hash"):
        assert key in SECRET_KEYS


def test_redact_url_replaces_secrets():
    out = redact_url("https://x.test/posts.json?login=alice&api_key=supersecret&tags=cats")
    assert "alice" not in out
    assert "supersecret" not in out
    assert "tags=cats" in out
    # The "***" placeholder survives re-encoding as %2A%2A%2A.
    assert "login=%2A%2A%2A" in out
    assert "api_key=%2A%2A%2A" in out


def test_redact_url_leaves_non_secret_params_alone():
    url = "https://x.test/posts.json?tags=cats&limit=50"
    assert redact_url(url) == "https://x.test/posts.json?tags=cats&limit=50"


def test_redact_url_no_query_passthrough():
    for url in ("https://x.test/", "https://x.test/posts.json"):
        assert redact_url(url) == url


def test_redact_url_password_hash_and_user_id():
    out = redact_url(
        "https://x.test/post.json?login=a&password_hash=b&user_id=42&tags=cats"
    )
    assert "password_hash=%2A%2A%2A" in out
    assert "user_id=%2A%2A%2A" in out
    assert "tags=cats" in out


def test_redact_url_preserves_fragment_and_path():
    out = redact_url("https://x.test/a/b/c?api_key=secret#frag")
    assert out.startswith("https://x.test/a/b/c?")
    assert out.endswith("#frag")


def test_redact_params_replaces_secrets():
    out = redact_params({"api_key": "s", "tags": "cats", "login": "alice"})
    assert out["api_key"] == "***"
    assert out["login"] == "***"
    assert out["tags"] == "cats"


def test_redact_params_empty():
    assert redact_params({}) == {}


def test_redact_params_no_secrets():
    out = redact_params({"tags": "cats", "limit": 50})
    assert out == {"tags": "cats", "limit": 50}
|
||||
388
tests/core/test_cache.py
Normal file
388
tests/core/test_cache.py
Normal file
@ -0,0 +1,388 @@
|
||||
"""Tests for `booru_viewer.core.cache` — Referer hostname matching, ugoira
|
||||
zip-bomb defenses, download size caps, and validity-check fallback.
|
||||
|
||||
Locks in:
|
||||
- `_referer_for` proper hostname suffix matching (`54ccc40` security fix)
|
||||
guarding against `imgblahgelbooru.attacker.com` mapping to gelbooru.com
|
||||
- `_convert_ugoira_to_gif` cap enforcement (frame count + uncompressed size)
|
||||
before any decompression — defense against ugoira zip bombs
|
||||
- `_do_download` MAX_DOWNLOAD_BYTES enforcement, both the Content-Length
|
||||
pre-check and the running-total chunk-loop guard
|
||||
- `_is_valid_media` returning True on OSError so a transient EBUSY/lock
|
||||
doesn't kick off a delete + re-download loop
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import io
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core import cache
|
||||
from booru_viewer.core.cache import (
|
||||
MAX_DOWNLOAD_BYTES,
|
||||
_convert_ugoira_to_gif,
|
||||
_do_download,
|
||||
_is_valid_media,
|
||||
_referer_for,
|
||||
)
|
||||
|
||||
|
||||
# -- _referer_for hostname suffix matching --
|
||||
|
||||
def test_referer_for_exact_and_suffix_match():
    """Real booru hostnames map to the canonical Referer for their CDN.

    Exact match and subdomain-suffix match both rewrite the Referer host
    to the canonical apex (gelbooru → `gelbooru.com`, donmai →
    `danbooru.donmai.us`). The actual request netloc is dropped — the
    point is to look like a navigation from the canonical site.
    """
    cases = [
        # gelbooru exact host
        ("https://gelbooru.com/index.php", "https://gelbooru.com/"),
        # gelbooru subdomain rewrites to the canonical apex
        ("https://img3.gelbooru.com/images/abc.jpg", "https://gelbooru.com/"),
        # donmai exact host
        ("https://donmai.us/posts/123", "https://danbooru.donmai.us/"),
        # donmai subdomain rewrites to the canonical danbooru host
        ("https://safebooru.donmai.us/posts/123", "https://danbooru.donmai.us/"),
    ]
    for url, expected in cases:
        assert _referer_for(urlparse(url)) == expected


def test_referer_for_rejects_substring_attacker():
    """An attacker host that contains `gelbooru.com` or `donmai.us` as a
    SUBSTRING (not a hostname suffix) must NOT pick up the booru Referer.

    Without proper suffix matching, `imgblahgelbooru.attacker.com` would
    leak the gelbooru Referer to the attacker — that's the `54ccc40`
    security fix.
    """
    referer = _referer_for(urlparse("https://imgblahgelbooru.attacker.com/x.jpg"))
    assert "gelbooru.com" not in referer
    assert "imgblahgelbooru.attacker.com" in referer

    referer = _referer_for(urlparse("https://donmai.us.attacker.com/x.jpg"))
    assert "danbooru.donmai.us" not in referer
    assert "donmai.us.attacker.com" in referer

    # Completely unrelated host preserved as-is
    assert _referer_for(urlparse("https://example.test/x.jpg")) == "https://example.test/"
|
||||
|
||||
|
||||
# -- Ugoira zip-bomb defenses --
|
||||
|
||||
def _build_ugoira_zip(path: Path, n_frames: int, frame_bytes: bytes = b"x") -> Path:
|
||||
"""Build a synthetic ugoira-shaped zip with `n_frames` numbered .jpg
|
||||
entries. Content is whatever the caller passes; defaults to 1 byte.
|
||||
|
||||
The cap-enforcement tests don't need decodable JPEGs — the cap fires
|
||||
before any decode happens. The filenames just need .jpg suffixes so
|
||||
`_convert_ugoira_to_gif` recognizes them as frames.
|
||||
"""
|
||||
with zipfile.ZipFile(path, "w") as zf:
|
||||
for i in range(n_frames):
|
||||
zf.writestr(f"{i:04d}.jpg", frame_bytes)
|
||||
return path
|
||||
|
||||
|
||||
def test_ugoira_frame_count_cap_rejects_bomb(tmp_path, monkeypatch):
    """A zip holding more than `UGOIRA_MAX_FRAMES` frames must be refused
    BEFORE any decompression happens. The cap is monkeypatched down so a
    3-entry zip suffices instead of a 5001-entry one — the check is
    cap > N, not cap == 5000."""
    monkeypatch.setattr(cache, "UGOIRA_MAX_FRAMES", 2)
    bomb = _build_ugoira_zip(tmp_path / "bomb.zip", n_frames=3)
    gif = bomb.with_suffix(".gif")

    outcome = _convert_ugoira_to_gif(bomb)

    # The refusal path hands back the original zip ...
    assert outcome == bomb
    # ... and never writes a .gif.
    assert not gif.exists()
|
||||
|
||||
|
||||
def test_ugoira_uncompressed_size_cap_rejects_bomb(tmp_path, monkeypatch):
    """A zip whose summed `ZipInfo.file_size` headers exceed
    `UGOIRA_MAX_UNCOMPRESSED_BYTES` must be refused before any
    decompression. The cap is monkeypatched down to keep the fixture
    small."""
    monkeypatch.setattr(cache, "UGOIRA_MAX_UNCOMPRESSED_BYTES", 50)
    # 3 frames x 100 bytes = 300 declared bytes, well past the 50-byte cap.
    bomb = _build_ugoira_zip(
        tmp_path / "bomb.zip", n_frames=3, frame_bytes=b"x" * 100
    )
    gif = bomb.with_suffix(".gif")

    outcome = _convert_ugoira_to_gif(bomb)

    assert outcome == bomb
    assert not gif.exists()
|
||||
|
||||
|
||||
# -- _do_download MAX_DOWNLOAD_BYTES caps --
|
||||
|
||||
|
||||
class _FakeHeaders:
|
||||
def __init__(self, mapping):
|
||||
self._m = mapping
|
||||
def get(self, key, default=None):
|
||||
return self._m.get(key.lower(), default)
|
||||
|
||||
|
||||
class _FakeResponse:
    """Fake httpx response: canned headers plus a fixed chunk sequence."""

    def __init__(self, headers, chunks):
        # Lowercase header names up front so _FakeHeaders.get matches.
        lowered = {k.lower(): v for k, v in headers.items()}
        self.headers = _FakeHeaders(lowered)
        self._chunks = chunks

    def raise_for_status(self):
        # Always behaves as a 200 OK for these tests.
        pass

    async def aiter_bytes(self, _size):
        # The requested chunk size is ignored; replay the canned chunks.
        for chunk in self._chunks:
            yield chunk
|
||||
|
||||
|
||||
class _FakeStreamCtx:
|
||||
def __init__(self, response):
|
||||
self._resp = response
|
||||
async def __aenter__(self):
|
||||
return self._resp
|
||||
async def __aexit__(self, *_args):
|
||||
return False
|
||||
|
||||
|
||||
class _FakeClient:
    """Fake httpx.AsyncClient exposing only the `stream` method that
    `_do_download` uses."""

    def __init__(self, response):
        self._resp = response

    def stream(self, _method, _url, headers=None):
        # Method, URL and headers are irrelevant here; always serve the
        # canned response via a fake stream context.
        return _FakeStreamCtx(self._resp)
|
||||
|
||||
|
||||
def test_download_cap_content_length_pre_check(tmp_path):
    """An advertised Content-Length above MAX_DOWNLOAD_BYTES must make
    `_do_download` raise BEFORE a single byte is iterated. This cheap
    pre-check defeats the trivial OOM/disk-fill attack — streaming never
    even starts."""
    oversized = MAX_DOWNLOAD_BYTES + 1
    response = _FakeResponse(
        headers={"content-type": "image/jpeg", "content-length": str(oversized)},
        chunks=[b"never read"],
    )
    client = _FakeClient(response)
    target = tmp_path / "out.jpg"

    with pytest.raises(ValueError, match="Download too large"):
        asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, target, None))

    # Nothing may have landed on disk.
    assert not target.exists()
|
||||
|
||||
|
||||
def test_download_cap_running_total_aborts(tmp_path, monkeypatch):
    """Content-Length can lie, so the chunk loop must enforce a
    running-total cap of its own and abort mid-stream the moment
    cumulative bytes pass `MAX_DOWNLOAD_BYTES`. The cap is monkeypatched
    down to 1024 to keep the test fast."""
    monkeypatch.setattr(cache, "MAX_DOWNLOAD_BYTES", 1024)
    # A Content-Length of 0 means "unknown": the small-payload branch
    # runs and the in-loop running-total guard is what must fire.
    response = _FakeResponse(
        headers={"content-type": "image/jpeg", "content-length": "0"},
        chunks=[b"x" * 600, b"x" * 600],  # 1200 cumulative > 1024 cap
    )
    client = _FakeClient(response)
    target = tmp_path / "out.jpg"

    with pytest.raises(ValueError, match="exceeded cap mid-stream"):
        asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, target, None))

    # Buffered writes only happen after the loop completes, so aborting
    # mid-stream leaves no file behind.
    assert not target.exists()
|
||||
|
||||
|
||||
# -- _looks_like_media (audit finding #10) --
|
||||
|
||||
|
||||
def test_looks_like_media_jpeg_magic_recognised():
    """A JPEG SOI/JFIF header must be classified as media."""
    from booru_viewer.core.cache import _looks_like_media

    jfif_header = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01"
    assert _looks_like_media(jfif_header) is True
|
||||
|
||||
|
||||
def test_looks_like_media_png_magic_recognised():
    """The 8-byte PNG signature (followed by IHDR) must be classified
    as media."""
    from booru_viewer.core.cache import _looks_like_media

    png_header = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR"
    assert _looks_like_media(png_header) is True
|
||||
|
||||
|
||||
def test_looks_like_media_webm_magic_recognised():
    """The EBML header bytes (1A 45 DF A3) that open Matroska/WebM
    files must be classified as media."""
    from booru_viewer.core.cache import _looks_like_media

    ebml_header = b"\x1aE\xdf\xa3" + b"\x00" * 20
    assert _looks_like_media(ebml_header) is True
|
||||
|
||||
|
||||
def test_looks_like_media_html_rejected():
    """HTML payloads — both a doctype and a bare <html> opener — must
    be rejected."""
    from booru_viewer.core.cache import _looks_like_media

    assert _looks_like_media(b"<!doctype html><html><body>") is False
    assert _looks_like_media(b"<html><head>") is False
|
||||
|
||||
|
||||
def test_looks_like_media_empty_rejected():
    """Zero bytes means the server returned nothing usable — fail
    closed here (unlike the on-disk validator's open-on-error
    fallback)."""
    from booru_viewer.core.cache import _looks_like_media

    assert _looks_like_media(b"") is False
|
||||
|
||||
|
||||
def test_looks_like_media_unknown_magic_accepted():
    """Bytes that are neither HTML nor a known magic are conservatively
    accepted — some boorus serve exotic-but-legal containers we don't
    enumerate."""
    from booru_viewer.core.cache import _looks_like_media

    assert _looks_like_media(b"random non-html data ") is True
|
||||
|
||||
|
||||
# -- _do_download early header validation (audit finding #10) --
|
||||
|
||||
|
||||
def test_do_download_early_rejects_html_payload(tmp_path):
    """HTML in the body from a hostile server (which omits Content-Type
    so the early text/html header guard can't fire) must be caught by
    the magic-byte check before anything is written to disk.
    Audit finding #10: previously the full download completed before
    _is_valid_media rejected it, wasting bandwidth."""
    response = _FakeResponse(
        # No Content-Type header; content-length "0" means length unknown.
        headers={"content-length": "0"},
        chunks=[b"<!doctype html><html><body>500</body></html>"],
    )
    client = _FakeClient(response)
    target = tmp_path / "out.jpg"

    with pytest.raises(ValueError, match="not valid media"):
        asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, target, None))

    assert not target.exists()
|
||||
|
||||
|
||||
def test_do_download_early_rejects_html_across_tiny_chunks(tmp_path):
    """Chunks smaller than the 16-byte minimum must be accumulated
    before the magic-byte check, so a server trickling a byte or two at
    a time can't sneak HTML past it."""
    drip = [b"<!", b"do", b"ct", b"yp", b"e ", b"ht", b"ml", b">", b"x" * 100]
    response = _FakeResponse(
        headers={"content-length": "0"},
        chunks=drip,
    )
    client = _FakeClient(response)
    target = tmp_path / "out.jpg"

    with pytest.raises(ValueError, match="not valid media"):
        asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, target, None))

    assert not target.exists()
|
||||
|
||||
|
||||
def test_do_download_writes_valid_jpeg_after_early_validation(tmp_path):
    """A genuine JPEG-like header clears the early check, and the entire
    stream — header bytes included — must reach disk verbatim (nothing
    silently dropped)."""
    body = b"\xff\xd8\xff\xe0\x00\x10JFIF\x00\x01" + b"PAYLOAD" + b"\xff\xd9"
    response = _FakeResponse(
        headers={"content-length": str(len(body)), "content-type": "image/jpeg"},
        chunks=[body[:8], body[8:]],  # deliberately split inside the magic
    )
    client = _FakeClient(response)
    target = tmp_path / "out.jpg"

    asyncio.run(_do_download(client, "http://example.test/x.jpg", {}, target, None))

    assert target.exists()
    assert target.read_bytes() == body
|
||||
|
||||
|
||||
# -- _is_valid_media OSError fallback --
|
||||
|
||||
def test_is_valid_media_returns_true_on_oserror(tmp_path):
    """When the file can't even be opened (transient EBUSY, a lock,
    permissions), `_is_valid_media` must answer True so callers keep the
    cached file. Returning False — the old behavior — triggered a
    delete + re-download loop on every access for as long as the OS
    problem lasted."""
    missing = tmp_path / "definitely-not-here.jpg"
    assert _is_valid_media(missing) is True
|
||||
|
||||
|
||||
# -- _url_locks LRU cap (audit finding #5) --
|
||||
|
||||
def test_url_locks_capped_at_max():
    """The per-URL coalesce lock table may never exceed _URL_LOCKS_MAX
    entries. Uncapped, a long browsing session (or a booru serving
    cache-buster query strings) would leak one Lock per unique URL
    until OOM."""
    cache._url_locks.clear()
    try:
        overshoot = cache._URL_LOCKS_MAX + 500
        for i in range(overshoot):
            cache._get_url_lock(f"hash{i}")
        assert len(cache._url_locks) <= cache._URL_LOCKS_MAX
    finally:
        cache._url_locks.clear()
|
||||
|
||||
|
||||
def test_url_locks_returns_same_lock_for_same_hash():
    """Repeated lookups of one hash must yield the identical Lock
    object — coalescing is the table's entire purpose."""
    cache._url_locks.clear()
    try:
        first = cache._get_url_lock("hashA")
        second = cache._get_url_lock("hashA")
        assert first is second
    finally:
        cache._url_locks.clear()
|
||||
|
||||
|
||||
def test_url_locks_lru_keeps_recently_used():
    """LRU semantics: re-touching a hash moves it to the end of the
    OrderedDict (youngest position), so eviction would pick an older
    entry first."""
    cache._url_locks.clear()
    try:
        cache._get_url_lock("oldest")
        cache._get_url_lock("middle")
        cache._get_url_lock("oldest")  # re-touch: "oldest" is now youngest
        # Expected order after move_to_end on the touch: middle, oldest.
        assert list(cache._url_locks.keys()) == ["middle", "oldest"]
    finally:
        cache._url_locks.clear()
|
||||
|
||||
|
||||
def test_url_locks_eviction_skips_held_locks():
    """A lock currently held (a coroutine is inside `async with` on it)
    must survive eviction — popping it would break that coroutine's
    __aexit__. The eviction loop checks `lock.locked()` and skips it."""
    cache._url_locks.clear()
    try:
        # Seed one entry, then hold it across a table overflow.
        held = cache._get_url_lock("held_hash")

        async def hold_and_fill():
            async with held:
                # While the lock is held, overflow the table to force
                # evictions.
                for i in range(cache._URL_LOCKS_MAX + 100):
                    cache._get_url_lock(f"new{i}")
                # The held entry must not have been evicted.
                assert "held_hash" in cache._url_locks

        asyncio.run(hold_and_fill())
    finally:
        cache._url_locks.clear()
|
||||
62
tests/core/test_concurrency.py
Normal file
62
tests/core/test_concurrency.py
Normal file
@ -0,0 +1,62 @@
|
||||
"""Tests for `booru_viewer.core.concurrency` — the persistent-loop handle.
|
||||
|
||||
Locks in:
|
||||
- `get_app_loop` raises a clear RuntimeError if `set_app_loop` was never
|
||||
called (the production code uses this to bail loudly when async work
|
||||
is scheduled before the loop thread starts)
|
||||
- `run_on_app_loop` round-trips a coroutine result from a worker-thread
|
||||
loop back to the calling thread via `concurrent.futures.Future`
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import threading
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core import concurrency
|
||||
from booru_viewer.core.concurrency import (
|
||||
get_app_loop,
|
||||
run_on_app_loop,
|
||||
set_app_loop,
|
||||
)
|
||||
|
||||
|
||||
def test_get_app_loop_raises_before_set(reset_app_loop):
    """Calling `get_app_loop` before `set_app_loop` is a configuration
    error: it must raise a clear RuntimeError so callers fail loudly
    instead of scheduling work onto a None loop."""
    with pytest.raises(RuntimeError, match="not initialized"):
        get_app_loop()
|
||||
|
||||
|
||||
def test_run_on_app_loop_round_trips_result(reset_app_loop):
    """Run a real asyncio loop on a worker thread, register it with
    `set_app_loop`, then schedule a coroutine from the main (test)
    thread via `run_on_app_loop` and read the result back through the
    `concurrent.futures.Future` interface."""
    loop = asyncio.new_event_loop()
    started = threading.Event()

    def _spin():
        asyncio.set_event_loop(loop)
        started.set()
        loop.run_forever()

    worker = threading.Thread(target=_spin, daemon=True)
    worker.start()
    started.wait(timeout=2)

    try:
        set_app_loop(loop)

        async def _produce():
            return 42

        future = run_on_app_loop(_produce())
        assert future.result(timeout=2) == 42
    finally:
        loop.call_soon_threadsafe(loop.stop)
        worker.join(timeout=2)
        loop.close()
|
||||
145
tests/core/test_config.py
Normal file
145
tests/core/test_config.py
Normal file
@ -0,0 +1,145 @@
|
||||
"""Tests for `booru_viewer.core.config` — path traversal guard on
|
||||
`saved_folder_dir` and the shallow walk in `find_library_files`.
|
||||
|
||||
Locks in:
|
||||
- `saved_folder_dir` resolve-and-relative_to check (`54ccc40` defense in
|
||||
depth alongside `_validate_folder_name`)
|
||||
- `find_library_files` matching exactly the root + 1-level subdirectory
|
||||
layout that the library uses, with the right MEDIA_EXTENSIONS filter
|
||||
- `data_dir` chmods its directory to 0o700 on POSIX (audit #4)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core import config
|
||||
from booru_viewer.core.config import find_library_files, saved_folder_dir
|
||||
|
||||
|
||||
# -- saved_folder_dir traversal guard --
|
||||
|
||||
def test_saved_folder_dir_rejects_dotdot(tmp_library):
    """Any name that resolves outside `saved_dir()` must raise
    ValueError rather than silently mkdir somewhere unexpected. Only
    literal `..` shapes are exercised — symlink escapes are
    filesystem-dependent and flaky in tests."""
    for escape in ("..", "../escape", "foo/../.."):
        with pytest.raises(ValueError, match="escapes saved directory"):
            saved_folder_dir(escape)
|
||||
|
||||
|
||||
# -- find_library_files shallow walk --
|
||||
|
||||
def test_find_library_files_walks_root_and_one_level(tmp_library):
    """The library layout is flat: `saved/<post_id>.<ext>` at the root
    or `saved/<folder>/<post_id>.<ext>` one level down. The walk must
    find matches at both depths, filter on MEDIA_EXTENSIONS (skip
    .txt), and match the exact stem (skip unrelated post ids)."""
    (tmp_library / "123.jpg").write_bytes(b"")              # root-level hit
    (tmp_library / "folder1").mkdir()
    (tmp_library / "folder1" / "123.png").write_bytes(b"")  # one level deep
    (tmp_library / "folder2").mkdir()
    (tmp_library / "folder2" / "456.gif").write_bytes(b"")  # wrong post id
    (tmp_library / "123.txt").write_bytes(b"")              # wrong extension

    found = {p.name for p in find_library_files(123)}

    assert found == {"123.jpg", "123.png"}
|
||||
|
||||
|
||||
# -- data_dir permissions (audit finding #4) --
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
def test_data_dir_chmod_700(tmp_path, monkeypatch):
    """On POSIX, `data_dir()` must chmod the platform data dir to 0o700
    so the SQLite DB (and the api_key columns inside it) can't be read
    by other local users on shared machines or networked home dirs."""
    monkeypatch.setenv("XDG_DATA_HOME", str(tmp_path))

    path = config.data_dir()
    mode = os.stat(path).st_mode & 0o777
    assert mode == 0o700, f"expected 0o700, got {oct(mode)}"

    # Idempotence: a second call must leave the mode at 0o700.
    config.data_dir()
    mode_after = os.stat(path).st_mode & 0o777
    assert mode_after == 0o700
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
def test_data_dir_tightens_loose_existing_perms(tmp_path, monkeypatch):
    """A directory left at 0o755 by an older version (or external
    tooling) must be tightened back to 0o700 by the next data_dir()
    call."""
    monkeypatch.setenv("XDG_DATA_HOME", str(tmp_path))
    existing = tmp_path / config.APPNAME
    existing.mkdir()
    os.chmod(existing, 0o755)

    config.data_dir()

    assert os.stat(existing).st_mode & 0o777 == 0o700
|
||||
|
||||
|
||||
# -- render_filename_template Windows reserved names (finding #7) --
|
||||
|
||||
|
||||
def _fake_post(tag_categories=None, **overrides):
    """Return a minimal Post-like object for render_filename_template.

    A real Post only needs file_url + tag_categories here; the defaults
    suffice for the reserved-name tests, which inspect just the
    artist/character tokens. Only `id` and `file_url` overrides are
    honored.
    """
    from booru_viewer.core.api.base import Post

    post_id = overrides.get("id", 999)
    url = overrides.get("file_url", "https://x.test/abc.jpg")
    return Post(
        id=post_id,
        file_url=url,
        preview_url=None,
        tags="",
        score=0,
        rating=None,
        source=None,
        tag_categories=tag_categories or {},
    )
|
||||
|
||||
|
||||
@pytest.mark.parametrize("reserved", [
    "con", "CON", "prn", "PRN", "aux", "AUX", "nul", "NUL",
    "com1", "COM1", "com9", "lpt1", "LPT1", "lpt9",
])
def test_render_filename_template_prefixes_reserved_names(reserved):
    """A tag rendering to a Windows reserved device name must come out
    prefixed with `_` so the resulting filename can't redirect to a
    device on Windows. Audit finding #7."""
    post = _fake_post(tag_categories={"Artist": [reserved]})
    rendered = config.render_filename_template("%artist%", post, ext=".jpg")
    # The stem (before the extension) must no longer BE a reserved name.
    stem = rendered.split(".", 1)[0]
    assert stem.lower() != reserved.lower()
    assert stem.startswith("_")
|
||||
|
||||
|
||||
def test_render_filename_template_passes_normal_names_unchanged():
    """Ordinary, non-reserved tags must come through without a prefix."""
    post = _fake_post(tag_categories={"Artist": ["miku"]})
    rendered = config.render_filename_template("%artist%", post, ext=".jpg")
    assert rendered == "miku.jpg"
|
||||
|
||||
|
||||
def test_render_filename_template_reserved_with_extension_in_template():
    """`con.jpg` built from a tag-only stem must still be caught: CON
    is reserved regardless of any dot or extension that follows it."""
    post = _fake_post(tag_categories={"Artist": ["con"]})
    rendered = config.render_filename_template("%artist%.%ext%", post, ext=".jpg")
    assert not rendered.startswith("con")
    assert rendered.startswith("_con")
|
||||
243
tests/core/test_db.py
Normal file
243
tests/core/test_db.py
Normal file
@ -0,0 +1,243 @@
|
||||
"""Tests for `booru_viewer.core.db` — folder name validation, INSERT OR
|
||||
IGNORE collision handling, and LIKE escaping.
|
||||
|
||||
These tests lock in the `54ccc40` security/correctness fixes:
|
||||
- `_validate_folder_name` rejects path-traversal shapes before they hit the
|
||||
filesystem in `saved_folder_dir`
|
||||
- `add_bookmark` re-SELECTs the actual row id after an INSERT OR IGNORE
|
||||
collision so the returned `Bookmark.id` is never the bogus 0 that broke
|
||||
`update_bookmark_cache_path`
|
||||
- `get_bookmarks` escapes the SQL LIKE wildcards `_` and `%` so a search for
|
||||
`cat_ear` doesn't bleed into `catear` / `catXear`
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core.db import _validate_folder_name
|
||||
|
||||
|
||||
# -- _validate_folder_name --
|
||||
|
||||
def test_validate_folder_name_rejects_traversal():
    """Every shape that could escape the saved-images dir or touch a
    hidden file must raise ValueError. The cases are listed one per
    line so a failure names the exact offending shape."""
    rejected = [
        "",          # empty
        "..",        # dotdot literal
        ".",         # dot literal
        "/foo",      # forward slash
        "foo/bar",   # embedded forward slash
        "\\foo",     # backslash
        ".hidden",   # leading dot
        "~user",     # leading tilde
    ]
    for bad in rejected:
        with pytest.raises(ValueError):
            _validate_folder_name(bad)
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
def test_db_file_chmod_600(tmp_db):
    """Audit finding #4: on POSIX the SQLite file must be 0o600 so the
    plaintext api_key/api_user columns can't be read by other local
    users on shared workstations."""
    # First access of .conn runs _restrict_perms(); tmp_db already did
    # that via add_site/etc., but touching it here makes the assertion
    # independent of fixture ordering.
    _ = tmp_db.conn
    mode = os.stat(tmp_db._path).st_mode & 0o777
    assert mode == 0o600, f"expected 0o600, got {oct(mode)}"
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only chmod check")
def test_db_wal_sidecar_chmod_600(tmp_db):
    """PRAGMA journal_mode=WAL creates a -wal sidecar holding in-flight
    transactions — including the freshest api_key writes — so it needs
    the same 0o600 protection as the main DB file."""
    # A write guarantees the WAL sidecar actually exists ...
    tmp_db.add_site("test", "http://example.test", "danbooru")
    # ... then re-run the chmod pass now that it does.
    tmp_db._restrict_perms()

    wal = type(tmp_db._path)(str(tmp_db._path) + "-wal")
    if wal.exists():
        mode = os.stat(wal).st_mode & 0o777
        assert mode == 0o600, f"expected 0o600 on WAL sidecar, got {oct(mode)}"
|
||||
|
||||
|
||||
def test_validate_folder_name_accepts_unicode_and_punctuation():
    """Common real-world folder names must pass through unchanged: the
    guard exists to block escape shapes, not normal naming. A dot is
    fine anywhere except the leading position."""
    accepted = ("miku(lewd)", "cat ear", "日本語", "foo-bar", "foo.bar")
    for name in accepted:
        assert _validate_folder_name(name) == name
|
||||
|
||||
|
||||
# -- add_bookmark INSERT OR IGNORE collision --
|
||||
|
||||
def test_add_bookmark_collision_returns_existing_id(tmp_db):
    """A second `add_bookmark` with the same (site_id, post_id) must
    hand back the existing row id — not the stale `lastrowid` of 0 that
    INSERT OR IGNORE leaves behind. Without the re-SELECT fix, any
    downstream `update_bookmark_cache_path(id=0, ...)` silently no-ops
    and the cache-path linkage breaks."""
    site = tmp_db.add_site("test", "http://example.test", "danbooru")
    bookmark_kwargs = dict(
        site_id=site.id, post_id=42, file_url="http://example.test/42.jpg",
        preview_url=None, tags="cat",
    )
    first = tmp_db.add_bookmark(**bookmark_kwargs)
    second = tmp_db.add_bookmark(**bookmark_kwargs)
    assert first.id != 0
    assert second.id == first.id
|
||||
|
||||
|
||||
# -- get_bookmarks LIKE escaping --
|
||||
|
||||
def test_get_bookmarks_like_escaping(tmp_db):
    """Searching for the literal tag `cat_ear` must NOT match `catear`
    or `catXear`: SQLite's LIKE treats `_` as a single-char wildcard
    unless explicitly escaped, so without `ESCAPE '\\\\'` the search
    would return all three rows."""
    site = tmp_db.add_site("test", "http://example.test", "danbooru")
    for post_id, tags in ((1, "cat_ear"), (2, "catear"), (3, "catXear")):
        tmp_db.add_bookmark(
            site_id=site.id, post_id=post_id,
            file_url=f"http://example.test/{post_id}.jpg",
            preview_url=None, tags=tags,
        )

    hits = tmp_db.get_bookmarks(search="cat_ear")

    assert {b.tags for b in hits} == {"cat_ear"}
|
||||
|
||||
|
||||
# -- delete_site cascading cleanup --
|
||||
|
||||
def _seed_site(db, name, site_id_out=None):
    """Create a site named *name* and populate every child table for it:
    one bookmark, one search-history row, one saved search, and one tag
    label. Returns the created site.

    NOTE(review): `site_id_out` is accepted but never used in this body
    — looks vestigial; confirm no caller relies on it before removing.
    """
    site = db.add_site(name, f"http://{name}.test", "danbooru")
    db.add_bookmark(
        site_id=site.id, post_id=1, file_url=f"http://{name}.test/1.jpg",
        preview_url=None, tags="test",
    )
    db.add_search_history("test query", site_id=site.id)
    db.add_saved_search("my search", "saved query", site_id=site.id)
    db.set_tag_labels(site.id, {"artist:bob": "artist"})
    return site
|
||||
|
||||
|
||||
def _count_rows(db, table, site_id, *, id_col="site_id"):
    """Return how many rows of *table* reference *site_id* via *id_col*.

    *table* and *id_col* come from trusted test constants, so f-string
    interpolation into the SQL is acceptable here.
    """
    query = f"SELECT COUNT(*) FROM {table} WHERE {id_col} = ?"
    row = db.conn.execute(query, (site_id,)).fetchone()
    return row[0]
|
||||
|
||||
|
||||
def test_delete_site_cascades_all_related_rows(tmp_db):
    """Removing a site must clear its rows from all five related tables."""
    site = _seed_site(tmp_db, "doomed")

    tmp_db.delete_site(site.id)

    assert _count_rows(tmp_db, "sites", site.id, id_col="id") == 0
    for child in ("favorites", "tag_types", "search_history", "saved_searches"):
        assert _count_rows(tmp_db, child, site.id) == 0
||||
|
||||
|
||||
def test_delete_site_does_not_affect_other_sites(tmp_db):
    """Deleting site A must leave every one of site B's rows untouched."""
    site_a = _seed_site(tmp_db, "site-a")
    site_b = _seed_site(tmp_db, "site-b")
    tables = ("sites", "favorites", "tag_types", "search_history", "saved_searches")

    def _col(table):
        # The sites table keys on "id"; all child tables on "site_id".
        return "id" if table == "sites" else "site_id"

    before = {t: _count_rows(tmp_db, t, site_b.id, id_col=_col(t)) for t in tables}

    tmp_db.delete_site(site_a.id)

    for table, expected in before.items():
        assert _count_rows(tmp_db, table, site_b.id, id_col=_col(table)) == expected, (
            f"{table} rows for site B changed after deleting site A"
        )
|
||||
|
||||
|
||||
# -- reconcile_library_meta --
|
||||
|
||||
def test_reconcile_library_meta_removes_orphans(tmp_db, tmp_library):
    """Rows whose files vanished from disk are dropped; rows whose
    files are still present survive."""
    (tmp_library / "12345.jpg").write_bytes(b"\xff")
    tmp_db.save_library_meta(post_id=12345, tags="test", filename="12345.jpg")
    tmp_db.save_library_meta(post_id=99999, tags="orphan", filename="99999.jpg")

    removed = tmp_db.reconcile_library_meta()

    assert removed == 1
    assert tmp_db.is_post_in_library(12345) is True
    assert tmp_db.is_post_in_library(99999) is False
|
||||
|
||||
|
||||
def test_reconcile_library_meta_skips_empty_dir(tmp_db, tmp_library):
    """An empty library dir may just mean an unmounted drive — the
    reconcile must refuse to run and keep the orphan rows intact."""
    tmp_db.save_library_meta(post_id=12345, tags="test", filename="12345.jpg")

    removed = tmp_db.reconcile_library_meta()

    assert removed == 0
    assert tmp_db.is_post_in_library(12345) is True
|
||||
|
||||
|
||||
# -- tag cache pruning --
|
||||
|
||||
def test_prune_tag_cache(tmp_db):
    """With more tags inserted than the cap allows, pruning must keep
    only the newest entries (by fetched_at)."""
    from booru_viewer.core.db import Database

    saved_cap = Database._TAG_CACHE_MAX_ROWS
    try:
        Database._TAG_CACHE_MAX_ROWS = 5

        site = tmp_db.add_site("test", "http://test.test", "danbooru")

        # Insert 8 rows with explicit, strictly increasing fetched_at
        # timestamps so the pruning order is deterministic.
        with tmp_db._write():
            for i in range(8):
                tmp_db.conn.execute(
                    "INSERT OR REPLACE INTO tag_types "
                    "(site_id, name, label, fetched_at) VALUES (?, ?, ?, ?)",
                    (site.id, f"tag_{i}", "general", f"2025-01-01T00:00:{i:02d}Z"),
                )
            tmp_db._prune_tag_cache()

        remaining = tmp_db.conn.execute("SELECT COUNT(*) FROM tag_types").fetchone()[0]
        assert remaining == 5

        surviving = {
            r["name"]
            for r in tmp_db.conn.execute("SELECT name FROM tag_types").fetchall()
        }
        # tag_0..tag_2 were the oldest and should have been pruned.
        assert surviving == {"tag_3", "tag_4", "tag_5", "tag_6", "tag_7"}
    finally:
        Database._TAG_CACHE_MAX_ROWS = saved_cap
|
||||
128
tests/core/test_library_save.py
Normal file
128
tests/core/test_library_save.py
Normal file
@ -0,0 +1,128 @@
|
||||
"""Tests for save_post_file.
|
||||
|
||||
Pins the contract that category_fetcher is a *required* keyword arg
|
||||
(no silent default) so a forgotten plumb can't result in a save that
|
||||
drops category tokens from the filename template.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import inspect
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.core.library_save import save_post_file
|
||||
|
||||
|
||||
@dataclass
class FakePost:
    """Minimal stand-in for the real Post model.

    Carries only the attributes ``save_post_file`` reads; the defaults
    mirror a typical post so each test overrides just what it asserts on.
    """

    id: int = 12345
    tags: str = "1girl greatartist"
    tag_categories: dict = field(default_factory=dict)
    score: int = 0
    rating: str = ""
    source: str = ""
    file_url: str = ""
|
||||
|
||||
class PopulatingFetcher:
    """Fake category fetcher: ``ensure_categories`` writes the configured
    mapping onto the post from scratch, emulating the HTML-scrape /
    batch-API happy path. Tracks how many times it was invoked."""

    def __init__(self, categories: dict[str, list[str]]):
        self._categories = categories
        self.calls = 0

    async def ensure_categories(self, post) -> None:
        self.calls += 1
        # Hand out a copy so the post can't mutate our template mapping.
        post.tag_categories = self._categories.copy()
|
||||
|
||||
def _run(coro):
|
||||
return asyncio.new_event_loop().run_until_complete(coro)
|
||||
|
||||
|
||||
def test_category_fetcher_is_keyword_only_and_required():
    """Signature contract: ``category_fetcher`` is keyword-only with no
    default, so every call site must spell it out (even as None)."""
    params = inspect.signature(save_post_file).parameters
    fetcher_param = params["category_fetcher"]
    assert fetcher_param.kind == inspect.Parameter.KEYWORD_ONLY, (
        "category_fetcher should be keyword-only"
    )
    assert fetcher_param.default is inspect.Parameter.empty, (
        "category_fetcher must not have a default — forcing every caller "
        "to pass it (even as None) is the whole point of this contract"
    )
|
||||
|
||||
|
||||
def test_template_category_populated_via_fetcher(tmp_path, tmp_db):
    """Empty tag_categories + a template using %artist% + a working
    fetcher → the saved filename carries the fetched artist instead of
    falling back to the bare id."""
    source_file = tmp_path / "src.jpg"
    source_file.write_bytes(b"fake-image-bytes")
    dest_dir = tmp_path / "dest"

    tmp_db.set_setting("library_filename_template", "%artist%_%id%")

    post = FakePost(id=12345, tag_categories={})
    fetcher = PopulatingFetcher({"Artist": ["greatartist"]})

    result = _run(
        save_post_file(
            source_file, post, dest_dir, tmp_db,
            category_fetcher=fetcher,
        )
    )

    assert fetcher.calls == 1, "fetcher should be invoked exactly once"
    assert result.name == "greatartist_12345.jpg", (
        f"expected templated filename, got {result.name!r}"
    )
    assert result.exists()
|
||||
|
||||
|
||||
def test_none_fetcher_accepted_when_categories_prepopulated(tmp_path, tmp_db):
    """Pass-None contract: sites like Danbooru/e621 return ``None`` from
    ``_get_category_fetcher`` because Post already arrives with
    tag_categories populated. ``save_post_file`` must accept an explicit
    None — the contract forces callers to think, not to forbid None."""
    source_file = tmp_path / "src.jpg"
    source_file.write_bytes(b"x")
    dest_dir = tmp_path / "dest"

    tmp_db.set_setting("library_filename_template", "%artist%_%id%")

    post = FakePost(id=999, tag_categories={"Artist": ["inlineartist"]})

    result = _run(
        save_post_file(
            source_file, post, dest_dir, tmp_db,
            category_fetcher=None,
        )
    )

    assert result.name == "inlineartist_999.jpg"
    assert result.exists()
|
||||
|
||||
|
||||
def test_fetcher_not_called_when_template_has_no_category_tokens(tmp_path, tmp_db):
    """Purely-id template → ``ensure_categories`` is never invoked, even
    when categories are empty (the fetch is expensive and would be
    wasted)."""
    source_file = tmp_path / "src.jpg"
    source_file.write_bytes(b"x")
    dest_dir = tmp_path / "dest"

    tmp_db.set_setting("library_filename_template", "%id%")

    post = FakePost(id=42, tag_categories={})
    fetcher = PopulatingFetcher({"Artist": ["unused"]})

    _run(
        save_post_file(
            source_file, post, dest_dir, tmp_db,
            category_fetcher=fetcher,
        )
    )

    assert fetcher.calls == 0
|
||||
58
tests/core/test_pil_safety.py
Normal file
58
tests/core/test_pil_safety.py
Normal file
@ -0,0 +1,58 @@
|
||||
"""Tests for the project-wide PIL decompression-bomb cap (audit #8).
|
||||
|
||||
The cap lives in `booru_viewer/core/__init__.py` so any import of
|
||||
any `booru_viewer.core.*` submodule installs it first — independent
|
||||
of whether `core.cache` is on the import path. Both checks are run
|
||||
in a fresh subprocess so the assertion isn't masked by some other
|
||||
test's previous import.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
EXPECTED = 256 * 1024 * 1024
|
||||
|
||||
|
||||
def _run(code: str) -> str:
|
||||
result = subprocess.run(
|
||||
[sys.executable, "-c", code],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
return result.stdout.strip()
|
||||
|
||||
|
||||
def test_core_package_import_installs_cap():
    """Importing the core package alone must set MAX_IMAGE_PIXELS."""
    printed = _run(
        "import booru_viewer.core; "
        "from PIL import Image; "
        "print(Image.MAX_IMAGE_PIXELS)"
    )
    assert int(printed) == EXPECTED


def test_core_submodule_import_installs_cap():
    """Importing any non-cache core submodule must still set the cap:
    the package __init__.py runs before any submodule code, whichever
    submodule is the entry point."""
    printed = _run(
        "from booru_viewer.core import config; "
        "from PIL import Image; "
        "print(Image.MAX_IMAGE_PIXELS)"
    )
    assert int(printed) == EXPECTED


def test_core_cache_import_still_installs_cap():
    """Regression: the old code path (importing cache first) must keep
    working after the move."""
    printed = _run(
        "from booru_viewer.core import cache; "
        "from PIL import Image; "
        "print(Image.MAX_IMAGE_PIXELS)"
    )
    assert int(printed) == EXPECTED
|
||||
0
tests/gui/__init__.py
Normal file
0
tests/gui/__init__.py
Normal file
0
tests/gui/media/__init__.py
Normal file
0
tests/gui/media/__init__.py
Normal file
88
tests/gui/media/test_mpv_options.py
Normal file
88
tests/gui/media/test_mpv_options.py
Normal file
@ -0,0 +1,88 @@
|
||||
"""Tests for the pure mpv kwargs builder.
|
||||
|
||||
Pure Python. No Qt, no mpv, no network. The helper is importable
|
||||
from the CI environment that installs only httpx + Pillow + pytest.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from booru_viewer.gui.media._mpv_options import (
|
||||
LAVF_PROTOCOL_WHITELIST,
|
||||
build_mpv_kwargs,
|
||||
lavf_options,
|
||||
)
|
||||
|
||||
|
||||
def test_ytdl_disabled():
    """Finding #2 — mpv must not delegate URLs to yt-dlp."""
    assert build_mpv_kwargs(is_windows=False)["ytdl"] == "no"


def test_load_scripts_disabled():
    """Finding #2 — no auto-loading of ~/.config/mpv/scripts."""
    assert build_mpv_kwargs(is_windows=False)["load_scripts"] == "no"


def test_protocol_whitelist_not_in_init_kwargs():
    """Finding #2 — the lavf protocol whitelist must NOT be in the init
    kwargs dict: python-mpv's init path uses ``mpv_set_option_string``,
    which trips on the comma-laden value with -7 OPT_FORMAT. The
    whitelist is applied separately via the property API in ``mpv_gl.py``
    (see ``lavf_options``)."""
    built = build_mpv_kwargs(is_windows=False)
    for spelling in ("demuxer_lavf_o", "demuxer-lavf-o"):
        assert spelling not in built
|
||||
|
||||
|
||||
def test_lavf_options_protocol_whitelist():
    """Finding #2 — lavf demuxer must only accept file + HTTP(S) + TLS/TCP.

    Returned as a dict so callers can pass it through the python-mpv
    property API (node-based, handles comma-laden values cleanly).
    """
    opts = lavf_options()
    assert opts.keys() == {"protocol_whitelist"}
    allowed = set(opts["protocol_whitelist"].split(","))
    # `file` covers cached local clips and .part files; the rest are
    # the transports needed for network videos.
    for required in ("file", "http", "https", "tls", "tcp"):
        assert required in allowed
    # Dangerous protocols must NOT appear.
    for banned in ("concat", "subfile", "data", "udp", "rtp", "crypto"):
        assert banned not in allowed
    # The constant and the helper return the same value.
    assert opts["protocol_whitelist"] == LAVF_PROTOCOL_WHITELIST
|
||||
|
||||
|
||||
def test_input_conf_nulled_on_posix():
    """Finding #2 — on POSIX, skip loading ~/.config/mpv/input.conf."""
    assert build_mpv_kwargs(is_windows=False)["input_conf"] == "/dev/null"


def test_input_conf_skipped_on_windows():
    """Finding #2 — input_conf gate is POSIX-only; Windows omits the key."""
    assert "input_conf" not in build_mpv_kwargs(is_windows=True)


def test_existing_options_preserved():
    """Regression: pre-audit playback/audio tuning must remain."""
    built = build_mpv_kwargs(is_windows=False)
    # Discord screen-share audio fix (see mpv_gl.py comment).
    assert built["ao"] == "pulse,wasapi,"
    assert built["audio_client_name"] == "booru-viewer"
    # Network tuning from the uncached-video fast path.
    for key, expected in (
        ("cache", "yes"),
        ("cache_pause", "no"),
        ("demuxer_max_bytes", "50MiB"),
        ("network_timeout", "10"),
    ):
        assert built[key] == expected
    # Existing input lockdown (primary — input_conf is defense-in-depth).
    assert built["input_default_bindings"] is False
    assert built["input_vo_keyboard"] is False
|
||||
0
tests/gui/popout/__init__.py
Normal file
0
tests/gui/popout/__init__.py
Normal file
661
tests/gui/popout/test_state.py
Normal file
661
tests/gui/popout/test_state.py
Normal file
@ -0,0 +1,661 @@
|
||||
"""Pure-Python state machine tests for the popout viewer.
|
||||
|
||||
Imports `booru_viewer.gui.popout.state` directly without standing up a
|
||||
QApplication. The state machine module is required to be import-pure
|
||||
(no PySide6, mpv, httpx, subprocess, or any module that imports them);
|
||||
this test file is the forcing function. If state.py grows a Qt or mpv
|
||||
import, these tests fail to collect and the test suite breaks.
|
||||
|
||||
Test categories (from docs/POPOUT_REFACTOR_PLAN.md "Test plan"):
|
||||
1. Per-state transition tests
|
||||
2. Race-fix invariant tests (six structural fixes)
|
||||
3. Illegal transition tests
|
||||
4. Read-path query tests
|
||||
|
||||
**Commit 3 expectation:** most tests fail because state.py's dispatch
|
||||
handlers are stubs returning []. Tests progressively pass as commits
|
||||
4-11 land transitions. The trivially-passing tests at commit 3 (initial
|
||||
state, slider display read-path, terminal Closing guard) document the
|
||||
parts of the skeleton that are already real.
|
||||
|
||||
Refactor plan: docs/POPOUT_REFACTOR_PLAN.md
|
||||
Architecture: docs/POPOUT_ARCHITECTURE.md
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.gui.popout.state import (
|
||||
# Enums
|
||||
InvalidTransition,
|
||||
LoopMode,
|
||||
MediaKind,
|
||||
State,
|
||||
StateMachine,
|
||||
# Events
|
||||
CloseRequested,
|
||||
ContentArrived,
|
||||
FullscreenToggled,
|
||||
HyprlandDriftDetected,
|
||||
LoopModeSet,
|
||||
MuteToggleRequested,
|
||||
NavigateRequested,
|
||||
Open,
|
||||
SeekCompleted,
|
||||
SeekRequested,
|
||||
TogglePlayRequested,
|
||||
VideoEofReached,
|
||||
VideoSizeKnown,
|
||||
VideoStarted,
|
||||
VolumeSet,
|
||||
WindowMoved,
|
||||
WindowResized,
|
||||
# Effects
|
||||
ApplyLoopMode,
|
||||
ApplyMute,
|
||||
ApplyVolume,
|
||||
EmitClosed,
|
||||
EmitNavigate,
|
||||
EmitPlayNextRequested,
|
||||
EnterFullscreen,
|
||||
ExitFullscreen,
|
||||
FitWindowToContent,
|
||||
LoadImage,
|
||||
LoadVideo,
|
||||
SeekVideoTo,
|
||||
StopMedia,
|
||||
)
|
||||
from booru_viewer.gui.popout.viewport import Viewport
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Helpers — direct field mutation for setup. Tests construct a fresh
|
||||
# StateMachine and write the state field directly to skip the dispatch
|
||||
# chain. This is a deliberate test-fixture-vs-production-code split:
|
||||
# the tests don't depend on the dispatch chain being correct in order
|
||||
# to test individual transitions.
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
|
||||
def _new_in(state: State) -> StateMachine:
    """Fresh machine forced directly into *state* (bypasses dispatch)."""
    machine = StateMachine()
    machine.state = state
    return machine


# ----------------------------------------------------------------------
# Read-path queries (commit 2 — already passing)
# ----------------------------------------------------------------------


def test_initial_state():
    machine = StateMachine()
    assert machine.state == State.AWAITING_CONTENT
    assert machine.is_first_content_load is True
    assert machine.fullscreen is False
    assert machine.mute is False
    assert machine.volume == 50
    assert machine.loop_mode == LoopMode.LOOP
    assert machine.viewport is None
    assert machine.seek_target_ms == 0


def test_compute_slider_display_ms_passthrough_when_not_seeking():
    machine = StateMachine()
    machine.state = State.PLAYING_VIDEO
    assert machine.compute_slider_display_ms(7500) == 7500


def test_compute_slider_display_ms_pinned_when_seeking():
    machine = StateMachine()
    machine.state = State.SEEKING_VIDEO
    machine.seek_target_ms = 7000
    # mpv's reported position can be anywhere; while in SeekingVideo the
    # slider must show the user's target.
    for reported in (5000, 7000, 9999):
        assert machine.compute_slider_display_ms(reported) == 7000


def test_dispatch_in_closing_returns_empty():
    """Closing is terminal — every event dispatched from Closing returns
    [] and the state never leaves Closing."""
    machine = _new_in(State.CLOSING)
    events = (
        NavigateRequested(direction=1),
        ContentArrived("/x.jpg", "info", MediaKind.IMAGE),
        VideoEofReached(),
        SeekRequested(target_ms=1000),
        CloseRequested(),
    )
    for event in events:
        assert machine.dispatch(event) == []
        assert machine.state == State.CLOSING
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Per-state transition tests
|
||||
# ----------------------------------------------------------------------
|
||||
#
|
||||
# These all rely on the per-event handlers in state.py returning real
|
||||
# effect lists. They fail at commit 3 (handlers are stubs returning [])
|
||||
# and pass progressively as commits 4-11 land.
|
||||
|
||||
|
||||
# -- AwaitingContent transitions --
|
||||
|
||||
|
||||
def test_awaiting_open_stashes_saved_geo():
    """Open in AwaitingContent stashes saved_geo, saved_fullscreen and
    monitor for the first ContentArrived to consume."""
    machine = StateMachine()
    emitted = machine.dispatch(Open(saved_geo=(100, 200, 800, 600),
                                    saved_fullscreen=False, monitor=""))
    assert machine.state == State.AWAITING_CONTENT
    assert machine.saved_geo == (100, 200, 800, 600)
    assert machine.saved_fullscreen is False
    assert emitted == []


def test_awaiting_content_arrived_image_loads_and_transitions():
    machine = StateMachine()
    emitted = machine.dispatch(ContentArrived(
        path="/path/img.jpg", info="i", kind=MediaKind.IMAGE,
        width=1920, height=1080,
    ))
    assert machine.state == State.DISPLAYING_IMAGE
    assert machine.is_first_content_load is False
    assert machine.current_path == "/path/img.jpg"
    assert any(isinstance(e, LoadImage) for e in emitted)
    assert any(isinstance(e, FitWindowToContent) for e in emitted)


def test_awaiting_content_arrived_gif_loads_as_animated():
    machine = StateMachine()
    emitted = machine.dispatch(ContentArrived(
        path="/path/anim.gif", info="i", kind=MediaKind.GIF,
        width=480, height=480,
    ))
    assert machine.state == State.DISPLAYING_IMAGE
    load_effect = next(e for e in emitted if isinstance(e, LoadImage))
    assert load_effect.is_gif is True


def test_awaiting_content_arrived_video_transitions_to_loading():
    machine = StateMachine()
    emitted = machine.dispatch(ContentArrived(
        path="/path/v.mp4", info="i", kind=MediaKind.VIDEO,
        width=1280, height=720,
    ))
    assert machine.state == State.LOADING_VIDEO
    assert any(isinstance(e, LoadVideo) for e in emitted)


def test_awaiting_content_arrived_video_emits_persistence_effects():
    """First content load also emits ApplyMute / ApplyVolume /
    ApplyLoopMode so the machine's persistent values land in the
    freshly-created mpv by PlayingVideo entry. (Emitting on LoadingVideo
    entry or on PlayingVideo entry are both acceptable as long as they
    fire before mpv consumes the first frame.)"""
    machine = StateMachine()
    machine.mute = True
    machine.volume = 75
    emitted = machine.dispatch(ContentArrived(
        path="/v.mp4", info="i", kind=MediaKind.VIDEO,
    ))
    # The plan puts ApplyMute on PlayingVideo entry (commit 9); until
    # that lands, reaching LoadingVideo is the acceptable alternative.
    mute_applied = any(isinstance(e, ApplyMute) and e.value is True
                       for e in emitted)
    assert mute_applied or machine.state == State.LOADING_VIDEO


def test_awaiting_navigate_emits_navigate_only():
    """Navigate while waiting (e.g. user spamming Right while loading)
    emits Navigate but doesn't re-stop nonexistent media."""
    machine = StateMachine()
    emitted = machine.dispatch(NavigateRequested(direction=1))
    assert machine.state == State.AWAITING_CONTENT
    assert any(isinstance(e, EmitNavigate) and e.direction == 1
               for e in emitted)
    # Nothing is playing yet, so no StopMedia.
    assert not any(isinstance(e, StopMedia) for e in emitted)
||||
|
||||
|
||||
# -- DisplayingImage transitions --
|
||||
|
||||
|
||||
def test_displaying_image_navigate_stops_and_emits():
    machine = _new_in(State.DISPLAYING_IMAGE)
    machine.is_first_content_load = False
    emitted = machine.dispatch(NavigateRequested(direction=-1))
    assert machine.state == State.AWAITING_CONTENT
    assert any(isinstance(e, StopMedia) for e in emitted)
    assert any(isinstance(e, EmitNavigate) and e.direction == -1
               for e in emitted)


def test_displaying_image_content_replace_with_video():
    machine = _new_in(State.DISPLAYING_IMAGE)
    machine.is_first_content_load = False
    emitted = machine.dispatch(ContentArrived(
        path="/v.mp4", info="i", kind=MediaKind.VIDEO,
    ))
    assert machine.state == State.LOADING_VIDEO
    assert any(isinstance(e, LoadVideo) for e in emitted)


def test_displaying_image_content_replace_with_image():
    machine = _new_in(State.DISPLAYING_IMAGE)
    machine.is_first_content_load = False
    emitted = machine.dispatch(ContentArrived(
        path="/img2.png", info="i", kind=MediaKind.IMAGE,
    ))
    assert machine.state == State.DISPLAYING_IMAGE
    assert any(isinstance(e, LoadImage) for e in emitted)
||||
|
||||
|
||||
# -- LoadingVideo transitions --
|
||||
|
||||
|
||||
def test_loading_video_started_transitions_to_playing():
    machine = _new_in(State.LOADING_VIDEO)
    emitted = machine.dispatch(VideoStarted())
    assert machine.state == State.PLAYING_VIDEO
    # Persistence effects fire on PlayingVideo entry.
    for effect_cls in (ApplyMute, ApplyVolume, ApplyLoopMode):
        assert any(isinstance(e, effect_cls) for e in emitted)


def test_loading_video_eof_dropped():
    """RACE FIX: a stale EOF from the previous video lands while the new
    one is loading. It must be dropped without a state change. Replaces
    the 250ms _eof_ignore_until timestamp window from fda3b10b."""
    machine = _new_in(State.LOADING_VIDEO)
    emitted = machine.dispatch(VideoEofReached())
    assert machine.state == State.LOADING_VIDEO
    assert emitted == []


def test_loading_video_size_known_emits_fit():
    machine = _new_in(State.LOADING_VIDEO)
    machine.viewport = Viewport(center_x=500, center_y=400,
                                long_side=800)
    emitted = machine.dispatch(VideoSizeKnown(width=1920, height=1080))
    assert machine.state == State.LOADING_VIDEO
    assert any(isinstance(e, FitWindowToContent) for e in emitted)


def test_loading_video_navigate_stops_and_emits():
    machine = _new_in(State.LOADING_VIDEO)
    emitted = machine.dispatch(NavigateRequested(direction=1))
    assert machine.state == State.AWAITING_CONTENT
    assert any(isinstance(e, StopMedia) for e in emitted)
    assert any(isinstance(e, EmitNavigate) for e in emitted)
||||
|
||||
|
||||
# -- PlayingVideo transitions --
|
||||
|
||||
|
||||
def test_playing_video_eof_loop_next_emits_play_next():
    machine = _new_in(State.PLAYING_VIDEO)
    machine.loop_mode = LoopMode.NEXT
    emitted = machine.dispatch(VideoEofReached())
    assert any(isinstance(e, EmitPlayNextRequested) for e in emitted)


def test_playing_video_eof_loop_once_pauses():
    machine = _new_in(State.PLAYING_VIDEO)
    machine.loop_mode = LoopMode.ONCE
    emitted = machine.dispatch(VideoEofReached())
    # Once mode pauses; it must NOT emit play_next.
    assert not any(isinstance(e, EmitPlayNextRequested) for e in emitted)


def test_playing_video_eof_loop_loop_no_op():
    """Loop=Loop is mpv-handled (loop-file=inf), so an eof event
    arriving in the state machine should be a no-op."""
    machine = _new_in(State.PLAYING_VIDEO)
    machine.loop_mode = LoopMode.LOOP
    emitted = machine.dispatch(VideoEofReached())
    assert not any(isinstance(e, EmitPlayNextRequested) for e in emitted)


def test_playing_video_seek_requested_transitions_and_pins():
    machine = _new_in(State.PLAYING_VIDEO)
    emitted = machine.dispatch(SeekRequested(target_ms=7500))
    assert machine.state == State.SEEKING_VIDEO
    assert machine.seek_target_ms == 7500
    assert any(isinstance(e, SeekVideoTo) and e.target_ms == 7500
               for e in emitted)


def test_playing_video_navigate_stops_and_emits():
    machine = _new_in(State.PLAYING_VIDEO)
    emitted = machine.dispatch(NavigateRequested(direction=1))
    assert machine.state == State.AWAITING_CONTENT
    assert any(isinstance(e, StopMedia) for e in emitted)
    assert any(isinstance(e, EmitNavigate) for e in emitted)


def test_playing_video_size_known_refits():
    machine = _new_in(State.PLAYING_VIDEO)
    machine.viewport = Viewport(center_x=500, center_y=400, long_side=800)
    emitted = machine.dispatch(VideoSizeKnown(width=640, height=480))
    assert any(isinstance(e, FitWindowToContent) for e in emitted)


def test_playing_video_toggle_play_emits_toggle():
    from booru_viewer.gui.popout.state import TogglePlay
    machine = _new_in(State.PLAYING_VIDEO)
    emitted = machine.dispatch(TogglePlayRequested())
    assert machine.state == State.PLAYING_VIDEO
    assert any(isinstance(e, TogglePlay) for e in emitted)
||||
|
||||
|
||||
# -- SeekingVideo transitions --
|
||||
|
||||
|
||||
def test_seeking_video_completed_returns_to_playing():
    machine = _new_in(State.SEEKING_VIDEO)
    machine.seek_target_ms = 5000
    machine.dispatch(SeekCompleted())
    assert machine.state == State.PLAYING_VIDEO


def test_seeking_video_seek_requested_replaces_target():
    machine = _new_in(State.SEEKING_VIDEO)
    machine.seek_target_ms = 5000
    emitted = machine.dispatch(SeekRequested(target_ms=8000))
    assert machine.state == State.SEEKING_VIDEO
    assert machine.seek_target_ms == 8000
    assert any(isinstance(e, SeekVideoTo) and e.target_ms == 8000
               for e in emitted)


def test_seeking_video_navigate_stops_and_emits():
    machine = _new_in(State.SEEKING_VIDEO)
    emitted = machine.dispatch(NavigateRequested(direction=1))
    assert machine.state == State.AWAITING_CONTENT
    assert any(isinstance(e, StopMedia) for e in emitted)


def test_seeking_video_eof_dropped():
    """EOF during a seek is also stale — drop it."""
    machine = _new_in(State.SEEKING_VIDEO)
    emitted = machine.dispatch(VideoEofReached())
    assert machine.state == State.SEEKING_VIDEO
    assert emitted == []


# -- Closing (parametrized over source states) --


@pytest.mark.parametrize("source_state", [
    State.AWAITING_CONTENT,
    State.DISPLAYING_IMAGE,
    State.LOADING_VIDEO,
    State.PLAYING_VIDEO,
    State.SEEKING_VIDEO,
])
def test_close_from_each_state_transitions_to_closing(source_state):
    machine = _new_in(source_state)
    emitted = machine.dispatch(CloseRequested())
    assert machine.state == State.CLOSING
    assert any(isinstance(e, StopMedia) for e in emitted)
    assert any(isinstance(e, EmitClosed) for e in emitted)
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Race-fix invariant tests (six structural fixes from prior fix sweep)
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_invariant_eof_race_loading_video_drops_stale_eof():
    """Invariant 1: a stale EOF from the previous video must not advance
    the popout — structurally enforced by LoadingVideo dropping
    VideoEofReached."""
    machine = _new_in(State.LOADING_VIDEO)
    machine.loop_mode = LoopMode.NEXT  # would normally trigger play_next
    emitted = machine.dispatch(VideoEofReached())
    assert machine.state == State.LOADING_VIDEO
    assert not any(isinstance(e, EmitPlayNextRequested) for e in emitted)


def test_invariant_double_navigate_no_double_load():
    """Invariant 2: rapid Right-arrow spam must not produce double load
    events. Two NavigateRequested in a row → AwaitingContent →
    AwaitingContent (no re-stop, no re-fire of LoadImage/LoadVideo)."""
    machine = _new_in(State.PLAYING_VIDEO)
    first = machine.dispatch(NavigateRequested(direction=1))
    assert machine.state == State.AWAITING_CONTENT
    # Second nav while still in AwaitingContent.
    second = machine.dispatch(NavigateRequested(direction=1))
    assert machine.state == State.AWAITING_CONTENT
    # Second dispatch has nothing left to stop.
    assert not any(isinstance(e, StopMedia) for e in second)
    # Neither dispatch loads anything — content hasn't arrived.
    assert not any(isinstance(e, (LoadImage, LoadVideo))
                   for e in first + second)


def test_invariant_persistent_viewport_no_drift_across_navs():
    """Invariant 3: navigating between posts doesn't drift the
    persistent viewport — repeated ContentArrived events reuse the same
    viewport without accumulating per-nav rounding."""
    machine = StateMachine()
    machine.viewport = Viewport(center_x=960.0, center_y=540.0, long_side=1280.0)
    machine.is_first_content_load = False  # past the seed point
    snapshot = machine.viewport
    for path in ("/a.jpg", "/b.jpg", "/c.jpg", "/d.jpg", "/e.jpg"):
        machine.state = State.DISPLAYING_IMAGE
        machine.dispatch(NavigateRequested(direction=1))
        machine.dispatch(ContentArrived(path=path, info="", kind=MediaKind.IMAGE))
    assert machine.viewport == snapshot


def test_invariant_f11_round_trip_restores_pre_fullscreen_viewport():
    """Invariant 4: F11 enter snapshots the viewport, F11 exit restores it."""
    machine = _new_in(State.PLAYING_VIDEO)
    machine.viewport = Viewport(center_x=800.0, center_y=600.0, long_side=1000.0)
    before = machine.viewport
    # Enter fullscreen.
    machine.dispatch(FullscreenToggled())
    assert machine.fullscreen is True
    assert machine.pre_fullscreen_viewport == before
    # (Window moves during fullscreen don't matter — no fits run there.)
    # Exit fullscreen.
    machine.dispatch(FullscreenToggled())
    assert machine.fullscreen is False
    assert machine.viewport == before


def test_invariant_seek_pin_uses_compute_slider_display_ms():
    """Invariant 5: while in SeekingVideo the slider display value is
    the user's target, not mpv's lagging position."""
    machine = _new_in(State.PLAYING_VIDEO)
    machine.dispatch(SeekRequested(target_ms=9000))
    # Adapter polls mpv, then asks the machine what to display.
    assert machine.compute_slider_display_ms(mpv_pos_ms=4500) == 9000
    assert machine.compute_slider_display_ms(mpv_pos_ms=8500) == 9000
    # After SeekCompleted, the slider tracks mpv again.
    machine.dispatch(SeekCompleted())
    assert machine.compute_slider_display_ms(mpv_pos_ms=8500) == 8500


def test_invariant_pending_mute_replayed_into_video():
    """Invariant 6: a mute toggled before any video loads must apply
    once the video reaches PlayingVideo. The state machine owns mute as
    truth; ApplyMute(state.mute) fires on PlayingVideo entry."""
    machine = StateMachine()
    # User mutes before any video has loaded.
    machine.dispatch(MuteToggleRequested())
    assert machine.mute is True
    # Drive through to PlayingVideo.
    machine.dispatch(ContentArrived(
        path="/v.mp4", info="i", kind=MediaKind.VIDEO,
    ))
    assert machine.state == State.LOADING_VIDEO
    emitted = machine.dispatch(VideoStarted())
    assert machine.state == State.PLAYING_VIDEO
    # ApplyMute(True) must have fired on entry.
    assert [e for e in emitted
            if isinstance(e, ApplyMute) and e.value is True]
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Illegal transition tests
|
||||
# ----------------------------------------------------------------------
|
||||
#
|
||||
# At commit 11 these become env-gated raises (BOORU_VIEWER_STRICT_STATE).
|
||||
# At commits 3-10 they return [] (the skeleton's default).
|
||||
|
||||
|
||||
def test_strict_mode_raises_invalid_transition(monkeypatch):
|
||||
"""When BOORU_VIEWER_STRICT_STATE is set, illegal events raise
|
||||
InvalidTransition instead of dropping silently. This is the
|
||||
development/debug mode that catches programmer errors at the
|
||||
dispatch boundary."""
|
||||
monkeypatch.setenv("BOORU_VIEWER_STRICT_STATE", "1")
|
||||
m = _new_in(State.PLAYING_VIDEO)
|
||||
with pytest.raises(InvalidTransition) as exc_info:
|
||||
m.dispatch(VideoStarted())
|
||||
assert exc_info.value.state == State.PLAYING_VIDEO
|
||||
assert isinstance(exc_info.value.event, VideoStarted)
|
||||
|
||||
|
||||
def test_strict_mode_does_not_raise_for_legal_events(monkeypatch):
|
||||
"""Legal events go through dispatch normally even under strict mode."""
|
||||
monkeypatch.setenv("BOORU_VIEWER_STRICT_STATE", "1")
|
||||
m = _new_in(State.PLAYING_VIDEO)
|
||||
# SeekRequested IS legal in PlayingVideo — no raise
|
||||
effects = m.dispatch(SeekRequested(target_ms=5000))
|
||||
assert m.state == State.SEEKING_VIDEO
|
||||
|
||||
|
||||
def test_strict_mode_legal_but_no_op_does_not_raise(monkeypatch):
    """Legal-but-no-op events (the EOF race fix: VideoEofReached while
    still in LoadingVideo) are accepted and dropped by design — they are
    a structural fix, not a programmer error, so strict mode must stay
    silent."""
    monkeypatch.setenv("BOORU_VIEWER_STRICT_STATE", "1")
    machine = _new_in(State.LOADING_VIDEO)
    # Accepted, but produces no effects and no state change.
    produced = machine.dispatch(VideoEofReached())
    assert produced == []
    assert machine.state == State.LOADING_VIDEO
|
||||
|
||||
|
||||
@pytest.mark.parametrize("source_state, illegal_event", [
    (State.AWAITING_CONTENT, VideoEofReached()),
    (State.AWAITING_CONTENT, VideoStarted()),
    (State.AWAITING_CONTENT, SeekRequested(target_ms=1000)),
    (State.AWAITING_CONTENT, SeekCompleted()),
    (State.AWAITING_CONTENT, TogglePlayRequested()),
    (State.DISPLAYING_IMAGE, VideoEofReached()),
    (State.DISPLAYING_IMAGE, VideoStarted()),
    (State.DISPLAYING_IMAGE, SeekRequested(target_ms=1000)),
    (State.DISPLAYING_IMAGE, SeekCompleted()),
    (State.DISPLAYING_IMAGE, TogglePlayRequested()),
    (State.LOADING_VIDEO, SeekRequested(target_ms=1000)),
    (State.LOADING_VIDEO, SeekCompleted()),
    (State.LOADING_VIDEO, TogglePlayRequested()),
    (State.PLAYING_VIDEO, VideoStarted()),
    (State.PLAYING_VIDEO, SeekCompleted()),
    (State.SEEKING_VIDEO, VideoStarted()),
    (State.SEEKING_VIDEO, TogglePlayRequested()),
])
def test_illegal_event_returns_empty_in_release_mode(source_state, illegal_event):
    """Release mode (no BOORU_VIEWER_STRICT_STATE env var): illegal
    transitions are dropped silently — an empty effect list and an
    unchanged state. Strict mode (commit 11) raises InvalidTransition
    instead; production runs the release-mode path."""
    machine = _new_in(source_state)
    assert machine.dispatch(illegal_event) == []
    assert machine.state == source_state
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Persistent state field tests (commits 8 + 9)
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_state_field_mute_persists_across_video_loads():
    """state.mute survives any number of LoadingVideo -> PlayingVideo
    cycles; only MuteToggleRequested ever writes the field."""
    sm = StateMachine()
    sm.dispatch(MuteToggleRequested())
    assert sm.mute is True
    # Drive several full video-load cycles.
    for _ in range(3):
        sm.state = State.AWAITING_CONTENT
        sm.dispatch(ContentArrived(path="/v.mp4", info="",
                                   kind=MediaKind.VIDEO))
        sm.dispatch(VideoStarted())
        assert sm.mute is True
|
||||
|
||||
|
||||
def test_state_field_volume_persists_across_video_loads():
    """state.volume, like mute, is untouched by video load cycles."""
    sm = StateMachine()
    sm.dispatch(VolumeSet(value=85))
    assert sm.volume == 85
    for _ in range(3):
        sm.state = State.AWAITING_CONTENT
        sm.dispatch(ContentArrived(path="/v.mp4", info="",
                                   kind=MediaKind.VIDEO))
        sm.dispatch(VideoStarted())
        assert sm.volume == 85
|
||||
|
||||
|
||||
def test_state_field_loop_mode_persists():
    """state.loop_mode survives a full video load cycle."""
    sm = StateMachine()
    sm.dispatch(LoopModeSet(mode=LoopMode.NEXT))
    assert sm.loop_mode == LoopMode.NEXT
    # One load/play cycle must not reset the mode.
    sm.state = State.AWAITING_CONTENT
    sm.dispatch(ContentArrived(path="/v.mp4", info="",
                               kind=MediaKind.VIDEO))
    sm.dispatch(VideoStarted())
    assert sm.loop_mode == LoopMode.NEXT
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# Window event tests (commit 8)
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_window_moved_updates_viewport_center_only():
    """WindowMoved recenters the viewport from the new rect but keeps
    long_side untouched (move-only update)."""
    sm = _new_in(State.DISPLAYING_IMAGE)
    sm.viewport = Viewport(center_x=500.0, center_y=400.0, long_side=800.0)
    sm.dispatch(WindowMoved(rect=(200, 300, 1000, 800)))
    assert sm.viewport is not None
    assert sm.viewport.center_x == 700.0  # 200 + 1000 / 2
    assert sm.viewport.center_y == 700.0  # 300 + 800 / 2
    assert sm.viewport.long_side == 800.0  # unchanged by a pure move
|
||||
|
||||
|
||||
def test_window_resized_updates_viewport_long_side():
    """WindowResized rebuilds the viewport; long_side becomes the larger
    of the new rect's width and height."""
    sm = _new_in(State.DISPLAYING_IMAGE)
    sm.viewport = Viewport(center_x=500.0, center_y=400.0, long_side=800.0)
    sm.dispatch(WindowResized(rect=(100, 100, 1200, 900)))
    assert sm.viewport is not None
    assert sm.viewport.long_side == 1200.0  # max(1200, 900)
|
||||
|
||||
|
||||
def test_hyprland_drift_updates_viewport_from_rect():
    """HyprlandDriftDetected rebuilds the whole viewport from the rect."""
    sm = _new_in(State.DISPLAYING_IMAGE)
    sm.viewport = Viewport(center_x=500.0, center_y=400.0, long_side=800.0)
    sm.dispatch(HyprlandDriftDetected(rect=(50, 50, 1500, 1000)))
    assert sm.viewport is not None
    assert sm.viewport.center_x == 800.0  # 50 + 1500 / 2
    assert sm.viewport.center_y == 550.0  # 50 + 1000 / 2
    assert sm.viewport.long_side == 1500.0
|
||||
81
tests/gui/test_media_controller.py
Normal file
81
tests/gui/test_media_controller.py
Normal file
@ -0,0 +1,81 @@
|
||||
"""Tests for media_controller -- prefetch order computation.
|
||||
|
||||
Pure Python. No Qt, no mpv, no httpx.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.gui.media_controller import compute_prefetch_order
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Nearby mode
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_nearby_center_returns_4_cardinals():
    """An interior grid cell prefetches its four cardinal neighbours."""
    order = compute_prefetch_order(index=12, total=25, columns=5, mode="Nearby")
    assert len(order) == 4
    for neighbour in (13, 11, 17, 7):  # right, left, below, above
        assert neighbour in order
|
||||
|
||||
|
||||
def test_nearby_top_left_corner_returns_2():
    """Index 0: only the right and below neighbours are in bounds."""
    order = compute_prefetch_order(index=0, total=25, columns=5, mode="Nearby")
    assert len(order) == 2
    assert 1 in order   # right
    assert 5 in order   # below
|
||||
|
||||
|
||||
def test_nearby_bottom_right_corner_returns_2():
    """Last index of a 5x5 grid: only left and above are in bounds."""
    order = compute_prefetch_order(index=24, total=25, columns=5, mode="Nearby")
    assert len(order) == 2
    assert 23 in order  # left
    assert 19 in order  # above
|
||||
|
||||
|
||||
def test_nearby_single_post_returns_empty():
    """A one-post grid has nothing to prefetch."""
    result = compute_prefetch_order(index=0, total=1, columns=5, mode="Nearby")
    assert result == []
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Aggressive mode
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_aggressive_returns_more_than_nearby():
    """Aggressive mode always covers strictly more indices than Nearby."""
    near = compute_prefetch_order(index=12, total=25, columns=5, mode="Nearby")
    aggr = compute_prefetch_order(index=12, total=25, columns=5, mode="Aggressive")
    assert len(aggr) > len(near)
|
||||
|
||||
|
||||
def test_aggressive_no_duplicates():
    """The aggressive order never repeats an index."""
    order = compute_prefetch_order(index=12, total=100, columns=5, mode="Aggressive")
    assert len(set(order)) == len(order)
|
||||
|
||||
|
||||
def test_aggressive_excludes_self():
    """The current index is never part of its own prefetch order."""
    order = compute_prefetch_order(index=12, total=100, columns=5, mode="Aggressive")
    assert 12 not in order
|
||||
|
||||
|
||||
def test_aggressive_all_in_bounds():
    """Every returned index lies inside [0, total)."""
    order = compute_prefetch_order(index=0, total=50, columns=5, mode="Aggressive")
    assert all(0 <= idx < 50 for idx in order)
|
||||
|
||||
|
||||
def test_aggressive_respects_cap():
    """max_radius=3 bounds the aggressive result even on a huge grid."""
    order = compute_prefetch_order(index=500, total=10000, columns=10, mode="Aggressive")
    cap = 10 * 3 * 2 + 10  # columns * max_radius * 2 + columns = 70
    assert len(order) <= cap
|
||||
45
tests/gui/test_popout_controller.py
Normal file
45
tests/gui/test_popout_controller.py
Normal file
@ -0,0 +1,45 @@
|
||||
"""Tests for popout_controller -- video state sync dict.
|
||||
|
||||
Pure Python. No Qt, no mpv.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from booru_viewer.gui.popout_controller import build_video_sync_dict
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# build_video_sync_dict
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_shape():
    """The sync payload is a plain dict with exactly five entries."""
    sync = build_video_sync_dict(
        volume=50, mute=False, autoplay=True, loop_state=0, position_ms=0,
    )
    assert isinstance(sync, dict)
    assert len(sync) == 5
|
||||
|
||||
|
||||
def test_defaults():
    """Each input value lands under its matching key unchanged."""
    sync = build_video_sync_dict(
        volume=50, mute=False, autoplay=True, loop_state=0, position_ms=0,
    )
    assert sync["volume"] == 50
    assert sync["mute"] is False
    assert sync["autoplay"] is True
    assert sync["loop_state"] == 0
    assert sync["position_ms"] == 0
|
||||
|
||||
|
||||
def test_has_all_5_keys():
    """Key set is exactly the five documented fields; values round-trip."""
    sync = build_video_sync_dict(
        volume=80, mute=True, autoplay=False, loop_state=2, position_ms=5000,
    )
    assert set(sync) == {"volume", "mute", "autoplay", "loop_state", "position_ms"}
    assert sync["volume"] == 80
    assert sync["mute"] is True
    assert sync["autoplay"] is False
    assert sync["loop_state"] == 2
    assert sync["position_ms"] == 5000
|
||||
86
tests/gui/test_post_actions.py
Normal file
86
tests/gui/test_post_actions.py
Normal file
@ -0,0 +1,86 @@
|
||||
"""Tests for post_actions -- bookmark-done message parsing, library membership.
|
||||
|
||||
Pure Python. No Qt, no network.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.gui.post_actions import is_batch_message, is_in_library
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# is_batch_message
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_batch_message_saved_fraction():
    """'Saved N/M ...' status text is recognised as a batch message."""
    result = is_batch_message("Saved 3/10 to Unfiled")
    assert result is True
|
||||
|
||||
|
||||
def test_batch_message_bookmarked_fraction():
    """'Bookmarked N/M' is recognised as a batch message."""
    result = is_batch_message("Bookmarked 1/5")
    assert result is True
|
||||
|
||||
|
||||
def test_not_batch_single_bookmark():
    """A single-post bookmark message is not a batch message."""
    result = is_batch_message("Bookmarked #12345 to Unfiled")
    assert result is False
|
||||
|
||||
|
||||
def test_not_batch_download_path():
    """A download-path message has no N/M fraction and is not a batch."""
    result = is_batch_message("Downloaded to /home/user/pics")
    assert result is False
|
||||
|
||||
|
||||
def test_error_message_with_status_codes_is_false_positive():
    """Documents a KNOWN false positive: the simple fraction heuristic
    matches the '9/5' inside '429/503'. Acceptable because the function
    only ever sees status-bar messages the app itself generates, and
    real error text doesn't hit this pattern in practice."""
    result = is_batch_message("Error: HTTP 429/503")
    assert result is True
|
||||
|
||||
|
||||
def test_not_batch_empty():
    """The empty string is never a batch message."""
    result = is_batch_message("")
    assert result is False
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# is_in_library
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_is_in_library_direct_child(tmp_path):
    """A file directly under the library root is a member."""
    root = tmp_path / "saved"
    root.mkdir()
    member = root / "12345.jpg"
    member.touch()
    assert is_in_library(member, root) is True
|
||||
|
||||
|
||||
def test_is_in_library_subfolder(tmp_path):
    """A file nested in a subfolder of the root is still a member."""
    sub = tmp_path / "saved" / "cats"
    sub.mkdir(parents=True)
    member = sub / "67890.png"
    member.touch()
    assert is_in_library(member, tmp_path / "saved") is True
|
||||
|
||||
|
||||
def test_is_in_library_outside(tmp_path):
    """A file outside the library root is not a member."""
    root = tmp_path / "saved"
    root.mkdir()
    stranger = tmp_path / "other" / "pic.jpg"
    stranger.parent.mkdir()
    stranger.touch()
    assert is_in_library(stranger, root) is False
|
||||
|
||||
|
||||
def test_is_in_library_traversal_resolved(tmp_path):
    """is_relative_to compares literal path segments, so an unresolved
    '..' would still look relative. The app always passes resolved
    paths, and a resolved escape is correctly rejected."""
    root = tmp_path / "saved"
    root.mkdir()
    escaped = (root / ".." / "other.jpg").resolve()
    assert is_in_library(escaped, root) is False
|
||||
218
tests/gui/test_search_controller.py
Normal file
218
tests/gui/test_search_controller.py
Normal file
@ -0,0 +1,218 @@
|
||||
"""Tests for search_controller -- tag building, blacklist filtering, backfill decisions.
|
||||
|
||||
Pure Python. No Qt, no network, no QApplication.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import NamedTuple
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.gui.search_controller import (
|
||||
build_search_tags,
|
||||
filter_posts,
|
||||
should_backfill,
|
||||
)
|
||||
|
||||
|
||||
# -- Minimal Post stand-in for filter_posts --
|
||||
|
||||
|
||||
class _Post(NamedTuple):
|
||||
id: int
|
||||
tag_list: list
|
||||
file_url: str
|
||||
|
||||
|
||||
def _post(pid: int, tags: str = "", url: str = "") -> _Post:
    """Build a _Post; `tags` is a space-separated tag string."""
    tag_list = tags.split() if tags else []
    return _Post(id=pid, tag_list=tag_list, file_url=url)
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# build_search_tags
|
||||
# ======================================================================
|
||||
|
||||
# -- Rating mapping --
|
||||
|
||||
|
||||
def test_danbooru_rating_uses_single_letter():
    """Danbooru encodes ratings as a single letter ('e' for explicit)."""
    tags = build_search_tags("cat_ears", "explicit", "danbooru", 0, "All")
    assert "rating:e" in tags
|
||||
|
||||
|
||||
def test_gelbooru_rating_uses_full_word():
    """Gelbooru spells the rating out in full."""
    tags = build_search_tags("", "questionable", "gelbooru", 0, "All")
    assert "rating:questionable" in tags
|
||||
|
||||
|
||||
def test_e621_maps_general_to_safe():
    """e621 has no 'general' rating; it maps to safe ('s')."""
    tags = build_search_tags("", "general", "e621", 0, "All")
    assert "rating:s" in tags
|
||||
|
||||
|
||||
def test_e621_maps_sensitive_to_safe():
    """e621 also folds 'sensitive' into safe ('s')."""
    tags = build_search_tags("", "sensitive", "e621", 0, "All")
    assert "rating:s" in tags
|
||||
|
||||
|
||||
def test_moebooru_maps_general_to_safe():
    """Moebooru maps 'general' to the full word 'safe'."""
    tags = build_search_tags("", "general", "moebooru", 0, "All")
    assert "rating:safe" in tags
|
||||
|
||||
|
||||
def test_all_rating_adds_nothing():
    """Rating 'all' contributes no rating: token at all."""
    tags = build_search_tags("cat", "all", "danbooru", 0, "All")
    assert "rating:" not in tags
|
||||
|
||||
|
||||
# -- Score filter --
|
||||
|
||||
|
||||
def test_score_filter():
    """A positive minimum score becomes a score:>=N token."""
    tags = build_search_tags("", "all", "danbooru", 50, "All")
    assert "score:>=50" in tags
|
||||
|
||||
|
||||
def test_score_zero_adds_nothing():
    """A zero minimum score adds no score: token."""
    tags = build_search_tags("", "all", "danbooru", 0, "All")
    assert "score:" not in tags
|
||||
|
||||
|
||||
# -- Media type filter --
|
||||
|
||||
|
||||
def test_media_type_animated():
    """'Animated' media filter adds the animated tag."""
    tags = build_search_tags("", "all", "danbooru", 0, "Animated")
    assert "animated" in tags
|
||||
|
||||
|
||||
def test_media_type_video():
    """'Video' media filter adds the video tag."""
    tags = build_search_tags("", "all", "danbooru", 0, "Video")
    assert "video" in tags
|
||||
|
||||
|
||||
def test_media_type_gif():
    """'GIF' media filter adds the animated_gif tag."""
    tags = build_search_tags("", "all", "danbooru", 0, "GIF")
    assert "animated_gif" in tags
|
||||
|
||||
|
||||
def test_media_type_audio():
    """'Audio' media filter adds the audio tag."""
    tags = build_search_tags("", "all", "danbooru", 0, "Audio")
    assert "audio" in tags
|
||||
|
||||
|
||||
# -- Combined --
|
||||
|
||||
|
||||
def test_combined_has_all_tokens():
    """Search text, rating, score and media filters all combine."""
    tags = build_search_tags("1girl", "explicit", "danbooru", 10, "Video")
    for token in ("1girl", "rating:e", "score:>=10", "video"):
        assert token in tags
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# filter_posts
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_removes_blacklisted_tags():
    """A post carrying any blacklisted tag is dropped and counted."""
    batch = [_post(1, tags="cat dog"), _post(2, tags="bird")]
    filtered, drops = filter_posts(batch, bl_tags={"dog"}, bl_posts=set(),
                                   seen_ids=set())
    assert [p.id for p in filtered] == [2]
    assert drops["bl_tags"] == 1
|
||||
|
||||
|
||||
def test_removes_blacklisted_posts_by_url():
    """A post whose file_url is blacklisted is dropped and counted."""
    batch = [_post(1, url="http://a.jpg"), _post(2, url="http://b.jpg")]
    filtered, drops = filter_posts(batch, bl_tags=set(),
                                   bl_posts={"http://a.jpg"}, seen_ids=set())
    assert [p.id for p in filtered] == [2]
    assert drops["bl_posts"] == 1
|
||||
|
||||
|
||||
def test_deduplicates_across_batches():
    """seen_ids accumulated from earlier batches dedups later batches.
    Within one batch the filtering comprehension runs before seen_ids
    is updated, so same-id posts in a single batch all survive — the
    cross-batch check catches them on the next call."""
    seen: set = set()
    filter_posts([_post(1)], bl_tags=set(), bl_posts=set(), seen_ids=seen)
    assert 1 in seen
    # The repeated id is dropped on the second batch.
    filtered, drops = filter_posts([_post(1), _post(2)], bl_tags=set(),
                                   bl_posts=set(), seen_ids=seen)
    assert [p.id for p in filtered] == [2]
    assert drops["dedup"] == 1
|
||||
|
||||
|
||||
def test_respects_previously_seen_ids():
    """Ids pre-seeded into seen_ids are treated as duplicates."""
    filtered, drops = filter_posts([_post(1), _post(2)], bl_tags=set(),
                                   bl_posts=set(), seen_ids={1})
    assert [p.id for p in filtered] == [2]
    assert drops["dedup"] == 1
|
||||
|
||||
|
||||
def test_all_three_interact():
    """bl_tags, bl_posts and cross-batch dedup all apply in sequence."""
    seen: set = {3}  # post 3 is already known from a prior batch
    batch = [
        _post(1, tags="bad", url="http://a.jpg"),  # dropped by bl_tags
        _post(2, url="http://blocked.jpg"),        # dropped by bl_posts
        _post(3),                                  # dropped by dedup
        _post(4),                                  # survives
    ]
    filtered, drops = filter_posts(
        batch, bl_tags={"bad"}, bl_posts={"http://blocked.jpg"}, seen_ids=seen,
    )
    assert [p.id for p in filtered] == [4]
    assert drops["bl_tags"] == 1
    assert drops["bl_posts"] == 1
    assert drops["dedup"] == 1
|
||||
|
||||
|
||||
def test_empty_lists_pass_through():
    """With empty blacklists and no seen ids, everything survives."""
    filtered, drops = filter_posts([_post(1), _post(2)], bl_tags=set(),
                                   bl_posts=set(), seen_ids=set())
    assert len(filtered) == 2
    assert drops == {"bl_tags": 0, "bl_posts": 0, "dedup": 0}
|
||||
|
||||
|
||||
def test_filter_posts_mutates_seen_ids():
    """filter_posts records surviving ids into the caller's seen set."""
    seen: set = set()
    filter_posts([_post(10), _post(20)], bl_tags=set(), bl_posts=set(),
                 seen_ids=seen)
    assert seen == {10, 20}
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# should_backfill
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_backfill_yes_when_under_limit_and_api_not_short():
    """Under the limit with a full API batch: keep fetching."""
    decision = should_backfill(collected_count=10, limit=40, last_batch_size=40)
    assert decision is True
|
||||
|
||||
|
||||
def test_backfill_no_when_collected_meets_limit():
    """Limit already reached: stop fetching."""
    decision = should_backfill(collected_count=40, limit=40, last_batch_size=40)
    assert decision is False
|
||||
|
||||
|
||||
def test_backfill_no_when_api_returned_short():
    """A short API batch signals exhaustion: stop fetching."""
    decision = should_backfill(collected_count=10, limit=40, last_batch_size=20)
    assert decision is False
|
||||
|
||||
|
||||
def test_backfill_no_when_both_met():
    """Limit met AND short batch: definitely stop."""
    decision = should_backfill(collected_count=40, limit=40, last_batch_size=20)
    assert decision is False
|
||||
87
tests/gui/test_source_html.py
Normal file
87
tests/gui/test_source_html.py
Normal file
@ -0,0 +1,87 @@
|
||||
"""Tests for the pure info-panel source HTML builder.
|
||||
|
||||
Pure Python. No Qt, no network. Validates audit finding #6 — that the
|
||||
helper escapes booru-controlled `post.source` before it's interpolated
|
||||
into a QTextBrowser RichText document.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from booru_viewer.gui._source_html import build_source_html
|
||||
|
||||
|
||||
def test_none_returns_literal_none():
    """None and the empty string both render as the literal 'none'."""
    assert build_source_html(None) == "none"
    assert build_source_html("") == "none"
|
||||
|
||||
|
||||
def test_plain_https_url_renders_escaped_anchor():
    """A clean https URL becomes an anchor whose href and display text
    are both the URL itself."""
    html_out = build_source_html("https://example.test/post/1")
    assert html_out.startswith('<a href="https://example.test/post/1"')
    assert ">https://example.test/post/1</a>" in html_out
|
||||
|
||||
|
||||
def test_long_url_display_text_truncated_but_href_full():
    """Long URLs keep the full target in href; only the visible link
    text is shortened with a trailing ellipsis.

    Fix: the un-escape call was corrupted to the no-op
    `out.replace("&", "&")` (HTML entities were decoded when this file
    was pasted); restored to undo attribute escaping with
    `replace("&amp;", "&")` before searching for the raw URL.
    """
    long_url = "https://example.test/" + "a" * 200
    out = build_source_html(long_url)
    # href contains the full URL (undo attribute escaping first)
    assert long_url in out.replace("&amp;", "&")
    # Display text is truncated to 57 chars + "..."
    assert "..." in out
|
||||
|
||||
|
||||
def test_double_quote_in_url_escaped():
    """A `"` in the source must not break out of the href attribute.

    Fix: the expected-entity literals were corrupted (decoded back to
    `>` / `"`) when this file was pasted, which made the second assert
    meaningless; restored the `&gt;` / `&quot;` entity expectations.
    """
    hostile = 'https://attacker.test/"><img src=x>'
    out = build_source_html(hostile)
    # Raw <img> must NOT appear — html.escape converts < to &lt;
    assert "<img" not in out
    # The display text must also carry the markup in escaped form.
    assert "&gt;" in out or "&quot;" in out
|
||||
|
||||
|
||||
def test_html_tags_in_url_escaped():
    """Script tags in a source URL are entity-escaped, never emitted raw.

    Fix: as written the two asserts were self-contradictory
    (`"<script>" not in out` followed by `"<script>" in out`) — the
    second literal lost its entity encoding when this file was pasted;
    restored to `&lt;script&gt;`.
    """
    hostile = "https://attacker.test/<script>alert(1)</script>"
    out = build_source_html(hostile)
    assert "<script>" not in out
    assert "&lt;script&gt;" in out
|
||||
|
||||
|
||||
def test_non_url_source_rendered_as_escaped_plain_text():
    """A non-http(s) source renders as plain text — no <a> tag — but is
    still HTML-escaped.

    Fix: the final assert's expected literal lost its entity encoding
    when this file was pasted (it read `"<b>" in out`, contradicting
    the line above); restored to `&lt;b&gt;`.
    """
    out = build_source_html("not a url <b>at all</b>")
    assert "<a" not in out
    assert "<b>" not in out
    assert "&lt;b&gt;" in out
|
||||
|
||||
|
||||
def test_javascript_url_does_not_become_anchor():
    """Non-http(s) schemes — including `javascript:` — must never be
    wrapped in an <a> tag where they'd become a clickable target."""
    out = build_source_html("javascript:alert(1)")
    assert "<a " not in out
    # The text content itself is preserved (escaped).
    assert "alert(1)" in out
|
||||
|
||||
|
||||
def test_data_url_does_not_become_anchor():
    """`data:` sources get neither an anchor nor raw markup."""
    out = build_source_html("data:text/html,<script>x</script>")
    assert "<a " not in out
    assert "<script>" not in out
|
||||
|
||||
|
||||
def test_ampersand_in_url_escaped():
    """`&` in a URL must appear entity-escaped inside the href attribute.

    Fix: both expected literals lost their entity encoding when this
    file was pasted — the asserts checked for a raw `&`, directly
    contradicting their own comments; restored the `&amp;` forms.
    """
    out = build_source_html("https://example.test/?a=1&b=2")
    # `&` must be `&amp;` inside the href attribute
    assert "&amp;" in out
    # Raw `&b=` is NOT acceptable as an attribute value
    assert 'href="https://example.test/?a=1&amp;b=2"' in out
|
||||
|
||||
|
||||
def test_pixiv_real_world_source_unchanged_visually():
    """A realistic pixiv link passes through with no surprising changes."""
    out = build_source_html("https://www.pixiv.net/artworks/12345")
    assert 'href="https://www.pixiv.net/artworks/12345"' in out
    assert "https://www.pixiv.net/artworks/12345</a>" in out
|
||||
146
tests/gui/test_window_state.py
Normal file
146
tests/gui/test_window_state.py
Normal file
@ -0,0 +1,146 @@
|
||||
"""Tests for window_state -- geometry parsing, Hyprland command building.
|
||||
|
||||
Pure Python. No Qt, no subprocess, no Hyprland.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from booru_viewer.gui.window_state import (
|
||||
build_hyprctl_restore_cmds,
|
||||
format_geometry,
|
||||
parse_geometry,
|
||||
parse_splitter_sizes,
|
||||
)
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# parse_geometry
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_parse_geometry_valid():
    """Four comma-separated ints parse into an (x, y, w, h) tuple."""
    parsed = parse_geometry("100,200,800,600")
    assert parsed == (100, 200, 800, 600)
|
||||
|
||||
|
||||
def test_parse_geometry_wrong_count():
    """Fewer than four fields is rejected with None."""
    assert parse_geometry("100,200,800") is None
|
||||
|
||||
|
||||
def test_parse_geometry_non_numeric():
    """A non-numeric field is rejected with None."""
    assert parse_geometry("abc,200,800,600") is None
|
||||
|
||||
|
||||
def test_parse_geometry_empty():
    """The empty string is rejected with None."""
    assert parse_geometry("") is None
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# format_geometry
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_format_geometry_basic():
    """Geometry formats as comma-joined ints with no spaces."""
    formatted = format_geometry(10, 20, 1920, 1080)
    assert formatted == "10,20,1920,1080"
|
||||
|
||||
|
||||
def test_format_and_parse_round_trip():
    """parse_geometry inverts format_geometry exactly."""
    rect = (100, 200, 800, 600)
    assert parse_geometry(format_geometry(*rect)) == rect
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# parse_splitter_sizes
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_valid_2():
    """Two positive fields parse when two are expected."""
    assert parse_splitter_sizes("300,700", 2) == [300, 700]
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_valid_3():
    """Three positive fields parse when three are expected."""
    assert parse_splitter_sizes("200,500,300", 3) == [200, 500, 300]
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_wrong_count():
    """A field-count mismatch is rejected with None."""
    assert parse_splitter_sizes("300,700", 3) is None
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_negative():
    """A negative size is rejected with None."""
    assert parse_splitter_sizes("300,-1", 2) is None
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_all_zero():
    """All-zero sizes are rejected with None."""
    assert parse_splitter_sizes("0,0", 2) is None
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_non_numeric():
    """A non-numeric field is rejected with None."""
    assert parse_splitter_sizes("abc,700", 2) is None
|
||||
|
||||
|
||||
def test_parse_splitter_sizes_empty():
    """The empty string is rejected with None."""
    assert parse_splitter_sizes("", 2) is None
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# build_hyprctl_restore_cmds
|
||||
# ======================================================================
|
||||
|
||||
|
||||
def test_floating_to_floating_no_toggle():
    """Already floating and staying floating: resize + move only, no
    togglefloating command."""
    cmds = build_hyprctl_restore_cmds(
        addr="0xdead", x=100, y=200, w=800, h=600,
        want_floating=True, cur_floating=True,
    )
    joined = " ".join(cmds)
    assert "togglefloating" not in joined
    assert "resizewindowpixel" in joined
    assert "movewindowpixel" in joined
|
||||
|
||||
|
||||
def test_tiled_to_floating_has_toggle():
    """Currently tiled, want floating: exactly one togglefloating to
    enter the floating state."""
    cmds = build_hyprctl_restore_cmds(
        addr="0xdead", x=100, y=200, w=800, h=600,
        want_floating=True, cur_floating=False,
    )
    assert sum("togglefloating" in c for c in cmds) == 1
|
||||
|
||||
|
||||
def test_tiled_primes_floating_cache():
    """Want tiled while already tiled: two toggles prime Hyprland's
    floating-geometry cache (tiled->float, float->tiled), wrapped in a
    no_anim on/off pair so the round trip is invisible."""
    cmds = build_hyprctl_restore_cmds(
        addr="0xdead", x=100, y=200, w=800, h=600,
        want_floating=False, cur_floating=False,
    )
    assert sum("togglefloating" in c for c in cmds) == 2
    assert sum("no_anim 1" in c for c in cmds) == 1
    assert sum("no_anim 0" in c for c in cmds) == 1
|
||||
|
||||
|
||||
def test_floating_to_tiled_one_toggle():
    """Currently floating, want tiled: a single toggle back to tiled."""
    cmds = build_hyprctl_restore_cmds(
        addr="0xdead", x=100, y=200, w=800, h=600,
        want_floating=False, cur_floating=True,
    )
    assert sum("togglefloating" in c for c in cmds) == 1
|
||||
|
||||
|
||||
def test_correct_address_in_all_cmds():
    """Every generated command targets the supplied window address."""
    addr = "0xbeef"
    cmds = build_hyprctl_restore_cmds(
        addr=addr, x=0, y=0, w=1920, h=1080,
        want_floating=True, cur_floating=False,
    )
    assert all(addr in cmd for cmd in cmds)
|
||||
169
themes/README.md
169
themes/README.md
@ -82,18 +82,24 @@ Pick whichever matches your overall desktop aesthetic. Both variants share the s
|
||||
| Tokyo Night | [tokyo-night-rounded.qss](tokyo-night-rounded.qss) | [tokyo-night-square.qss](tokyo-night-square.qss) |
|
||||
| Everforest | [everforest-rounded.qss](everforest-rounded.qss) | [everforest-square.qss](everforest-square.qss) |
|
||||
|
||||
<picture><img src="../screenshots/themes/nord.png" alt="Nord" width="400"></picture> <picture><img src="../screenshots/themes/catppuccin-mocha.png" alt="Catppuccin Mocha" width="400"></picture>
|
||||
|
||||
<picture><img src="../screenshots/themes/gruvbox.png" alt="Gruvbox" width="400"></picture> <picture><img src="../screenshots/themes/solarized-dark.png" alt="Solarized Dark" width="400"></picture>
|
||||
|
||||
<picture><img src="../screenshots/themes/tokyo-night.png" alt="Tokyo Night" width="400"></picture> <picture><img src="../screenshots/themes/everforest.png" alt="Everforest" width="400"></picture>
|
||||
|
||||
## Widget Targets
|
||||
|
||||
### Global
|
||||
|
||||
```css
|
||||
QWidget {
|
||||
background-color: #282828;
|
||||
color: #ebdbb2;
|
||||
background-color: ${bg};
|
||||
color: ${text};
|
||||
font-size: 13px;
|
||||
font-family: monospace;
|
||||
selection-background-color: #fe8019; /* grid selection border + hover highlight */
|
||||
selection-color: #282828;
|
||||
selection-background-color: ${accent}; /* grid selection border + hover highlight */
|
||||
selection-color: ${accent_text};
|
||||
}
|
||||
```
|
||||
|
||||
@ -101,39 +107,31 @@ QWidget {
|
||||
|
||||
```css
|
||||
QPushButton {
|
||||
background-color: #333;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
border-radius: 4px;
|
||||
padding: 5px 14px;
|
||||
}
|
||||
QPushButton:hover { background-color: #444; }
|
||||
QPushButton:pressed { background-color: #555; }
|
||||
QPushButton:checked { background-color: #0078d7; } /* Active tab (Browse/Bookmarks/Library), Autoplay, Loop toggles */
|
||||
QPushButton:hover { background-color: ${bg_hover}; }
|
||||
QPushButton:pressed { background-color: ${bg_active}; }
|
||||
QPushButton:checked { background-color: ${accent}; } /* Active tab (Browse/Bookmarks/Library), Autoplay, Loop toggles */
|
||||
```
|
||||
|
||||
**Note:** Qt's QSS does not support the CSS `content` property, so you cannot replace button text (e.g. "Play" → "") via stylesheet alone. However, you can use a Nerd Font to change how unicode characters render:
|
||||
|
||||
```css
|
||||
QPushButton {
|
||||
font-family: "JetBrainsMono Nerd Font", monospace;
|
||||
}
|
||||
```
|
||||
|
||||
To use icon buttons, you would need to modify the Python source code directly — the button labels are set in `preview.py` via `QPushButton("Play")` etc.
|
||||
**Note:** Qt's QSS does not support the CSS `content` property, so you cannot replace button text (e.g. swap icon symbols) via stylesheet alone. The toolbar icon buttons use hardcoded Unicode symbols — to change which symbols appear, modify the Python source directly (see `preview_pane.py` and `popout/window.py`).
|
||||
|
||||
### Text Inputs
|
||||
|
||||
```css
|
||||
QLineEdit, QTextEdit {
|
||||
background-color: #1a1a1a;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
background-color: ${bg};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
border-radius: 4px;
|
||||
padding: 4px 8px;
|
||||
}
|
||||
QLineEdit:focus, QTextEdit:focus {
|
||||
border-color: #0078d7;
|
||||
border-color: ${accent};
|
||||
}
|
||||
```
|
||||
|
||||
@ -141,9 +139,9 @@ QLineEdit:focus, QTextEdit:focus {
|
||||
|
||||
```css
|
||||
QComboBox {
|
||||
background-color: #333;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
border-radius: 4px;
|
||||
padding: 3px 6px;
|
||||
}
|
||||
@ -152,10 +150,10 @@ QComboBox::drop-down {
|
||||
width: 20px;
|
||||
}
|
||||
QComboBox QAbstractItemView {
|
||||
background-color: #333;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
selection-background-color: #444;
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
selection-background-color: ${bg_hover};
|
||||
}
|
||||
```
|
||||
|
||||
@ -163,9 +161,9 @@ QComboBox QAbstractItemView {
|
||||
|
||||
```css
|
||||
QSpinBox {
|
||||
background-color: #333;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
border-radius: 2px;
|
||||
}
|
||||
```
|
||||
@ -174,24 +172,24 @@ QSpinBox {
|
||||
|
||||
```css
|
||||
QScrollBar:vertical {
|
||||
background: #1a1a1a;
|
||||
background: ${bg};
|
||||
width: 10px;
|
||||
border: none;
|
||||
}
|
||||
QScrollBar::handle:vertical {
|
||||
background: #555;
|
||||
background: ${bg_hover};
|
||||
border-radius: 4px;
|
||||
min-height: 20px;
|
||||
}
|
||||
QScrollBar::handle:vertical:hover { background: #0078d7; }
|
||||
QScrollBar::handle:vertical:hover { background: ${bg_active}; }
|
||||
QScrollBar::add-line:vertical, QScrollBar::sub-line:vertical { height: 0; }
|
||||
|
||||
QScrollBar:horizontal {
|
||||
background: #1a1a1a;
|
||||
background: ${bg};
|
||||
height: 10px;
|
||||
}
|
||||
QScrollBar::handle:horizontal {
|
||||
background: #555;
|
||||
background: ${bg_hover};
|
||||
border-radius: 4px;
|
||||
}
|
||||
QScrollBar::add-line:horizontal, QScrollBar::sub-line:horizontal { width: 0; }
|
||||
@ -201,25 +199,25 @@ QScrollBar::add-line:horizontal, QScrollBar::sub-line:horizontal { width: 0; }
|
||||
|
||||
```css
|
||||
QMenuBar {
|
||||
background-color: #1a1a1a;
|
||||
color: #fff;
|
||||
background-color: ${bg};
|
||||
color: ${text};
|
||||
}
|
||||
QMenuBar::item:selected { background-color: #333; }
|
||||
QMenuBar::item:selected { background-color: ${bg_subtle}; }
|
||||
|
||||
QMenu {
|
||||
background-color: #1a1a1a;
|
||||
color: #fff;
|
||||
border: 1px solid #333;
|
||||
background-color: ${bg};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
}
|
||||
QMenu::item:selected { background-color: #333; }
|
||||
QMenu::item:selected { background-color: ${bg_subtle}; }
|
||||
```
|
||||
|
||||
### Status Bar
|
||||
|
||||
```css
|
||||
QStatusBar {
|
||||
background-color: #1a1a1a;
|
||||
color: #888;
|
||||
background-color: ${bg};
|
||||
color: ${text_dim};
|
||||
}
|
||||
```
|
||||
|
||||
@ -227,7 +225,7 @@ QStatusBar {
|
||||
|
||||
```css
|
||||
QSplitter::handle {
|
||||
background: #555;
|
||||
background: ${border};
|
||||
width: 2px;
|
||||
}
|
||||
```
|
||||
@ -236,14 +234,14 @@ QSplitter::handle {
|
||||
|
||||
```css
|
||||
QTabBar::tab {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
background: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
padding: 6px 16px;
|
||||
}
|
||||
QTabBar::tab:selected {
|
||||
background: #444;
|
||||
color: #0078d7;
|
||||
background: ${bg_hover};
|
||||
color: ${accent};
|
||||
}
|
||||
```
|
||||
|
||||
@ -255,7 +253,7 @@ To override the preview controls bar background in QSS:
|
||||
|
||||
```css
|
||||
QWidget#_preview_controls {
|
||||
background: rgba(40, 40, 40, 200); /* your custom translucent bg */
|
||||
background: ${overlay_bg};
|
||||
}
|
||||
```
|
||||
|
||||
@ -263,12 +261,12 @@ Standard slider styling still applies outside the controls bar:
|
||||
|
||||
```css
|
||||
QSlider::groove:horizontal {
|
||||
background: #333;
|
||||
background: ${bg_subtle};
|
||||
height: 4px;
|
||||
border-radius: 2px;
|
||||
}
|
||||
QSlider::handle:horizontal {
|
||||
background: #0078d7;
|
||||
background: ${accent};
|
||||
width: 12px;
|
||||
margin: -4px 0;
|
||||
border-radius: 6px;
|
||||
@ -284,12 +282,12 @@ These overlays use internal styling that overrides QSS. To customize:
|
||||
```css
|
||||
/* Popout top toolbar */
|
||||
QWidget#_slideshow_toolbar {
|
||||
background: rgba(40, 40, 40, 200);
|
||||
background: ${overlay_bg};
|
||||
}
|
||||
|
||||
/* Popout bottom video controls */
|
||||
QWidget#_slideshow_controls {
|
||||
background: rgba(40, 40, 40, 200);
|
||||
background: ${overlay_bg};
|
||||
}
|
||||
```
|
||||
|
||||
@ -297,30 +295,53 @@ Buttons and labels inside both overlays inherit a white-on-transparent style. To
|
||||
|
||||
```css
|
||||
QWidget#_slideshow_toolbar QPushButton {
|
||||
border: 1px solid rgba(255, 255, 255, 120);
|
||||
color: #ccc;
|
||||
border: 1px solid ${border};
|
||||
color: ${text_dim};
|
||||
}
|
||||
QWidget#_slideshow_controls QPushButton {
|
||||
border: 1px solid rgba(255, 255, 255, 120);
|
||||
color: #ccc;
|
||||
border: 1px solid ${border};
|
||||
color: ${text_dim};
|
||||
}
|
||||
```
|
||||
|
||||
### Preview Toolbar
|
||||
### Preview & Popout Toolbar Icon Buttons
|
||||
|
||||
The preview panel has an action toolbar (Bookmark, Save, BL Tag, BL Post, Popout) that appears above the media when a post is active. This toolbar uses the app's default button styling.
|
||||
The preview and popout toolbars use 24x24 icon buttons with Unicode symbols. Each button has an object name for QSS targeting:
|
||||
|
||||
The toolbar does not have a named object ID — it inherits the app's `QPushButton` styles directly.
|
||||
| Object Name | Symbol | Action |
|
||||
|-------------|--------|--------|
|
||||
| `#_tb_bookmark` | ☆ / ★ | Bookmark / Unbookmark |
|
||||
| `#_tb_save` | ⤓ / ✕ | Save / Unsave |
|
||||
| `#_tb_bl_tag` | ⊘ | Blacklist a tag |
|
||||
| `#_tb_bl_post` | ⊗ | Blacklist this post |
|
||||
| `#_tb_popout` | ⧉ | Open popout (preview only) |
|
||||
|
||||
```css
|
||||
/* Style all toolbar icon buttons */
|
||||
QPushButton#_tb_bookmark,
|
||||
QPushButton#_tb_save,
|
||||
QPushButton#_tb_bl_tag,
|
||||
QPushButton#_tb_bl_post,
|
||||
QPushButton#_tb_popout {
|
||||
background: transparent;
|
||||
border: 1px solid ${border};
|
||||
color: ${text};
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
The same object names are used in both the preview pane and the popout overlay, so one rule targets both. The symbols themselves are hardcoded in Python — QSS can style the buttons but cannot change which symbol is displayed.
|
||||
|
||||
### Progress Bar (Download)
|
||||
|
||||
```css
|
||||
QProgressBar {
|
||||
background-color: #333;
|
||||
background-color: ${bg_subtle};
|
||||
border: none;
|
||||
}
|
||||
QProgressBar::chunk {
|
||||
background-color: #0078d7;
|
||||
background-color: ${accent};
|
||||
}
|
||||
```
|
||||
|
||||
@ -328,9 +349,9 @@ QProgressBar::chunk {
|
||||
|
||||
```css
|
||||
QToolTip {
|
||||
background-color: #333;
|
||||
color: #fff;
|
||||
border: 1px solid #555;
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border};
|
||||
padding: 4px;
|
||||
}
|
||||
```
|
||||
@ -349,8 +370,8 @@ Click and drag on empty grid space to select multiple thumbnails. The rubber ban
|
||||
|
||||
```css
|
||||
QRubberBand {
|
||||
background: rgba(0, 120, 215, 40);
|
||||
border: 1px solid #0078d7;
|
||||
background: ${accent}; /* use rgba(...) variant for translucency */
|
||||
border: 1px solid ${accent};
|
||||
}
|
||||
```
|
||||
|
||||
@ -360,10 +381,10 @@ The library tab's count label switches between three visual states depending on
|
||||
|
||||
```css
|
||||
QLabel[libraryCountState="empty"] {
|
||||
color: #a6adc8; /* dim text — search miss or empty folder */
|
||||
color: ${text_dim}; /* dim text — search miss or empty folder */
|
||||
}
|
||||
QLabel[libraryCountState="error"] {
|
||||
color: #f38ba8; /* danger color — directory unreachable */
|
||||
color: ${danger}; /* danger color — directory unreachable */
|
||||
font-weight: bold;
|
||||
}
|
||||
```
|
||||
|
||||
@ -9,26 +9,25 @@
|
||||
*/
|
||||
|
||||
/* @palette
|
||||
bg: #1e1e2e
|
||||
bg_alt: #181825
|
||||
bg_subtle: #313244
|
||||
bg_hover: #45475a
|
||||
bg_active: #585b70
|
||||
text: #cdd6f4
|
||||
text_dim: #a6adc8
|
||||
text_disabled: #6c7086
|
||||
border: #313244
|
||||
border_strong: #45475a
|
||||
accent: #cba6f7
|
||||
accent_text: #1e1e2e
|
||||
accent_dim: #b4befe
|
||||
link: #89b4fa
|
||||
danger: #f38ba8
|
||||
success: #a6e3a1
|
||||
warning: #f9e2af
|
||||
overlay_bg: rgba(30, 30, 46, 200)
|
||||
bg: #1e1e2e
|
||||
bg_alt: #181825
|
||||
bg_subtle: #313244
|
||||
bg_hover: #45475a
|
||||
bg_active: #585b70
|
||||
text: #cdd6f4
|
||||
text_dim: #a6adc8
|
||||
text_disabled: #6c7086
|
||||
border: #313244
|
||||
border_strong: #45475a
|
||||
accent: #cba6f7
|
||||
accent_text: #1e1e2e
|
||||
accent_dim: #b4befe
|
||||
link: #89b4fa
|
||||
danger: #f38ba8
|
||||
success: #a6e3a1
|
||||
warning: #f9e2af
|
||||
overlay_bg: rgba(30, 30, 46, 200)
|
||||
*/
|
||||
|
||||
/* ---------- Base ---------- */
|
||||
|
||||
QWidget {
|
||||
@ -43,8 +42,6 @@ QWidget:disabled {
|
||||
color: ${text_disabled};
|
||||
}
|
||||
|
||||
/* Labels should never paint an opaque background — they sit on top of
|
||||
* other widgets in many places (toolbars, info panels, overlays). */
|
||||
QLabel {
|
||||
background: transparent;
|
||||
}
|
||||
@ -60,7 +57,7 @@ QPushButton {
|
||||
color: ${text};
|
||||
border: 1px solid ${border_strong};
|
||||
border-radius: 4px;
|
||||
padding: 2px 8px;
|
||||
padding: 2px 6px;
|
||||
min-height: 17px;
|
||||
}
|
||||
QPushButton:hover {
|
||||
@ -92,49 +89,26 @@ QPushButton:flat:hover {
|
||||
background-color: ${bg_hover};
|
||||
}
|
||||
|
||||
QToolButton {
|
||||
background-color: transparent;
|
||||
color: ${text};
|
||||
border: 1px solid transparent;
|
||||
border-radius: 4px;
|
||||
padding: 4px;
|
||||
}
|
||||
QToolButton:hover {
|
||||
background-color: ${bg_hover};
|
||||
border-color: ${border_strong};
|
||||
}
|
||||
QToolButton:pressed, QToolButton:checked {
|
||||
background-color: ${bg_active};
|
||||
}
|
||||
|
||||
/* ---------- Inputs ---------- */
|
||||
|
||||
QLineEdit, QSpinBox, QDoubleSpinBox, QTextEdit, QPlainTextEdit {
|
||||
QLineEdit, QSpinBox, QTextEdit {
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: 1px solid ${border_strong};
|
||||
border-radius: 4px;
|
||||
padding: 2px 6px;
|
||||
/* min-height ensures the painted text fits inside the widget bounds
|
||||
* even when a parent layout (e.g. QFormLayout inside a QGroupBox)
|
||||
* compresses the natural sizeHint. Without this, spinboxes in dense
|
||||
* forms render with the top of the value text clipped. */
|
||||
min-height: 16px;
|
||||
selection-background-color: ${accent};
|
||||
selection-color: ${accent_text};
|
||||
}
|
||||
QLineEdit:focus,
|
||||
QSpinBox:focus,
|
||||
QDoubleSpinBox:focus,
|
||||
QTextEdit:focus,
|
||||
QPlainTextEdit:focus {
|
||||
QTextEdit:focus {
|
||||
border-color: ${accent};
|
||||
}
|
||||
QLineEdit:disabled,
|
||||
QSpinBox:disabled,
|
||||
QDoubleSpinBox:disabled,
|
||||
QTextEdit:disabled,
|
||||
QPlainTextEdit:disabled {
|
||||
QTextEdit:disabled {
|
||||
background-color: ${bg_alt};
|
||||
color: ${text_disabled};
|
||||
border-color: ${border};
|
||||
@ -145,7 +119,7 @@ QComboBox {
|
||||
color: ${text};
|
||||
border: 1px solid ${border_strong};
|
||||
border-radius: 4px;
|
||||
padding: 2px 6px;
|
||||
padding: 2px 4px;
|
||||
min-height: 16px;
|
||||
}
|
||||
QComboBox:hover {
|
||||
@ -156,7 +130,7 @@ QComboBox:focus {
|
||||
}
|
||||
QComboBox::drop-down {
|
||||
border: none;
|
||||
width: 18px;
|
||||
width: 14px;
|
||||
}
|
||||
QComboBox QAbstractItemView {
|
||||
background-color: ${bg_subtle};
|
||||
@ -315,19 +289,6 @@ QSlider::handle:horizontal:hover {
|
||||
background: ${accent_dim};
|
||||
}
|
||||
|
||||
QSlider::groove:vertical {
|
||||
background: ${bg_subtle};
|
||||
width: 4px;
|
||||
border-radius: 2px;
|
||||
}
|
||||
QSlider::handle:vertical {
|
||||
background: ${accent};
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
margin: 0 -5px;
|
||||
border-radius: 6px;
|
||||
}
|
||||
|
||||
/* ---------- Progress ---------- */
|
||||
|
||||
QProgressBar {
|
||||
@ -343,33 +304,28 @@ QProgressBar::chunk {
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
/* ---------- Checkboxes & radio buttons ---------- */
|
||||
/* ---------- Checkboxes ---------- */
|
||||
|
||||
QCheckBox, QRadioButton {
|
||||
QCheckBox {
|
||||
background: transparent;
|
||||
color: ${text};
|
||||
spacing: 6px;
|
||||
}
|
||||
QCheckBox::indicator, QRadioButton::indicator {
|
||||
QCheckBox::indicator {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
background-color: ${bg_subtle};
|
||||
border: 1px solid ${border_strong};
|
||||
}
|
||||
QCheckBox::indicator {
|
||||
border-radius: 3px;
|
||||
}
|
||||
QRadioButton::indicator {
|
||||
border-radius: 7px;
|
||||
}
|
||||
QCheckBox::indicator:hover, QRadioButton::indicator:hover {
|
||||
QCheckBox::indicator:hover {
|
||||
border-color: ${accent};
|
||||
}
|
||||
QCheckBox::indicator:checked, QRadioButton::indicator:checked {
|
||||
QCheckBox::indicator:checked {
|
||||
background-color: ${accent};
|
||||
border-color: ${accent};
|
||||
}
|
||||
QCheckBox::indicator:disabled, QRadioButton::indicator:disabled {
|
||||
QCheckBox::indicator:disabled {
|
||||
background-color: ${bg_alt};
|
||||
border-color: ${border};
|
||||
}
|
||||
@ -384,9 +340,9 @@ QToolTip {
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
/* ---------- Item views (lists, trees, tables) ---------- */
|
||||
/* ---------- Lists ---------- */
|
||||
|
||||
QListView, QListWidget, QTreeView, QTreeWidget, QTableView, QTableWidget {
|
||||
QListView, QListWidget {
|
||||
background-color: ${bg};
|
||||
alternate-background-color: ${bg_alt};
|
||||
color: ${text};
|
||||
@ -395,35 +351,18 @@ QListView, QListWidget, QTreeView, QTreeWidget, QTableView, QTableWidget {
|
||||
selection-color: ${accent_text};
|
||||
outline: none;
|
||||
}
|
||||
QListView::item, QListWidget::item,
|
||||
QTreeView::item, QTreeWidget::item,
|
||||
QTableView::item, QTableWidget::item {
|
||||
QListView::item, QListWidget::item {
|
||||
padding: 4px;
|
||||
}
|
||||
QListView::item:hover, QListWidget::item:hover,
|
||||
QTreeView::item:hover, QTreeWidget::item:hover,
|
||||
QTableView::item:hover, QTableWidget::item:hover {
|
||||
QListView::item:hover, QListWidget::item:hover {
|
||||
background-color: ${bg_hover};
|
||||
}
|
||||
QListView::item:selected, QListWidget::item:selected,
|
||||
QTreeView::item:selected, QTreeWidget::item:selected,
|
||||
QTableView::item:selected, QTableWidget::item:selected {
|
||||
QListView::item:selected, QListWidget::item:selected {
|
||||
background-color: ${accent};
|
||||
color: ${accent_text};
|
||||
}
|
||||
|
||||
QHeaderView::section {
|
||||
background-color: ${bg_subtle};
|
||||
color: ${text};
|
||||
border: none;
|
||||
border-right: 1px solid ${border};
|
||||
padding: 4px 8px;
|
||||
}
|
||||
QHeaderView::section:hover {
|
||||
background-color: ${bg_hover};
|
||||
}
|
||||
|
||||
/* ---------- Tabs ---------- */
|
||||
/* ---------- Tabs (settings dialog) ---------- */
|
||||
|
||||
QTabWidget::pane {
|
||||
border: 1px solid ${border};
|
||||
@ -448,7 +387,7 @@ QTabBar::tab:hover:!selected {
|
||||
color: ${text};
|
||||
}
|
||||
|
||||
/* ---------- Group boxes ---------- */
|
||||
/* ---------- Group boxes (settings dialog) ---------- */
|
||||
|
||||
QGroupBox {
|
||||
background: transparent;
|
||||
@ -465,63 +404,14 @@ QGroupBox::title {
|
||||
color: ${text_dim};
|
||||
}
|
||||
|
||||
/* ---------- Frames ---------- */
|
||||
|
||||
QFrame[frameShape="4"], /* HLine */
|
||||
QFrame[frameShape="5"] /* VLine */ {
|
||||
background: ${border};
|
||||
color: ${border};
|
||||
}
|
||||
|
||||
/* ---------- Toolbars ---------- */
|
||||
|
||||
QToolBar {
|
||||
background: ${bg};
|
||||
border: none;
|
||||
spacing: 4px;
|
||||
padding: 2px;
|
||||
}
|
||||
QToolBar::separator {
|
||||
background: ${border};
|
||||
width: 1px;
|
||||
margin: 4px 4px;
|
||||
}
|
||||
|
||||
/* ---------- Dock widgets ---------- */
|
||||
|
||||
QDockWidget {
|
||||
color: ${text};
|
||||
titlebar-close-icon: none;
|
||||
}
|
||||
QDockWidget::title {
|
||||
background: ${bg_subtle};
|
||||
padding: 4px;
|
||||
border: 1px solid ${border};
|
||||
}
|
||||
|
||||
/* ---------- Rubber band (multi-select drag rectangle) ---------- */
|
||||
/* ---------- Rubber band (multi-select drag) ---------- */
|
||||
|
||||
QRubberBand {
|
||||
background: ${accent};
|
||||
border: 1px solid ${accent};
|
||||
/* Qt blends rubber band at ~30% so this reads as a translucent
|
||||
* accent-tinted rectangle without needing rgba(). */
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/* ---------- Library count label states ---------- */
|
||||
/*
|
||||
* The library tab's count label switches between three visual states
|
||||
* depending on what refresh() found. The state is exposed as a Qt
|
||||
* dynamic property `libraryCountState` so users can override these
|
||||
* rules in their custom.qss without touching the Python.
|
||||
*
|
||||
* normal N files — default text color, no rule needed
|
||||
* empty no items — dim text (no items found, search miss)
|
||||
* error bad/unreachable — danger color + bold (real error)
|
||||
*/
|
||||
|
||||
QLabel[libraryCountState="empty"] {
|
||||
color: ${text_dim};
|
||||
@ -531,18 +421,18 @@ QLabel[libraryCountState="error"] {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* ---------- Thumbnail dot indicators (Qt properties on ThumbnailWidget) ---------- */
|
||||
/* ---------- Thumbnail indicators ---------- */
|
||||
|
||||
ThumbnailWidget {
|
||||
qproperty-savedColor: #22cc22; /* green dot: saved to library — universal "confirmed" feel */
|
||||
qproperty-bookmarkedColor: #ffcc00; /* yellow star: bookmarked */
|
||||
qproperty-savedColor: #22cc22;
|
||||
qproperty-bookmarkedColor: #ffcc00;
|
||||
qproperty-selectionColor: ${accent};
|
||||
qproperty-multiSelectColor: ${accent_dim};
|
||||
qproperty-hoverColor: ${accent};
|
||||
qproperty-idleColor: ${border_strong};
|
||||
}
|
||||
|
||||
/* ---------- Info panel tag category colors ---------- */
|
||||
/* ---------- Info panel tag colors ---------- */
|
||||
|
||||
InfoPanel {
|
||||
qproperty-tagArtistColor: ${warning};
|
||||
@ -553,19 +443,13 @@ InfoPanel {
|
||||
qproperty-tagLoreColor: ${text_dim};
|
||||
}
|
||||
|
||||
/* ---------- Video player letterbox / pillarbox color (mpv background) ---------- */
|
||||
/* ---------- Video player letterbox ---------- */
|
||||
|
||||
VideoPlayer {
|
||||
qproperty-letterboxColor: ${bg};
|
||||
}
|
||||
|
||||
/* ---------- Popout overlay bars (slideshow toolbar + slideshow controls + embedded preview controls) ---------- */
|
||||
/*
|
||||
* The popout window's translucent toolbar (top) and transport controls
|
||||
* (bottom) float over the video content. The bg color comes from the
|
||||
* @palette overlay_bg slot. Children get the classic overlay treatment:
|
||||
* transparent backgrounds, near-white text, hairline borders.
|
||||
*/
|
||||
/* ---------- Popout overlay bars ---------- */
|
||||
|
||||
QWidget#_slideshow_toolbar,
|
||||
QWidget#_slideshow_controls,
|
||||
@ -588,6 +472,8 @@ QWidget#_preview_controls QPushButton {
|
||||
color: white;
|
||||
border: 1px solid rgba(255, 255, 255, 80);
|
||||
padding: 2px 6px;
|
||||
font-size: 15px;
|
||||
font-weight: bold;
|
||||
}
|
||||
QWidget#_slideshow_toolbar QPushButton:hover,
|
||||
QWidget#_slideshow_controls QPushButton:hover,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user