mirror of https://github.com/AvengeMedia/DankMaterialShell.git synced 2026-01-24 21:42:51 -05:00

Compare commits


254 Commits

Author SHA1 Message Date
bbedward
f08e2ef5b8 hypr: add disable output option 2025-12-28 23:15:43 -05:00
Joaquim S.
2b0070c31a matugen/template: Soothing neovim theme (#1201) 2025-12-28 21:49:44 -05:00
Marcus Ramberg
ae82716afa core: apply gopls automatic modernizers (#1198) 2025-12-28 21:48:56 -05:00
johngalt
c281bf3b53 Adding Zen Browser matugen template (#1181)
* Adding Zen Browser matugen template

* Fixing indentation for matugen.go edits

* Trying to fix linting again..

* Tweaking contrasting surface colors in CSS; renamed the file to match how Firefox userChrome is named, and changed the output directory to the DMS config directory (like Firefox)

* Modifying Zen userChrome again: removing unused CSS, tweaking colors to better align with how pywalfox handles backgrounds/toolbars

* Last few tweaks on CSS - changing url bar highlight color, changing contrast on selected urls in dropdown

* matugen.go: fix check command for zen browser

* search_index: add zen browser setting
2025-12-28 12:52:41 -05:00
bbedward
45b8b2a895 clipboard: don't store sensitive mime types in history
fixes #1185
2025-12-28 11:13:34 -05:00
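The commit above filters what gets persisted rather than what gets copied. A minimal Go sketch of that idea, assuming a hypothetical history layer; x-kde-passwordManagerHint is a common convention set by password managers and stands in here for whatever MIME types DMS actually checks:

```go
package main

import "fmt"

// Before persisting a clipboard offer to history, skip it if any advertised
// MIME type marks the content as sensitive. The exact list DMS checks is not
// shown in this log; this one is an assumption for illustration.
var sensitiveMimeTypes = map[string]bool{
	"x-kde-passwordManagerHint": true,
}

func shouldStoreInHistory(offerMimeTypes []string) bool {
	for _, mt := range offerMimeTypes {
		if sensitiveMimeTypes[mt] {
			return false // sensitive hint present: keep it out of history
		}
	}
	return true
}

func main() {
	fmt.Println(shouldStoreInHistory([]string{"text/plain", "x-kde-passwordManagerHint"})) // false
}
```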
Tacticalsmooth
7b9ba840fb fixed lambda issue on nixos (#1188) 2025-12-28 12:50:07 +01:00
xxyangyoulin
84fb567ff5 Notification: Right-click to toggle Do Not Disturb (#1174) 2025-12-28 00:34:52 -05:00
Lucas
bbd57e0673 nix: remove dgop input; expose quickshell package (#1183)
* nix: remove dgop input

* nix: update quickshell

* nix: expose quickshell in outputs
2025-12-27 20:41:26 -05:00
vha
b1632a0a03 Fix #1179 normal scrolling direction (#1182)
* added reverse scrolling to settings and widget

* added support for dankbar scrolling

* Better settings description

* removed isNiri conditional from search index

* fix #1179 breaking normal scroll direction
2025-12-27 18:23:31 -05:00
bbedward
7aff1182c8 desktop widgets: easier copy/delete 2025-12-27 14:13:29 -05:00
vha
fbe362cd20 feat: Added reverse workspace scrolling (#1179)
* added reverse scrolling to settings and widget

* added support for dankbar scrolling

* Better settings description

* removed isNiri conditional from search index
2025-12-27 13:47:50 -05:00
bbedward
e7f94c94cc i18n: fix RTL alignment of settings sidebar 2025-12-27 12:51:10 -05:00
bbedward
7523190b16 i18n: add farsi 2025-12-27 12:46:40 -05:00
bbedward
da37e16b6e window: remove unused signal 2025-12-27 12:32:42 -05:00
xxyangyoulin
5c420ab50b AppSearch: Add ID search fallback for non-English / non-standard apps (#1173) 2025-12-27 12:26:43 -05:00
Joaquim S.
4493b7c231 matugen/template: Improve on generated theme + using dank16 instead (#1177) 2025-12-27 11:14:18 -05:00
bbedward
40a96c6eaf vpn: initialize slices so they don't serialize as null 2025-12-27 10:52:34 -05:00
bbedward
23a93082c6 fix trailing whitespace 2025-12-27 09:43:42 -05:00
Tacticalsmooth
f7650b5e1f Added mirroring option to display configuration (#1156)
* <quickshell/Modules/Settings/DisplayConfig>: Added mirroring option to the displayconfiguration

* removed niri option for mirroring

* Fix trailing whitespace

* removed empty rows

---------

Co-authored-by: Postboote1 <stoessel.matthias>
2025-12-27 09:27:19 -05:00
bbedward
3ebdd5631c dankdash: fix binding breaking when clicking overview card
fixes #1170
2025-12-27 00:58:47 -05:00
Linken Quy Dinh
6c4caf121a add seconds to wallpaper cycling (#1169) 2025-12-27 00:53:48 -05:00
bbedward
89788e9ca7 workspace: change pill hover color 2025-12-27 00:53:01 -05:00
bbedward
0787c63fed bar: change widget base hover blend logic 2025-12-27 00:49:11 -05:00
bbedward
9fc0d5efff settings: add index extractor script for search 2025-12-26 22:21:20 -05:00
bbedward
6611dfbe05 settings: fix search height 2025-12-26 20:44:35 -05:00
bbedward
8a71ead51d themes: remove Catppuccin, support accent colors 2025-12-26 20:28:55 -05:00
bbedward
d9d6ab5776 settings: add search
- because tabs aren't loaded at runtime, we have to maintain a separate index
- Less ideal, but functional enough for now
2025-12-26 19:19:47 -05:00
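Because the settings tabs are only instantiated when opened, search cannot introspect them live; the index extractor script added a few commits later generates a static list ahead of time. A hedged Go sketch of that general shape, with all entries and field names invented for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// Hypothetical shape of a pre-generated settings search index entry: the tab
// it lives on, its visible label, and extra keywords for matching.
type IndexEntry struct {
	Tab, Label string
	Keywords   []string
}

// search does a simple case-insensitive substring match over labels and
// keywords; the real shell may rank or fuzzy-match differently.
func search(index []IndexEntry, query string) []IndexEntry {
	q := strings.ToLower(query)
	var hits []IndexEntry
	for _, e := range index {
		if strings.Contains(strings.ToLower(e.Label), q) {
			hits = append(hits, e)
			continue
		}
		for _, k := range e.Keywords {
			if strings.Contains(strings.ToLower(k), q) {
				hits = append(hits, e)
				break
			}
		}
	}
	return hits
}

func main() {
	index := []IndexEntry{
		{Tab: "Displays", Label: "Night mode", Keywords: []string{"gamma", "temperature"}},
		{Tab: "Dock", Label: "Isolate running apps by display"},
	}
	fmt.Println(search(index, "gamma"))
}
```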
bbedward
d6fe7bea27 vpn: remove redundant property definitions 2025-12-26 19:02:16 -05:00
xxyangyoulin
1194f3ffb8 media: add scroll wheel behavior configuration (#1160) 2025-12-26 14:43:47 -05:00
bbedward
5ac81e6dd6 dankbar: don't apply exclusive zone to popup positioning 2025-12-26 14:38:34 -05:00
Lucas
987856a1de nix: update flake inputs (#1161) 2025-12-26 14:15:22 -05:00
bbedward
ef52ce0990 themes: support for variants 2025-12-26 14:00:14 -05:00
bbedward
06b14a5869 dankinstall: fix plasma session collision 2025-12-26 13:06:21 -05:00
bbedward
fd839059c0 popout: use mapToItem instead of mapToGlobal for popout positioning
fixes #1152
2025-12-25 12:50:26 -05:00
bbedward
ec6db7962a i18n: sync terms 2025-12-25 12:09:40 -05:00
Aaron Tulino
adf92cbc46 Add battery charge limit (#1151) 2025-12-25 12:09:09 -05:00
Aaron Tulino
6b6f51cd1f Add volume and brightness percentages (#1148) 2025-12-25 12:07:29 -05:00
Aaron Tulino
df6c60213f Use volume_mute icon for volume==0 (#1150) 2025-12-25 11:31:05 -05:00
Aaron Tulino
6303304a10 Allow toggling mute with right-click on bar (#1147) 2025-12-24 16:12:47 -05:00
Aaron Tulino
8e76789119 Fix touchpad scrolling behavior (#1146)
* Fix touchpad scrolling behavior

* Make touchpad scroll smoothly
For a normal mouse wheel, adjusting by 5% per scroll makes sense. For a touchpad, however, it should adjust by the smallest increment possible for a smooth experience.
2025-12-24 16:12:34 -05:00
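The distinction above is between coarse mouse-wheel notches and fine touchpad deltas. The widget itself handles this in QML; the Go sketch below only illustrates the decision logic, with the 5% per-notch step taken from the description and the 1% touchpad step assumed:

```go
package main

import "fmt"

// stepFor chooses an adjustment step based on the kind of scroll input.
// Discrete wheels report coarse angle deltas (typically 120 per notch);
// touchpads report fine pixel deltas and should use the smallest step for a
// smooth experience. Values and names are illustrative only.
func stepFor(pixelDelta, angleDelta int) float64 {
	if pixelDelta != 0 {
		return 0.01 // touchpad: smallest increment per event (assumed 1%)
	}
	notches := float64(angleDelta) / 120.0
	return 0.05 * notches // mouse wheel: 5% per notch, as described above
}

func main() {
	fmt.Println(stepFor(0, 120)) // one wheel notch -> 0.05
	fmt.Println(stepFor(3, 0))   // touchpad event  -> 0.01
}
```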
Aaron Tulino
10e81cfdd3 Clear lock screen textbox on Escape key press (#1139) 2025-12-24 09:47:55 -05:00
Aaron Tulino
03fd3a4f16 Add Do Not Disturb to IPC (#1140) 2025-12-24 09:47:44 -05:00
bbedward
8fdc748ed2 weather: fix icons 2025-12-24 00:33:54 -05:00
bbedward
6c56d23b93 themes: fix terminals always dark with custom themes 2025-12-23 22:33:33 -05:00
bbedward
45d34dcb5b themes: consistent usage of primaryPressed 2025-12-23 21:45:06 -05:00
bbedward
d7ac0d50fa launcher: use primaryPressed for hover 2025-12-23 21:36:59 -05:00
bbedward
1d4d145187 desktop plugins: enable by default 2025-12-23 14:10:55 -05:00
bbedward
a5b9ff98c0 displays: explicitly write scale 1 for niri
fixes #1116
2025-12-23 11:53:23 -05:00
bbedward
6feaecd92e niri: add gaps and radius override 2025-12-23 11:00:20 -05:00
bbedward
b066a25308 dankdash: use CachingImage in wallpaper tab
fixes #1130
2025-12-23 10:47:50 -05:00
bbedward
777a552b57 spotlight: restore darken background option
fixes #1126
2025-12-23 10:45:11 -05:00
bbedward
7dbe608c28 settings: fix theme application of default-settings json 2025-12-23 10:33:14 -05:00
bbedward
61630e447b desktop-widgets: add overlay IPC and overview option 2025-12-22 20:29:01 -05:00
bbedward
91385e7c83 dankbar: option to show when bar is hidden and no windows 2025-12-22 19:54:53 -05:00
bbedward
04648fcca7 spotlight: remove darken bg opt, improve performance 2025-12-22 16:07:40 -05:00
bbedward
080fc7e44e i18n: term update 2025-12-22 14:37:47 -05:00
bbedward
0b60da3d6d dock: add isolate running apps by display option 2025-12-22 14:35:04 -05:00
claymorwan
a4492b90e7 matugen: fix equibop theme not working (#1122)
* matugen: equibop theme

* style: whitespace apparently

* matugen: fix equibop theme not working
2025-12-22 14:19:57 -05:00
bbedward
c9331b7338 dropdown: improve perf + add fuzzy search to printers 2025-12-22 14:18:24 -05:00
bbedward
4982ea53dd window: add support for startSystemMove, resize, maximize to floating
windows
2025-12-22 13:18:37 -05:00
claymorwan
c703cb6504 matugen: equibop theme (#1119)
* matugen: equibop theme

* style: whitespace apparently
2025-12-22 12:03:20 -05:00
bbedward
a7494971fd desktop widgets: centralize config in desktop widgets tab, variants
always available
2025-12-22 10:39:19 -05:00
purian23
c548255bfc ubuntu: DMS-Greeter 2025-12-21 23:56:17 -05:00
purian23
9656c7afd7 ubuntu: Update hardcoded arcs 2025-12-21 23:37:08 -05:00
purian23
414b8c8272 Ubuntu: DMS - add ARM64 support 2025-12-21 22:31:59 -05:00
bbedward
b4f83d09d4 themes: incorporate theme registry, browser, dms URI scheme handling 2025-12-21 22:03:48 -05:00
purian23
67ee74ac20 core: Fix Debian Architecture logic 2025-12-21 21:59:20 -05:00
purian23
93539d2b6b core: Debian Sid/OpenSuse Leap, Slowroll support 2025-12-21 21:36:33 -05:00
bbedward
524d967745 matugen: remove bad kitty tab option
fixes #1109
2025-12-21 17:42:20 -05:00
bbedward
0effbebbb6 matugen: fix GTK4 light mode
fixes #1110
fixes #1056
2025-12-21 12:52:01 -05:00
bbedward
dca07a70f8 desktop widgets: put grid on bottom layer 2025-12-20 09:58:01 -05:00
bbedward
02936c97fd desktop widget: handle key events in widget 2025-12-20 09:34:52 -05:00
Ethan Todd
8f7e732827 notifications: add modal IPC command for dismissing all popups. rename clearAllPopups() to dismissAllPopups(), since clear is otherwise used to mean eliminated entirely rather than just sent to the notification center. (#1100) 2025-12-20 08:13:26 -05:00
johngalt
5ffe563b7d adding gruvbox material custom theme varieties (#1098) 2025-12-19 22:38:22 -05:00
Joaquim S.
6ef08c3d54 matugen/template: Added neovim to matugen pipeline (#1097) 2025-12-19 14:16:45 -05:00
bbedward
908b4b58cd desktop widgets: add grid/grid size hints 2025-12-19 14:05:04 -05:00
purian23
f2611e0de0 fedora: Remove cliphist on dms-git 2025-12-18 23:55:09 -05:00
purian23
ea75a9d351 distro: Convert DMS Greeter to Stable on Fedora Copr 2025-12-18 23:00:13 -05:00
bbedward
3a744d7d68 core: new line on version 2025-12-18 22:12:50 -05:00
purian23
195d312ae2 distro: Decouple Fedora DMS Stable spec 2025-12-18 20:03:23 -05:00
Omar Abragh
76006a7377 matugen: Set cursor color for theme (#1088) 2025-12-18 17:23:42 -05:00
bbedward
11536da512 fix missing import 2025-12-18 17:22:01 -05:00
bbedward
2a91bc41f7 i18n: general term cleanup, add missing terms, interpolate some 2025-12-18 16:19:27 -05:00
bbedward
baf23157fc i18n: sync translations 2025-12-18 14:51:57 -05:00
bbedward
83b81be825 keybinds: add log if ShortcutInhibitor is missing 2025-12-18 14:15:54 -05:00
bbedward
4aefa0f1f7 core: skip replacing niri/dms configs
fixes #1072
2025-12-18 11:58:45 -05:00
bbedward
e53a7cee97 matugen: wrap pywalfox in sh 2025-12-18 11:51:37 -05:00
bbedward
8437e1aa7b desktop widgets: use preview window instead of margin shift for non-niri 2025-12-18 10:05:38 -05:00
bbedward
632f40cc0a desktop plugins: use mapToGlobal on moving widgets 2025-12-18 08:57:58 -05:00
Ethan Todd
7d81445341 notifications: add modal function for clearing all (#1082) 2025-12-18 08:28:58 -05:00
bbedward
78a5f401d7 core: remove ascii art from version 2025-12-18 00:14:02 -05:00
bbedward
8745f98c95 matugen: fix vscode editor color reload 2025-12-17 23:40:09 -05:00
bbedward
f0f5bcc630 matugen: add color reload capability to vscode theme 2025-12-17 23:30:06 -05:00
purian23
8a3c513605 distro: Relocate Ubuntu dgop/dsearch to danklinux 2025-12-17 23:08:37 -05:00
bbedward
145d2636dd clock: make desktop clock not use precision seconds always 2025-12-17 21:00:04 -05:00
bbedward
f2b9dc8988 displays: add adaptiveSyncSupported to wlroutput API 2025-12-17 20:36:54 -05:00
bbedward
2e4d56728b niri: track open modals in modal manager for focus transfers 2025-12-17 20:21:34 -05:00
bbedward
18231ed324 niri: don't rely on text field length for launching 2025-12-17 16:40:54 -05:00
bbedward
d0b61d8ed1 niri: release focus for popouts on overview 2025-12-17 16:17:44 -05:00
bbedward
d385a44949 notifications: attempt to minimize rapid window creation/destruction 2025-12-17 16:10:25 -05:00
bbedward
d97392d46e clipboard: remove ownership option 2025-12-17 15:34:40 -05:00
bbedward
6abb2c73fd desktop: fix widget display toggle 2025-12-17 14:24:13 -05:00
bbedward
7e141c6b36 dankbar/vpn: right click to quick connect 2025-12-17 14:16:11 -05:00
bbedward
53553c1f62 clock: add analog seconds option for desktop widget 2025-12-17 14:04:14 -05:00
bbedward
523ccc6bf8 i18n: WIP initial RTL support
- notifications
- color picker
- process list
- settings
- control center, dash
- launcher

part of #1059
2025-12-17 13:50:06 -05:00
bbedward
811e89fcfa matugen: change pywalfox post hook 2025-12-17 12:43:05 -05:00
bbedward
5d5be4d9d7 lock: different pam fallback 2025-12-17 12:40:28 -05:00
bbedward
88457ab139 lock: add pam login fallback locally 2025-12-17 12:31:45 -05:00
bbedward
0034926df7 plugins/desktop-widgets: create a new "desktop" widget plugin type
- Draggable per-monitor background layer widgets
- Add basic dms version checks on plugins
- Clock: built-in clock desktop plugin
- dgop: built-in system monitor desktop plugin
2025-12-17 12:08:03 -05:00
musjj
d082d41ab9 nix: refactor module structure and flake output (#1014)
- The program module is now called dank-material-shell
- The homeModules structure is flattened
2025-12-17 08:12:30 -03:00
purian23
b7911475b6 distros: Prefer stable quickshell 2025-12-16 23:52:37 -05:00
bbedward
672754b0b5 dankdash: fix weather tooltips
fixes #1065
2025-12-16 15:27:44 -05:00
bbedward
0d1553123b binds: accidentally deleted import 2025-12-16 15:16:44 -05:00
bbedward
ba6c51c102 core: exit non-zero when SIGUSR1 is received (for systemd restart) 2025-12-16 14:47:46 -05:00
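A minimal sketch of the mechanism, assuming the unit is configured with Restart=on-failure: exiting non-zero on SIGUSR1 makes systemd restart the shell, while SIGTERM still exits cleanly. The exact exit code is an assumption:

```go
package main

import (
	"fmt"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, syscall.SIGUSR1, syscall.SIGTERM)

	switch <-sig {
	case syscall.SIGUSR1:
		// Non-zero exit so a Restart=on-failure unit brings the shell back up.
		fmt.Println("SIGUSR1 received, exiting non-zero for systemd restart")
		os.Exit(1)
	case syscall.SIGTERM:
		os.Exit(0) // normal shutdown
	}
}
```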
bbedward
d64206a9ff core: detect quickshell crash on SIGTERM 2025-12-16 14:44:22 -05:00
bbedward
d9a1089039 displays: add hyprland HDR options 2025-12-16 14:12:51 -05:00
bbedward
55fe463405 displays: break monolith config down and allow floats/fix integer
writing (niri)
2025-12-16 13:36:00 -05:00
bbedward
e84210e962 displays: fix niri hot corner config 2025-12-16 12:54:26 -05:00
bbedward
ff506548d3 displays: add niri-specific layout options to configurator 2025-12-16 12:23:34 -05:00
arfan
f6b09751e9 fix: update getWorkspaceIndex function to include index parameter also fix workspace padding number (#1062) 2025-12-16 11:32:21 -05:00
bbedward
3d863979c4 core: preserve quickshell exit code 2025-12-16 09:01:13 -05:00
purian23
2947ff4131 distro: Revise server side file handling 2025-12-16 01:08:12 -05:00
purian23
b8fca10896 Remove auto run on tags 2025-12-16 00:17:13 -05:00
purian23
33e45794d2 No run on push 2025-12-15 23:29:36 -05:00
purian23
42cc88ca65 Workflow update 2025-12-15 23:24:16 -05:00
purian23
0b7f2416ca distro: Bring up Stable 2025-12-15 23:10:24 -05:00
purian23
5d5c745ee5 Push the logs 2025-12-15 22:18:35 -05:00
purian23
e0429e4c60 distro: Re-add suffix 2025-12-15 21:31:13 -05:00
bbedward
0bece5287e dock: improve pinned app re-ordering feedback, fix vertical dock
ordering
fixes #1046
fixes #938
2025-12-15 20:46:36 -05:00
purian23
60b5e47836 update gitignore env 2025-12-15 19:06:43 -05:00
purian23
aa75b44790 distro: OBS version matching 2025-12-15 18:03:58 -05:00
bbedward
769f58caa9 displays: fix reverted state for position 2025-12-15 17:43:52 -05:00
bbedward
e7facf740d update CHANGELOG 2025-12-15 17:18:59 -05:00
Austin Farmer
04921eef62 Move Ghostty Application Theming (#1047)
* Moved ghostty config

First test. Seems to work but probably broke something.

* Updated test
2025-12-15 17:16:46 -05:00
Oliver Portee
8863c42879 fix light mode/dark mode switch for stock themes (#1057) 2025-12-15 17:16:23 -05:00
bbedward
2745116ac5 displays: add configurator for niri, Hyprland, and MangoWC
- Configure position, VRR, orientation, resolution, refresh rate
- Split Display section into Configuration, Gamma, and Widgets
- MangoWC omits VRR because it doesn't have per-display VRR
- HDR configuration not present for Hyprland
2025-12-15 16:36:14 -05:00
bbedward
bafe1c5fee niri: handle window urgency event
fixes #1033
2025-12-15 12:16:43 -05:00
bbedward
306d7b2ce0 gamma: guard against application
- QML will sync its desired state with Go, e.g. when settings are changed
  or opened. Go was applying gamma even if it was unchanged
- Track last applied gamma to avoid sends
2025-12-15 11:43:16 -05:00
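The guard amounts to caching the last applied values and comparing before sending. A hedged Go sketch; the field names and the apply callback are illustrative, not the shell's actual API:

```go
package gamma

// State holds the values the compositor would be asked to apply.
type State struct {
	Temperature int
	Brightness  float64
}

// Controller remembers the last state that was actually applied so repeated,
// unchanged requests from QML don't trigger redundant sends.
type Controller struct {
	lastApplied *State
}

func (c *Controller) Apply(desired State, apply func(State) error) error {
	if c.lastApplied != nil && *c.lastApplied == desired {
		return nil // unchanged: skip the redundant apply
	}
	if err := apply(desired); err != nil {
		return err
	}
	s := desired
	c.lastApplied = &s
	return nil
}
```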
bbedward
e9f6583c60 workspaces: add scroll handler to widget itself 2025-12-15 11:12:27 -05:00
redybcs
42a2835929 Update flake.nix to fix Hash Mismatch (#1035)
Looks like there haven't been any go.mod updates since the workflow to fix the hash was repaired.
2025-12-15 14:14:29 +01:00
purian23
c2c90c680e distro: OBS edgecase 2025-12-15 01:28:00 -05:00
purian23
cd01f6378c Revise OBS / PPA Workflows 2025-12-15 00:37:42 -05:00
purian23
6033075de6 distro: Revise builds to use API variants 2025-12-15 00:32:40 -05:00
tsukasa
79794d3441 dankmodal: removed backgroundWindow to fix clicking twice (#1030)
* dankmodal: removed backgroundWindow

removed 'backgroundWindow' by merging its role into 'contentWindow'

* made single window behavior specific to hyprland

this should keep other compositor behavior the same and fix having to
click twice to exit out of Spotlight/ClipboardHist/Powermenu
2025-12-14 19:52:06 -05:00
bbedward
031f86b417 Revert "Fixed having to click twice to exit out of Spotlight/Cliphist/Powermenu (#1022)"
This reverts commit ca5fe6f7db.
2025-12-14 19:09:04 -05:00
bbedward
891f53cf6f battery: fix button group scaling 2025-12-14 17:22:54 -05:00
bbedward
848991cf5b idle: implement screensaver interface
- Mainly used to create the idle inhibitor when an app requests
  screensaver inhibit
2025-12-14 16:49:59 -05:00
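org.freedesktop.ScreenSaver is the well-known session-bus interface applications (e.g. video players) call to inhibit the screensaver. A minimal sketch with godbus of what exposing it could look like, keeping only the cookie bookkeeping and leaving out the actual Wayland idle-inhibitor creation; this is not the shell's real implementation:

```go
package main

import (
	"sync"

	"github.com/godbus/dbus/v5"
)

type screenSaver struct {
	mu      sync.Mutex
	next    uint32
	reasons map[uint32]string
}

// Inhibit matches the org.freedesktop.ScreenSaver signature:
// (application name, reason) -> cookie.
func (s *screenSaver) Inhibit(app, reason string) (uint32, *dbus.Error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.next++
	s.reasons[s.next] = app + ": " + reason
	// Here the shell would create or refresh its Wayland idle inhibitor.
	return s.next, nil
}

func (s *screenSaver) UnInhibit(cookie uint32) *dbus.Error {
	s.mu.Lock()
	defer s.mu.Unlock()
	delete(s.reasons, cookie)
	// Here the shell would drop the inhibitor once no cookies remain.
	return nil
}

func main() {
	conn, err := dbus.SessionBus()
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	ss := &screenSaver{reasons: map[uint32]string{}}
	if err := conn.Export(ss, "/org/freedesktop/ScreenSaver", "org.freedesktop.ScreenSaver"); err != nil {
		panic(err)
	}
	if _, err := conn.RequestName("org.freedesktop.ScreenSaver", dbus.NameFlagDoNotQueue); err != nil {
		panic(err)
	}
	select {} // serve requests forever
}
```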
bbedward
d37ddd1d41 vpn: optim cc and dankbar widget 2025-12-14 16:12:46 -05:00
Pi Home Server
00d12acd5e Add hide option for updater widget (#1028) 2025-12-14 15:55:47 -05:00
bbedward
3bbc78a44f dankbar: make control center widget per-instance not global
fixes #1017
2025-12-14 15:52:46 -05:00
bbedward
b0a6652cc6 ci: simplify changelog handling 2025-12-14 14:23:27 -05:00
bbedward
cb710b2e5f notifications: fix redundant height animation 2025-12-14 13:40:21 -05:00
tsukasa
ca5fe6f7db Fixed having to click twice to exit out of Spotlight/Cliphist/Powermenu (#1022)
There are possibly more, but this fixes the need to click the
background twice to close those modals.
2025-12-14 11:16:25 -05:00
bbedward
fb75f4c68b lock/greeter: fix font alignment
fixes #1018
2025-12-14 11:13:48 -05:00
bbedward
5e2a418485 binds: fix to scale with arbitrary font sizes 2025-12-14 10:56:03 -05:00
bbedward
24fe215067 ci: pull changelogs from obs/launchpad APIs
- Get changelog from OBS/Launchpad API endpoints, instead of storing in
  git
2025-12-14 10:42:00 -05:00
bbedward
ab2e8875ac runningapps: round icon margin to integer 2025-12-14 10:25:36 -05:00
dms-ci[bot]
dec5740c74 ci: Auto-update PPA packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-14 15:22:12 +00:00
bbedward
208266dfa3 dwl: fix layout popout 2025-12-14 10:17:58 -05:00
dms-ci[bot]
32f218d58c ci: Auto-update OBS packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-14 04:07:07 +00:00
dms-ci[bot]
6fdaab2ccd ci: Auto-update PPA packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-14 03:58:50 +00:00
purian23
d336866f44 distro: Let the workflow run 2025-12-13 22:54:58 -05:00
purian23
b40df5f1c4 distro: Unify options across repos 2025-12-13 22:38:25 -05:00
dms-ci[bot]
3c9886ad1b ci: Auto-update PPA packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-14 01:55:34 +00:00
bbedward
ea205ebd12 wallpaper: pause cycling when locked, clean state when changing modes 2025-12-13 20:29:02 -05:00
bbedward
30dad46c94 dankbar: add scroll wheel behavior configuration 2025-12-13 20:12:21 -05:00
dms-ci[bot]
fbf79e62e9 ci: Auto-update OBS packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-13 21:23:58 +00:00
dms-ci[bot]
efcf72bc08 ci: Auto-update PPA packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-13 21:19:59 +00:00
bbedward
3b511e2f55 i18n: add hungarian 2025-12-13 14:03:49 -05:00
dms-ci[bot]
e4e20fb43a ci: Auto-update PPA packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-13 15:26:22 +00:00
dms-ci[bot]
48ccff67a6 ci: Auto-update OBS packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-13 15:25:01 +00:00
Souyama
a783d6507b Change DPMS off to DPMS toggle in hyprland.conf (#1011) 2025-12-13 10:07:11 -05:00
bbedward
fd94e60797 cava: don't set method/source 2025-12-13 10:04:20 -05:00
bbedward
a1bcb7ea30 vpn: just try and import all types on errors 2025-12-13 10:02:57 -05:00
bbedward
31b67164c7 clipboard: re-add ownership option 2025-12-13 09:45:04 -05:00
bbedward
786c13f892 clipboard: fix mime type selection 2025-12-13 09:35:55 -05:00
bbedward
c652659d54 wallpaper: scale texture to physical pixels
- reverts a regression
2025-12-13 08:43:46 -05:00
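Scaling to physical pixels means sizing the texture to the logical surface size multiplied by the output scale factor, so fractionally scaled outputs don't get a soft, upscaled wallpaper. A tiny illustrative Go helper (not the shell's actual code):

```go
package main

import (
	"fmt"
	"math"
)

// physicalSize converts a logical surface size to physical pixels for a
// given output scale factor.
func physicalSize(logicalW, logicalH int, scale float64) (int, int) {
	return int(math.Round(float64(logicalW) * scale)),
		int(math.Round(float64(logicalH) * scale))
}

func main() {
	w, h := physicalSize(1920, 1200, 1.5)
	fmt.Println(w, h) // 2880 1800
}
```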
dms-ci[bot]
ca39196f13 ci: Auto-update OBS packages [dms,dms-git]
🤖 Automated by GitHub Actions
2025-12-13 07:00:01 +00:00
dms-ci[bot]
f02dd8fd4b ci: Auto-update PPA packages [dms,dms-git,dms-greeter]
🤖 Automated by GitHub Actions
2025-12-13 06:51:47 +00:00
purian23
0f89886ce7 distro: Break the loop 2025-12-13 01:44:20 -05:00
dms-ci[bot]
1118404192 ci: Auto-update PPA packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-13 06:34:59 +00:00
dms-ci[bot]
f011ea6cce ci: Auto-update OBS packages [dms-git]
🤖 Automated by GitHub Actions
2025-12-13 06:30:45 +00:00
dms-ci[bot]
b2ac9c6c1a ci: Auto-update OBS packages [dms,dms-git]
🤖 Automated by GitHub Actions
2025-12-13 06:06:44 +00:00
dms-ci[bot]
fbab41abd6 ci: Auto-update PPA packages [dms,dms-git,dms-greeter]
🤖 Automated by GitHub Actions
2025-12-13 05:58:14 +00:00
bbedward
82f881af5b matugen: scrub the never implemented dynamic contrast palette 2025-12-13 00:51:51 -05:00
purian23
68de9b437d distro: Switch to dms-ci 2025-12-13 00:50:42 -05:00
bbedward
830a715b6d wlcontext: use poll with wake pipe instead of read deadlines 2025-12-13 00:46:30 -05:00
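The poll-plus-wake-pipe pattern replaces read deadlines: the event loop blocks in poll(2) on the Wayland socket and on the read end of a pipe, and writing one byte to the pipe wakes it immediately for shutdown or new work. A hedged Go sketch assuming golang.org/x/sys/unix, with error and EINTR handling trimmed:

```go
package main

import "golang.org/x/sys/unix"

// waker is a self-pipe: writing to w wakes anyone polling on r.
type waker struct {
	r, w int
}

func newWaker() (*waker, error) {
	p := make([]int, 2)
	if err := unix.Pipe(p); err != nil {
		return nil, err
	}
	return &waker{r: p[0], w: p[1]}, nil
}

func (wk *waker) Wake() { _, _ = unix.Write(wk.w, []byte{0}) }

// waitReadable blocks until either fd is readable or the waker fires.
// It reports whether fd itself has data.
func waitReadable(fd int, wk *waker) (bool, error) {
	fds := []unix.PollFd{
		{Fd: int32(fd), Events: unix.POLLIN},
		{Fd: int32(wk.r), Events: unix.POLLIN},
	}
	if _, err := unix.Poll(fds, -1); err != nil {
		return false, err
	}
	if fds[1].Revents&unix.POLLIN != 0 {
		var buf [1]byte
		_, _ = unix.Read(wk.r, buf[:]) // drain the wake byte
	}
	return fds[0].Revents&unix.POLLIN != 0, nil
}

func main() {}
```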
bbedward
ce4aca9a72 fix shellcheck 2025-12-13 00:29:20 -05:00
bbedward
7641171a01 clipboard: move cl receive to main wlcontext goroutine 2025-12-13 00:16:56 -05:00
purian23
119e084e52 distro: Remove PR tests 2025-12-13 00:15:37 -05:00
bbedward
7c6d52913e niri: fix test 2025-12-12 23:57:50 -05:00
bbedward
f63ab5cf7c ci: add workflow for pushing stable tag 2025-12-12 23:57:50 -05:00
purian23
50f1bc5017 distros: Remove false path dir 2025-12-12 23:52:31 -05:00
bbedward
c3ab409b6a clipboard: scrap persist, optimize mime-type handling 2025-12-12 23:48:07 -05:00
purian23
44f6ab4878 distro: Reformat workflow newlines 2025-12-12 23:35:37 -05:00
bbedward
5fda6e0f12 clipboard: allow configuration even when disabled 2025-12-12 23:17:55 -05:00
purian23
38068e78c9 distros: PR writeback 2025-12-12 23:02:52 -05:00
purian23
66d22727e9 distros: Enhance build automation 2025-12-12 22:41:51 -05:00
Lucas
db2f68e35d nix: fix qt-plugins path (#1005) 2025-12-13 01:34:25 +01:00
Marcus Ramberg
352277ec15 notifications: add ipc call for toggleDoNotDisturb (#1002) 2025-12-12 18:21:00 -05:00
bbedward
d6043e64f2 osd: increase shadow buffer
accounts for percentage view
2025-12-12 18:11:31 -05:00
bbedward
d3f5b8f32e niri: fix gap reactivity 2025-12-12 16:58:07 -05:00
bbedward
6c3c722674 niri: add warnings on auto-generated files 2025-12-12 16:53:52 -05:00
purian23
5b8edb13d8 distro: OBS updates 2025-12-12 15:42:21 -05:00
bbedward
c595727b94 osd: optimize surface damage
fixes #994
2025-12-12 15:37:39 -05:00
bbedward
d46302588a clipboard: add shift+enter to paste from clipboard history modal
fixes #358
2025-12-12 15:29:10 -05:00
bbedward
0ff9fdb365 notifications: add swipe to dismiss functionality
fixes #927
2025-12-12 14:39:51 -05:00
purian23
e95f7ce367 Update Copr specs 2025-12-12 12:30:18 -05:00
Pi Home Server
df1a8f4066 Add lock screen layout settings (#981)
* Add lock screen layout settings

* Update translation keys
2025-12-12 11:45:00 -05:00
bbedward
32e6c1660f wallpaper: clamp max texture size 2025-12-12 11:17:28 -05:00
bbedward
d6b9b72e9b ci: disable pkg builds from main release wf 2025-12-12 10:16:24 -05:00
bbedward
179ad03fa4 ci: switch to dispatch-based release flow 2025-12-12 10:01:44 -05:00
bbedward
c3cb82c84e dankinstall: call add-wants for niri/hyprland with dms service 2025-12-12 09:58:12 -05:00
bbedward
4b52e2ed9e niri: fix keybind handling of cooldown-ms parameter 2025-12-12 09:52:35 -05:00
bbedward
77fd61f81e workspaces: make icons scale with bar size, fix valign of numbers
fixes #990
2025-12-12 00:23:40 -05:00
Lucas
c3ffb7f83b nix: remove wl-clipboard and cliphist dependencies (#991) 2025-12-11 21:44:36 -05:00
Lucas
89dcd72d70 nix: let paths be used instead of only packages in plugins (#988) 2025-12-11 23:57:22 +01:00
bbedward
5c3346aa9d core: fix test 2025-12-11 16:33:31 -05:00
bbedward
7c4b383477 clipboard: persistence off by default
- It's a little risky and messy as a default
2025-12-11 16:28:56 -05:00
bbedward
bdc0e8e0fc clipboard: don't take ownership on nil offers 2025-12-11 15:55:42 -05:00
bbedward
6d66f93565 core: mock wayland context for tests & add i18n guidance to CONTRIBUTING 2025-12-11 14:50:02 -05:00
Lucas
9cac93b724 nix: fix pre-commit hook in dev-shell (#987) 2025-12-11 14:40:19 -05:00
bbedward
0709f263af core: add test coverage for some of the wayland stack
- mostly targeting any race issue detection
2025-12-11 13:47:18 -05:00
Lucas
4e4effd8b1 nix: fix home-manager module plugins (#984) 2025-12-11 19:36:32 +01:00
bbedward
f9632cba61 core: remove hyprpicker remnant 2025-12-11 13:05:07 -05:00
bbedward
38db6a41d5 gamma: fix initial night mode enablement 2025-12-11 12:27:58 -05:00
bbedward
7c6f0432c8 clipboard: add copyEntry (by id) handler 2025-12-11 12:00:47 -05:00
bbedward
56ff9368be matugen: add option to disable DMS templates
fixes #983
2025-12-11 11:48:59 -05:00
bbedward
597e21d44d clipboard: remove wl-copy references 2025-12-11 11:10:27 -05:00
bbedward
5bf54632be media: add option to disable visualizer in bar widget
fixes #978
2025-12-11 10:55:32 -05:00
bbedward
3a8d3ee515 core: use stdlib for xdg dirs 2025-12-11 10:15:23 -05:00
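On Linux the Go standard library already resolves the XDG base directories (os.UserConfigDir honors XDG_CONFIG_HOME, os.UserCacheDir honors XDG_CACHE_HOME, with ~/.config and ~/.cache fallbacks), so no hand-rolled lookup is needed. A small example; the "dms" subdirectory is just illustrative:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	cfg, err := os.UserConfigDir()
	if err != nil {
		panic(err)
	}
	cache, _ := os.UserCacheDir()
	fmt.Println(filepath.Join(cfg, "dms"), filepath.Join(cache, "dms"))
}
```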
bbedward
1c1cf866e2 settings: make default height screen-aware 2025-12-11 09:51:44 -05:00
bbedward
ccc1df75f1 nix: update vendorHash 2025-12-11 09:50:50 -05:00
bbedward
d2c3f87656 ci: fix nix vendor-hash workflow 2025-12-11 09:46:57 -05:00
bbedward
6d62229b5f clipboard: introduce native clipboard, clip-persist, clip-storage functionality 2025-12-11 09:41:07 -05:00
Marcus Ramberg
7c88865d67 Refactor pre-commit hooks to use prek (#976)
* ci: change to prek for pre-commit

* refactor: fix shellcheck warnings for the scripts

* chore: unify whitespace formatting

* nix: add prek to dev shell
2025-12-11 09:11:12 -05:00
bbedward
c8cfe0cb5a dwl: fix layout popout not opening
fixes #980
2025-12-11 09:05:53 -05:00
Lucas
e573bdba92 nix: add QML dependencies to dms-shell package (#967) 2025-12-11 09:19:43 +01:00
Lucas
d8cd15d361 nix: add plugins in NixOS module (#970)
* nix: remove unnecessary /etc/xdg/quickshell/dms and .config/quickshell/dms

* nix: add plugins in NixOS module
2025-12-11 09:03:22 +01:00
Lucas
1db3907838 nix: fix greeter per-monitor and per-mode wallpapers (#974) 2025-12-11 09:01:14 +01:00
Lucas
72cfd37ab7 nix: fix niri module (#969) 2025-12-10 23:21:52 -05:00
bbedward
1e67ee995e plugins: hide uninstall and update buttons for system plugins 2025-12-10 19:30:58 -05:00
bbedward
6c26b4080c core: fix socket reported CLI version 2025-12-10 16:48:44 -05:00
purian23
0dbd59b223 Manual Changelog versioning 2025-12-10 13:52:29 -05:00
Lucas
b2066c60d1 nix: drop unnecessary dependencies and enable power and accounts daemons (#963) 2025-12-10 19:35:58 +01:00
bbedward
8d7ae324ff Revert "distro: update ppa-build script to ref right version"
This reverts commit c0d3c4f875.
2025-12-10 13:31:15 -05:00
bbedward
c0d3c4f875 distro: update ppa-build script to ref right version 2025-12-10 13:28:34 -05:00
purian23
27a771648a Ubuntu workflow tweak 2025-12-10 12:37:56 -05:00
purian23
86affc7304 Add WorkDIR to build steps 2025-12-10 12:33:41 -05:00
purian23
d939b99628 Workflow build increment logic 2025-12-10 12:27:10 -05:00
purian23
1fcf777f3d Bump OBS spec 2025-12-10 12:13:43 -05:00
purian23
7a8e23faa9 Update build scripts 2025-12-10 12:06:28 -05:00
bbedward
73a4dd3321 change codename 2025-12-10 11:18:08 -05:00
purian23
13ce873a69 Update dms stable systemd & desktop path 2025-12-10 11:16:03 -05:00
520 changed files with 60181 additions and 12477 deletions

.editorconfig (Normal file, 8 lines)

@@ -0,0 +1,8 @@
[*.sh]
# like -i=4
indent_style = space
indent_size = 4
[*.nix]
# like -i=4
indent_style = space
indent_size = 4


@@ -1,69 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$HOOK_DIR/.." && pwd)"
cd "$REPO_ROOT"

# =============================================================================
# Go CI checks (when core/ files are staged)
# =============================================================================
STAGED_CORE_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep '^core/' || true)

if [[ -n "$STAGED_CORE_FILES" ]]; then
    echo "Go files staged in core/, running CI checks..."
    cd "$REPO_ROOT/core"

    # Format check
    echo " Checking gofmt..."
    UNFORMATTED=$(gofmt -s -l . 2>/dev/null || true)
    if [[ -n "$UNFORMATTED" ]]; then
        echo "The following files are not formatted:"
        echo "$UNFORMATTED"
        echo ""
        echo "Run: cd core && gofmt -s -w ."
        exit 1
    fi

    # golangci-lint
    if command -v golangci-lint &>/dev/null; then
        echo " Running golangci-lint..."
        golangci-lint run ./...
    else
        echo " Warning: golangci-lint not installed, skipping lint"
        echo " Install: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"
    fi

    # Tests
    echo " Running tests..."
    if ! go test ./... >/dev/null 2>&1; then
        echo "Tests failed! Run 'go test ./...' for details."
        exit 1
    fi

    # Build checks
    echo " Building..."
    mkdir -p bin
    go build -buildvcs=false -o bin/dms ./cmd/dms
    go build -buildvcs=false -o bin/dms-distro -tags distro_binary ./cmd/dms
    go build -buildvcs=false -o bin/dankinstall ./cmd/dankinstall

    echo "All Go CI checks passed!"
    cd "$REPO_ROOT"
fi

# =============================================================================
# i18n sync check (DISABLED for now)
# =============================================================================
# if [[ -n "${POEDITOR_API_TOKEN:-}" ]] && [[ -n "${POEDITOR_PROJECT_ID:-}" ]]; then
#     if command -v python3 &>/dev/null; then
#         if ! python3 scripts/i18nsync.py check &>/dev/null; then
#             echo "Translations out of sync"
#             echo "Run: python3 scripts/i18nsync.py sync"
#             exit 1
#         fi
#     fi
# fi

exit 0


@@ -6,7 +6,7 @@ labels: "bug"
assignees: ""
---
<!-- If your issue is related to ICONS
- Purple and black checkerboards are QT's way of signalling an icon doesn't exist
- FIX: Configure a QT6 or Icon Pack in DMS Settings that has the icon you want
- Follow the [THEMING](https://danklinux.com/docs/dankmaterialshell/icon-theming) section to ensure your QT environment variable is configured correctly for themes.
@@ -62,4 +62,4 @@ Paste error messages or logs here
## Screenshots/Recordings
<!-- If applicable, add screenshots or screen recordings -->


@@ -30,4 +30,4 @@ Is this feature specific to one compositor?
## Alternatives/Existing Solutions
<!-- Include any similar/pre-existing products that solve this problem -->


@@ -37,4 +37,4 @@ assignees: ""
## Screenshots/Recordings
<!-- If applicable, add screenshots or screen recordings -->

.github/workflows/backup/run-obs.yml.bak (vendored Normal file, 383 lines)

@@ -0,0 +1,383 @@
name: Update OBS Packages
on:
workflow_dispatch:
inputs:
package:
description: "Package to update (dms, dms-git, or all)"
required: false
default: "all"
force_upload:
description: "Force upload without version check"
required: false
default: "false"
type: choice
options:
- "false"
- "true"
rebuild_release:
description: "Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)"
required: false
default: ""
push:
tags:
- "v*"
schedule:
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
version: ${{ steps.check.outputs.version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install OSC
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Check for updates
id: check
run: |
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION (always update)"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "Checking if dms-git source has changed..."
# Get current commit hash (8 chars to match spec format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Check OBS for last uploaded commit
OBS_BASE="$HOME/.cache/osc-checkouts"
mkdir -p "$OBS_BASE"
OBS_PROJECT="home:AvengeMedia:dms-git"
if [[ -d "$OBS_BASE/$OBS_PROJECT/dms-git" ]]; then
cd "$OBS_BASE/$OBS_PROJECT/dms-git"
osc up -q 2>/dev/null || true
# Extract commit hash from spec Version line & format like; 0.6.2+git2264.a679be68
if [[ -f "dms-git.spec" ]]; then
OBS_COMMIT=$(grep "^Version:" "dms-git.spec" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$OBS_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already uploaded to OBS, skipping"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (OBS has $OBS_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract OBS commit, proceeding with update"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No spec file in OBS, proceeding with update"
fi
cd "${{ github.workspace }}"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 First upload to OBS, update needed"
fi
elif [[ "${{ github.event.inputs.force_upload }}" == "true" ]]; then
PKG="${{ github.event.inputs.package }}"
if [[ -z "$PKG" || "$PKG" == "all" ]]; then
echo "packages=all" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🚀 Force upload: all packages"
else
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🚀 Force upload: $PKG"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=all" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
update-obs:
name: Upload to OBS
needs: check-updates
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
if: |
github.event.inputs.force_upload == 'true' ||
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Generate GitHub App Token
id: generate_token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ steps.generate_token.outputs.token }}
- name: Check if last commit was automated
id: check-loop
run: |
LAST_COMMIT_MSG=$(git log -1 --pretty=%B | head -1)
if [[ "$LAST_COMMIT_MSG" == "ci: Auto-update PPA packages"* ]] || [[ "$LAST_COMMIT_MSG" == "ci: Auto-update OBS packages"* ]]; then
echo "⏭️ Last commit was automated ($LAST_COMMIT_MSG), skipping to prevent infinite loop"
echo "skip=true" >> $GITHUB_OUTPUT
else
echo "✅ Last commit was not automated, proceeding"
echo "skip=false" >> $GITHUB_OUTPUT
fi
- name: Determine packages to update
if: steps.check-loop.outputs.skip != 'true'
id: packages
run: |
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: updating git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Update dms-git spec version
if: steps.check-loop.outputs.skip != 'true' && (contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all')
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
# Update version in spec
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
# Add changelog entry
DATE_STR=$(date "+%a %b %d %Y")
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
sed -i "/%changelog/a\\$CHANGELOG_ENTRY" distro/opensuse/dms-git.spec
- name: Update Debian dms-git changelog version
if: steps.check-loop.outputs.skip != 'true' && (contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all')
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
# Debian version format: 0.6.2+git2256.9162e314
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
CHANGELOG_DATE=$(date -R)
CHANGELOG_FILE="distro/debian/dms-git/debian/changelog"
# Get current version from changelog
CURRENT_VERSION=$(head -1 "$CHANGELOG_FILE" | sed 's/.*(\([^)]*\)).*/\1/')
echo "Current Debian version: $CURRENT_VERSION"
echo "New version: $NEW_VERSION"
# Only update if version changed
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
# Create new changelog entry at top
TEMP_CHANGELOG=$(mktemp)
cat > "$TEMP_CHANGELOG" << EOF
dms-git ($NEW_VERSION) nightly; urgency=medium
* Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
EOF
# Prepend to existing changelog
cat "$CHANGELOG_FILE" >> "$TEMP_CHANGELOG"
mv "$TEMP_CHANGELOG" "$CHANGELOG_FILE"
echo "✓ Updated Debian changelog: $CURRENT_VERSION → $NEW_VERSION"
else
echo "✓ Debian changelog already at version $NEW_VERSION"
fi
- name: Update dms stable version
if: steps.check-loop.outputs.skip != 'true' && steps.packages.outputs.version != ''
run: |
VERSION="${{ steps.packages.outputs.version }}"
VERSION_NO_V="${VERSION#v}"
echo "Updating packaging to version $VERSION_NO_V"
# Update openSUSE dms spec (stable only)
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
# Update openSUSE spec changelog
DATE_STR=$(date "+%a %b %d %Y")
CHANGELOG_ENTRY="* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1\\n- Update to stable $VERSION release\\n- Bug fixes and improvements"
sed -i "/%changelog/a\\$CHANGELOG_ENTRY\\n" distro/opensuse/dms.spec
# Update Debian _service files (both tar_scm and download_url formats)
for service in distro/debian/*/_service; do
if [[ -f "$service" ]]; then
# Update tar_scm revision parameter (for dms-git)
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
# Update download_url paths (for dms stable)
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
fi
done
# Update Debian changelog for dms stable
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
CHANGELOG_DATE=$(date -R)
TEMP_CHANGELOG=$(mktemp)
cat > "$TEMP_CHANGELOG" << EOF
dms ($VERSION_NO_V) stable; urgency=medium
* Update to $VERSION stable release
* Bug fixes and improvements
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
EOF
cat "distro/debian/dms/debian/changelog" >> "$TEMP_CHANGELOG"
mv "$TEMP_CHANGELOG" "distro/debian/dms/debian/changelog"
echo "✓ Updated Debian changelog to $VERSION_NO_V"
fi
- name: Install Go
if: steps.check-loop.outputs.skip != 'true'
uses: actions/setup-go@v5
with:
go-version: "1.24"
- name: Install OSC
if: steps.check-loop.outputs.skip != 'true'
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Upload to OBS
if: steps.check-loop.outputs.skip != 'true'
env:
FORCE_UPLOAD: ${{ github.event.inputs.force_upload }}
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
MESSAGE="Automated update from GitHub Actions"
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
fi
if [[ "$PACKAGES" == "all" ]]; then
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
fi
- name: Get changed packages
if: steps.check-loop.outputs.skip != 'true'
id: changed-packages
run: |
# Check if there are any changes to commit
if git diff --exit-code distro/debian/ distro/opensuse/ >/dev/null 2>&1; then
echo "has_changes=false" >> $GITHUB_OUTPUT
echo "📋 No changelog or spec changes to commit"
else
echo "has_changes=true" >> $GITHUB_OUTPUT
# Get list of changed packages for commit message
CHANGED_DEB=$(git diff --name-only distro/debian/ 2>/dev/null | grep 'debian/changelog' | xargs dirname 2>/dev/null | xargs dirname 2>/dev/null | xargs basename 2>/dev/null | tr '\n' ', ' | sed 's/, $//' || echo "")
CHANGED_SUSE=$(git diff --name-only distro/opensuse/ 2>/dev/null | grep '\.spec$' | sed 's|distro/opensuse/||' | sed 's/\.spec$//' | tr '\n' ', ' | sed 's/, $//' || echo "")
PKGS=$(echo "$CHANGED_DEB,$CHANGED_SUSE" | tr ',' '\n' | grep -v '^$' | sort -u | tr '\n' ',' | sed 's/,$//')
echo "packages=$PKGS" >> $GITHUB_OUTPUT
echo "📋 Changed packages: $PKGS"
fi
- name: Commit packaging changes
if: steps.check-loop.outputs.skip != 'true' && steps.changed-packages.outputs.has_changes == 'true'
run: |
git config user.name "dms-ci[bot]"
git config user.email "dms-ci[bot]@users.noreply.github.com"
git add distro/debian/*/debian/changelog distro/opensuse/*.spec
git commit -m "ci: Auto-update OBS packages [${{ steps.changed-packages.outputs.packages }}]" -m "🤖 Automated by GitHub Actions"
git pull --rebase origin master
git push
- name: Summary
run: |
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY

.github/workflows/backup/run-ppa.yml.bak (vendored Normal file, 298 lines)

@@ -0,0 +1,298 @@
name: Update PPA Packages
on:
workflow_dispatch:
inputs:
package:
description: "Package to upload (dms, dms-git, dms-greeter, or all)"
required: false
default: "dms-git"
force_upload:
description: "Force upload without version check"
required: false
default: "false"
type: choice
options:
- "false"
- "true"
rebuild_release:
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
required: false
default: ""
schedule:
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check for updates
id: check
run: |
if [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "Checking if dms-git source has changed..."
# Get current commit hash (8 chars to match changelog format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Extract commit hash from changelog
# Format: dms-git (0.6.2+git2264.c5c5ce84) questing; urgency=medium
CHANGELOG_FILE="distro/ubuntu/dms-git/debian/changelog"
if [[ -f "$CHANGELOG_FILE" ]]; then
CHANGELOG_COMMIT=$(head -1 "$CHANGELOG_FILE" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$CHANGELOG_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$CHANGELOG_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already in changelog, skipping upload"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (changelog has $CHANGELOG_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract commit from changelog, proceeding with upload"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No changelog file found, proceeding with upload"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
upload-ppa:
name: Upload to PPA
needs: check-updates
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
if: |
github.event.inputs.force_upload == 'true' ||
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Generate GitHub App Token
id: generate_token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ steps.generate_token.outputs.token }}
- name: Check if last commit was automated
id: check-loop
run: |
LAST_COMMIT_MSG=$(git log -1 --pretty=%B | head -1)
if [[ "$LAST_COMMIT_MSG" == "ci: Auto-update PPA packages"* ]] || [[ "$LAST_COMMIT_MSG" == "ci: Auto-update OBS packages"* ]]; then
echo "⏭️ Last commit was automated ($LAST_COMMIT_MSG), skipping to prevent infinite loop"
echo "skip=true" >> $GITHUB_OUTPUT
else
echo "✅ Last commit was not automated, proceeding"
echo "skip=false" >> $GITHUB_OUTPUT
fi
- name: Set up Go
if: steps.check-loop.outputs.skip != 'true'
uses: actions/setup-go@v5
with:
go-version: "1.24"
cache: false
- name: Install build dependencies
if: steps.check-loop.outputs.skip != 'true'
run: |
sudo apt-get update
sudo apt-get install -y \
debhelper \
devscripts \
dput \
lftp \
build-essential \
fakeroot \
dpkg-dev
- name: Configure GPG
if: steps.check-loop.outputs.skip != 'true'
env:
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
run: |
echo "$GPG_KEY" | gpg --import
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
- name: Determine packages to upload
if: steps.check-loop.outputs.skip != 'true'
id: packages
run: |
if [[ "${{ github.event.inputs.force_upload }}" == "true" ]]; then
PKG="${{ github.event.inputs.package }}"
if [[ -z "$PKG" || "$PKG" == "all" ]]; then
echo "packages=all" >> $GITHUB_OUTPUT
echo "🚀 Force upload: all packages"
else
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "🚀 Force upload: $PKG"
fi
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: uploading git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
# Manual package selection should respect change detection
SELECTED_PKG="${{ github.event.inputs.package }}"
UPDATED_PKG="${{ needs.check-updates.outputs.packages }}"
# Check if manually selected package is in the updated list
if [[ "$UPDATED_PKG" == *"$SELECTED_PKG"* ]] || [[ "$SELECTED_PKG" == "all" ]]; then
echo "packages=$SELECTED_PKG" >> $GITHUB_OUTPUT
echo "📦 Manual selection (has updates): $SELECTED_PKG"
else
echo "packages=" >> $GITHUB_OUTPUT
echo "⚠️ Manual selection '$SELECTED_PKG' has no updates - skipping (use force_upload to override)"
fi
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Upload to PPA
if: steps.check-loop.outputs.skip != 'true'
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
REBUILD_RELEASE="${{ github.event.inputs.rebuild_release }}"
if [[ -z "$PACKAGES" ]]; then
echo "No packages selected for upload. Skipping."
exit 0
fi
# Build command arguments
BUILD_ARGS=()
if [[ -n "$REBUILD_RELEASE" ]]; then
BUILD_ARGS+=("$REBUILD_RELEASE")
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
fi
if [[ "$PACKAGES" == "all" ]]; then
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms to PPA..."
if [ -n "$REBUILD_RELEASE" ]; then
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh dms dms questing "${BUILD_ARGS[@]}"
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-git to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh dms-git dms-git questing "${BUILD_ARGS[@]}"
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-greeter to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh dms-greeter danklinux questing "${BUILD_ARGS[@]}"
else
# Map package to PPA name
case "$PACKAGES" in
dms)
PPA_NAME="dms"
;;
dms-git)
PPA_NAME="dms-git"
;;
dms-greeter)
PPA_NAME="danklinux"
;;
*)
PPA_NAME="$PACKAGES"
;;
esac
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PACKAGES to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "$PACKAGES" "$PPA_NAME" questing "${BUILD_ARGS[@]}"
fi
- name: Get changed packages
if: steps.check-loop.outputs.skip != 'true'
id: changed-packages
run: |
# Check if there are any changelog changes to commit
if git diff --exit-code distro/ubuntu/ >/dev/null 2>&1; then
echo "has_changes=false" >> $GITHUB_OUTPUT
echo "📋 No changelog changes to commit"
else
echo "has_changes=true" >> $GITHUB_OUTPUT
# Get list of changed packages for commit message (deduplicate)
CHANGED=$(git diff --name-only distro/ubuntu/ | grep 'debian/changelog' | sed 's|/debian/changelog||' | xargs -I{} basename {} | sort -u | tr '\n' ',' | sed 's/,$//')
echo "packages=$CHANGED" >> $GITHUB_OUTPUT
echo "📋 Changed packages: $CHANGED"
echo "📋 Debug - Changed files:"
git diff --name-only distro/ubuntu/ | grep 'debian/changelog' || echo "No changelog files found"
fi
- name: Commit changelog changes
if: steps.check-loop.outputs.skip != 'true' && steps.changed-packages.outputs.has_changes == 'true'
run: |
git config user.name "dms-ci[bot]"
git config user.email "dms-ci[bot]@users.noreply.github.com"
git add distro/ubuntu/*/debian/changelog
git commit -m "ci: Auto-update PPA packages [${{ steps.changed-packages.outputs.packages }}]" -m "🤖 Automated by GitHub Actions"
git pull --rebase origin master
git push
- name: Summary
run: |
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ "$PACKAGES" == "all" ]]; then
echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA danklinux**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-git" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY


@@ -33,20 +33,6 @@ jobs:
with:
go-version-file: ./core/go.mod
- name: Format check
run: |
if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
echo "The following files are not formatted:"
gofmt -s -l .
exit 1
fi
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v9
with:
version: v2.6
working-directory: core
- name: Test
run: go test -v ./...

.github/workflows/prek.yml (vendored Normal file, 15 lines)

@@ -0,0 +1,15 @@
name: Pre-commit Checks
on:
  push:
  pull_request:
    branches: [master, main]
jobs:
  pre-commit-check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: run pre-commit hooks
        uses: j178/prek-action@v1


@@ -1,16 +1,19 @@
name: Release
on:
push:
tags:
- "v*"
workflow_dispatch:
inputs:
tag:
description: "Tag to release (e.g., v1.0.1)"
required: true
type: string
permissions:
contents: write
actions: write
concurrency:
group: release-${{ github.ref_name }}
group: release-${{ inputs.tag }}
cancel-in-progress: true
jobs:
@@ -24,10 +27,14 @@ jobs:
run:
working-directory: core
env:
TAG: ${{ inputs.tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ inputs.tag }}
fetch-depth: 0
- name: Set up Go
@@ -54,7 +61,7 @@ jobs:
run: |
set -eux
cd cmd/dankinstall
go build -trimpath -ldflags "-s -w -X main.Version=${GITHUB_REF#refs/tags/}" \
go build -trimpath -ldflags "-s -w -X main.Version=${TAG}" \
-o ../../dankinstall-${{ matrix.arch }}
cd ../..
gzip -9 -k dankinstall-${{ matrix.arch }}
@@ -68,7 +75,7 @@ jobs:
run: |
set -eux
cd cmd/dms
go build -trimpath -ldflags "-s -w -X main.Version=${GITHUB_REF#refs/tags/}" \
go build -trimpath -ldflags "-s -w -X main.Version=${TAG}" \
-o ../../dms-${{ matrix.arch }}
cd ../..
gzip -9 -k dms-${{ matrix.arch }}
@@ -91,7 +98,7 @@ jobs:
run: |
set -eux
cd cmd/dms
go build -trimpath -tags distro_binary -ldflags "-s -w -X main.Version=${GITHUB_REF#refs/tags/}" \
go build -trimpath -tags distro_binary -ldflags "-s -w -X main.Version=${TAG}" \
-o ../../dms-distropkg-${{ matrix.arch }}
cd ../..
gzip -9 -k dms-distropkg-${{ matrix.arch }}
@@ -171,17 +178,18 @@ jobs:
runs-on: ubuntu-24.04
needs: [build-core] #, update-versions]
env:
TAG: ${{ github.ref_name }}
TAG: ${{ inputs.tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ inputs.tag }}
fetch-depth: 0
- name: Fetch updated tag after version bump
run: |
git fetch origin --force tag ${{ github.ref_name }}
git checkout ${{ github.ref_name }}
git fetch origin --force tag ${TAG}
git checkout ${TAG}
- name: Download core artifacts
uses: actions/download-artifact@v4
@@ -273,6 +281,9 @@ jobs:
# Copy root LICENSE and CONTRIBUTING.md to quickshell/ for packaging
cp LICENSE CONTRIBUTING.md quickshell/
# Copy root assets directory to quickshell for systemd service and desktop file
cp -r assets quickshell/
# Tar the CONTENTS of quickshell/, not the directory itself
(cd quickshell && tar --exclude='.git' \
--exclude='.github' \
@@ -387,289 +398,3 @@ jobs:
prerelease: ${{ contains(env.TAG, '-') }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
trigger-obs-update:
runs-on: ubuntu-latest
needs: release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install OSC
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Update OBS packages
run: |
VERSION="${{ github.ref_name }}"
cd distro
bash scripts/obs-upload.sh dms "Update to $VERSION"
trigger-ppa-update:
runs-on: ubuntu-latest
needs: release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install build dependencies
run: |
sudo apt-get update
sudo apt-get install -y \
debhelper \
devscripts \
dput \
lftp \
build-essential \
fakeroot \
dpkg-dev
- name: Configure GPG
env:
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
run: |
echo "$GPG_KEY" | gpg --import
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
- name: Upload to PPA
run: |
VERSION="${{ github.ref_name }}"
cd distro/ubuntu/ppa
bash create-and-upload.sh ../dms dms questing
copr-build:
runs-on: ubuntu-latest
needs: release
env:
TAG: ${{ github.ref_name }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Determine version
id: version
run: |
VERSION="${TAG#v}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Building DMS stable version: $VERSION"
- name: Setup build environment
run: |
sudo apt-get update
sudo apt-get install -y rpm wget curl jq gzip
mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
- name: Download release assets
run: |
VERSION="${{ steps.version.outputs.version }}"
cd ~/rpmbuild/SOURCES
wget "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-qml.tar.gz" || {
echo "Failed to download dms-qml.tar.gz for v${VERSION}"
exit 1
}
- name: Generate stable spec file
run: |
VERSION="${{ steps.version.outputs.version }}"
CHANGELOG_DATE="$(date '+%a %b %d %Y')"
cat > ~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
# Spec for DMS stable releases - Generated by GitHub Actions
%global debug_package %{nil}
%global version VERSION_PLACEHOLDER
%global pkg_summary DankMaterialShell - Material 3 inspired shell for Wayland compositors
Name: dms
Version: %{version}
Release: 1%{?dist}
Summary: %{pkg_summary}
License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
Source0: dms-qml.tar.gz
BuildRequires: gzip
BuildRequires: wget
BuildRequires: systemd-rpm-macros
Requires: (quickshell or quickshell-git)
Requires: accountsservice
Requires: dms-cli = %{version}-%{release}
Requires: dgop
Recommends: cava
Recommends: cliphist
Recommends: danksearch
Recommends: matugen
Recommends: wl-clipboard
Recommends: NetworkManager
Recommends: qt6-qtmultimedia
Suggests: qt6ct
%description
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
and optimized for the niri and hyprland compositors. Features notifications,
app launcher, wallpaper customization, and fully customizable with plugins.
Includes auto-theming for GTK/Qt apps with matugen, 20+ customizable widgets,
process monitoring, notification center, clipboard history, dock, control center,
lock screen, and comprehensive plugin system.
%package -n dms-cli
Summary: DankMaterialShell CLI tool
License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
%description -n dms-cli
Command-line interface for DankMaterialShell configuration and management.
Provides native DBus bindings, NetworkManager integration, and system utilities.
%prep
%setup -q -c -n dms-qml
# Download architecture-specific binaries during build
case "%{_arch}" in
x86_64)
ARCH_SUFFIX="amd64"
;;
aarch64)
ARCH_SUFFIX="arm64"
;;
*)
echo "Unsupported architecture: %{_arch}"
exit 1
;;
esac
wget -O %{_builddir}/dms-cli.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-${ARCH_SUFFIX}.gz" || {
echo "Failed to download dms-cli for architecture %{_arch}"
exit 1
}
gunzip -c %{_builddir}/dms-cli.gz > %{_builddir}/dms-cli
chmod +x %{_builddir}/dms-cli
%build
%install
install -Dm755 %{_builddir}/dms-cli %{buildroot}%{_bindir}/dms
install -d %{buildroot}%{_datadir}/bash-completion/completions
install -d %{buildroot}%{_datadir}/zsh/site-functions
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
%{_builddir}/dms-cli completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
%{_builddir}/dms-cli completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
%{_builddir}/dms-cli completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :
install -Dm644 assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service
install -Dm644 assets/dms-open.desktop %{buildroot}%{_datadir}/applications/dms-open.desktop
install -Dm644 assets/danklogo.svg %{buildroot}%{_datadir}/icons/hicolor/scalable/apps/danklogo.svg
install -dm755 %{buildroot}%{_datadir}/quickshell/dms
cp -r %{_builddir}/dms-qml/* %{buildroot}%{_datadir}/quickshell/dms/
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
echo "%{version}" > %{buildroot}%{_datadir}/quickshell/dms/VERSION
%posttrans
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
rmdir "%{_sysconfdir}/xdg" 2>/dev/null || true
fi
# Signal running DMS instances to reload
pkill -USR1 -x dms >/dev/null 2>&1 || :
%files
%license LICENSE
%doc README.md CONTRIBUTING.md
%{_datadir}/quickshell/dms/
%{_userunitdir}/dms.service
%{_datadir}/applications/dms-open.desktop
%{_datadir}/icons/hicolor/scalable/apps/danklogo.svg
%files -n dms-cli
%{_bindir}/dms
%{_datadir}/bash-completion/completions/dms
%{_datadir}/zsh/site-functions/_dms
%{_datadir}/fish/vendor_completions.d/dms.fish
%changelog
* CHANGELOG_DATE_PLACEHOLDER AvengeMedia <contact@avengemedia.com> - VERSION_PLACEHOLDER-1
- Stable release VERSION_PLACEHOLDER
- Built from GitHub release
SPECEOF
sed -i "s/VERSION_PLACEHOLDER/${VERSION}/g" ~/rpmbuild/SPECS/dms.spec
sed -i "s/CHANGELOG_DATE_PLACEHOLDER/${CHANGELOG_DATE}/g" ~/rpmbuild/SPECS/dms.spec
- name: Build SRPM
id: build
run: |
cd ~/rpmbuild/SPECS
rpmbuild -bs dms.spec
SRPM=$(ls ~/rpmbuild/SRPMS/*.src.rpm | tail -n 1)
SRPM_NAME=$(basename "$SRPM")
echo "srpm_path=$SRPM" >> $GITHUB_OUTPUT
echo "srpm_name=$SRPM_NAME" >> $GITHUB_OUTPUT
echo "SRPM built: $SRPM_NAME"
- name: Upload SRPM artifact
uses: actions/upload-artifact@v4
with:
name: dms-stable-srpm-${{ steps.version.outputs.version }}
path: ${{ steps.build.outputs.srpm_path }}
retention-days: 90
- name: Install Copr CLI
run: |
sudo apt-get install -y python3-pip
pip3 install copr-cli
mkdir -p ~/.config
cat > ~/.config/copr << EOF
[copr-cli]
login = ${{ secrets.COPR_LOGIN }}
username = avengemedia
token = ${{ secrets.COPR_TOKEN }}
copr_url = https://copr.fedorainfracloud.org
EOF
chmod 600 ~/.config/copr
- name: Upload to Copr
run: |
SRPM="${{ steps.build.outputs.srpm_path }}"
VERSION="${{ steps.version.outputs.version }}"
echo "Uploading SRPM to avengemedia/dms..."
BUILD_OUTPUT=$(copr-cli build avengemedia/dms "$SRPM" --nowait 2>&1)
echo "$BUILD_OUTPUT"
BUILD_ID=$(echo "$BUILD_OUTPUT" | grep -oP 'Build was added to.*\K[0-9]+' || echo "unknown")
if [ "$BUILD_ID" != "unknown" ]; then
echo "Build submitted: https://copr.fedorainfracloud.org/coprs/avengemedia/dms/build/$BUILD_ID/"
fi


@@ -3,8 +3,17 @@ name: DMS Copr Stable Release
on:
workflow_dispatch:
inputs:
package:
description: 'Package to build (dms, dms-greeter, or both)'
required: false
default: 'dms'
type: choice
options:
- dms
- dms-greeter
- both
version:
description: 'Versioning (e.g., 0.1.14, leave empty for latest release)'
description: 'Versioning (e.g., 1.0.3, leave empty for latest release)'
required: false
default: ''
release:
@@ -13,13 +22,32 @@ on:
default: '1'
jobs:
build-and-upload:
determine-packages:
runs-on: ubuntu-latest
outputs:
packages: ${{ steps.set-packages.outputs.packages }}
steps:
- name: Set package list
id: set-packages
run: |
PACKAGE_INPUT="${{ github.event.inputs.package || 'dms' }}"
if [ "$PACKAGE_INPUT" = "both" ]; then
echo 'packages=["dms","dms-greeter"]' >> $GITHUB_OUTPUT
else
echo "packages=[\"$PACKAGE_INPUT\"]" >> $GITHUB_OUTPUT
fi
build-and-upload:
needs: determine-packages
runs-on: ubuntu-latest
strategy:
matrix:
package: ${{ fromJSON(needs.determine-packages.outputs.packages) }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Determine version
id: version
run: |
@@ -39,210 +67,84 @@ jobs:
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release=$RELEASE" >> $GITHUB_OUTPUT
echo "✅ Building DMS hotfix version: $VERSION-$RELEASE"
echo "✅ Building ${{ matrix.package }} version: $VERSION-$RELEASE"
- name: Setup build environment
run: |
sudo apt-get update
sudo apt-get install -y rpm wget curl jq gzip
mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
echo "✅ RPM build environment ready"
- name: Download release assets
run: |
VERSION="${{ steps.version.outputs.version }}"
cd ~/rpmbuild/SOURCES
echo "📦 Downloading DMS QML source for v${VERSION}..."
# Download DMS QML source
wget "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-qml.tar.gz" || {
echo "❌ Failed to download dms-qml.tar.gz for v${VERSION}"
exit 1
}
echo "✅ Source downloaded"
echo "Note: dms-cli binary will be downloaded during build based on target architecture"
ls -lh
- name: Generate stable spec file
run: |
VERSION="${{ steps.version.outputs.version }}"
RELEASE="${{ steps.version.outputs.release }}"
CHANGELOG_DATE="$(date '+%a %b %d %Y')"
PACKAGE="${{ matrix.package }}"
cat > ~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
# Spec for DMS stable releases - Generated by GitHub Actions
# Copy spec file from repository
cp distro/fedora/${PACKAGE}.spec ~/rpmbuild/SPECS/${PACKAGE}.spec
%global debug_package %{nil}
%global version VERSION_PLACEHOLDER
%global pkg_summary DankMaterialShell - Material 3 inspired shell for Wayland compositors
# Replace placeholders with actual values
sed -i "s/VERSION_PLACEHOLDER/${VERSION}/g" ~/rpmbuild/SPECS/${PACKAGE}.spec
sed -i "s/RELEASE_PLACEHOLDER/${RELEASE}/g" ~/rpmbuild/SPECS/${PACKAGE}.spec
sed -i "s/CHANGELOG_DATE_PLACEHOLDER/${CHANGELOG_DATE}/g" ~/rpmbuild/SPECS/${PACKAGE}.spec
Name: dms
Version: %{version}
Release: RELEASE_PLACEHOLDER%{?dist}
Summary: %{pkg_summary}
License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
Source0: dms-qml.tar.gz
BuildRequires: gzip
BuildRequires: wget
BuildRequires: systemd-rpm-macros
Requires: (quickshell or quickshell-git)
Requires: accountsservice
Requires: dms-cli = %{version}-%{release}
Requires: dgop
Recommends: cava
Recommends: cliphist
Recommends: danksearch
Recommends: hyprpicker
Recommends: matugen
Recommends: wl-clipboard
Recommends: NetworkManager
Recommends: qt6-qtmultimedia
Suggests: qt6ct
%description
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
and optimized for the niri and hyprland compositors. Features notifications,
app launcher, wallpaper customization, and fully customizable with plugins.
Includes auto-theming for GTK/Qt apps with matugen, 20+ customizable widgets,
process monitoring, notification center, clipboard history, dock, control center,
lock screen, and comprehensive plugin system.
%package -n dms-cli
Summary: DankMaterialShell CLI tool
License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
%description -n dms-cli
Command-line interface for DankMaterialShell configuration and management.
Provides native DBus bindings, NetworkManager integration, and system utilities.
%prep
%setup -q -c -n dms-qml
# Download architecture-specific binaries during build
# This ensures the correct architecture is used for each build target
case "%{_arch}" in
x86_64)
ARCH_SUFFIX="amd64"
;;
aarch64)
ARCH_SUFFIX="arm64"
;;
*)
echo "Unsupported architecture: %{_arch}"
exit 1
;;
esac
# Download dms-cli for target architecture
wget -O %{_builddir}/dms-cli.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-${ARCH_SUFFIX}.gz" || {
echo "Failed to download dms-cli for architecture %{_arch}"
exit 1
}
gunzip -c %{_builddir}/dms-cli.gz > %{_builddir}/dms-cli
chmod +x %{_builddir}/dms-cli
%build
%install
install -Dm755 %{_builddir}/dms-cli %{buildroot}%{_bindir}/dms
# Shell completions
install -d %{buildroot}%{_datadir}/bash-completion/completions
install -d %{buildroot}%{_datadir}/zsh/site-functions
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
%{_builddir}/dms-cli completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
%{_builddir}/dms-cli completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
%{_builddir}/dms-cli completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :
install -Dm644 %{_builddir}/dms-qml/assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service
install -dm755 %{buildroot}%{_datadir}/quickshell/dms
cp -r %{_builddir}/dms-qml/* %{buildroot}%{_datadir}/quickshell/dms/
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
%posttrans
# Clean up old installation path from previous versions (only if empty)
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
# Remove directories only if empty (preserves any user-added files)
rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
rmdir "%{_sysconfdir}/xdg" 2>/dev/null || true
fi
# Signal running DMS instances to reload (harmless if none running)
pkill -USR1 -x dms >/dev/null 2>&1 || :
%files
%license LICENSE
%doc README.md CONTRIBUTING.md
%{_datadir}/quickshell/dms/
%{_userunitdir}/dms.service
%files -n dms-cli
%{_bindir}/dms
%{_datadir}/bash-completion/completions/dms
%{_datadir}/zsh/site-functions/_dms
%{_datadir}/fish/vendor_completions.d/dms.fish
%changelog
* CHANGELOG_DATE_PLACEHOLDER AvengeMedia <contact@avengemedia.com> - VERSION_PLACEHOLDER-RELEASE_PLACEHOLDER
- Stable release VERSION_PLACEHOLDER
- Built from GitHub release
SPECEOF
sed -i "s/VERSION_PLACEHOLDER/${VERSION}/g" ~/rpmbuild/SPECS/dms.spec
sed -i "s/RELEASE_PLACEHOLDER/${RELEASE}/g" ~/rpmbuild/SPECS/dms.spec
sed -i "s/CHANGELOG_DATE_PLACEHOLDER/${CHANGELOG_DATE}/g" ~/rpmbuild/SPECS/dms.spec
echo "✅ Spec file generated for v${VERSION}-${RELEASE}"
echo "✅ Spec file generated for ${PACKAGE} v${VERSION}-${RELEASE}"
echo ""
echo "=== Spec file preview ==="
head -40 ~/rpmbuild/SPECS/dms.spec
head -40 ~/rpmbuild/SPECS/${PACKAGE}.spec
- name: Build SRPM
id: build
run: |
cd ~/rpmbuild/SPECS
echo "🔨 Building SRPM..."
rpmbuild -bs dms.spec
SRPM=$(ls ~/rpmbuild/SRPMS/*.src.rpm | tail -n 1)
PACKAGE="${{ matrix.package }}"
echo "🔨 Building SRPM for ${PACKAGE}..."
rpmbuild -bs ${PACKAGE}.spec
SRPM=$(ls ~/rpmbuild/SRPMS/${PACKAGE}-*.src.rpm | tail -n 1)
SRPM_NAME=$(basename "$SRPM")
echo "srpm_path=$SRPM" >> $GITHUB_OUTPUT
echo "srpm_name=$SRPM_NAME" >> $GITHUB_OUTPUT
echo "✅ SRPM built: $SRPM_NAME"
echo ""
echo "=== SRPM Info ==="
rpm -qpi "$SRPM"
- name: Upload SRPM artifact
uses: actions/upload-artifact@v4
with:
name: dms-stable-srpm-${{ steps.version.outputs.version }}
name: ${{ matrix.package }}-stable-srpm-${{ steps.version.outputs.version }}
path: ${{ steps.build.outputs.srpm_path }}
retention-days: 90
- name: Install Copr CLI
run: |
sudo apt-get install -y python3-pip
pip3 install copr-cli
mkdir -p ~/.config
cat > ~/.config/copr << EOF
[copr-cli]
@@ -252,37 +154,57 @@ jobs:
copr_url = https://copr.fedorainfracloud.org
EOF
chmod 600 ~/.config/copr
echo "✅ Copr CLI configured"
- name: Determine Copr project
id: copr_project
run: |
PACKAGE="${{ matrix.package }}"
if [ "$PACKAGE" = "dms" ]; then
COPR_PROJECT="avengemedia/dms"
elif [ "$PACKAGE" = "dms-greeter" ]; then
COPR_PROJECT="avengemedia/danklinux"
else
echo "❌ Unknown package: $PACKAGE"
exit 1
fi
echo "copr_project=$COPR_PROJECT" >> $GITHUB_OUTPUT
echo "✅ Copr project: $COPR_PROJECT"
- name: Upload to Copr
run: |
SRPM="${{ steps.build.outputs.srpm_path }}"
VERSION="${{ steps.version.outputs.version }}"
echo "🚀 Uploading SRPM to avengemedia/dms..."
COPR_PROJECT="${{ steps.copr_project.outputs.copr_project }}"
PACKAGE="${{ matrix.package }}"
echo "🚀 Uploading ${PACKAGE} SRPM to ${COPR_PROJECT}..."
echo " SRPM: $(basename $SRPM)"
echo " Version: $VERSION"
BUILD_OUTPUT=$(copr-cli build avengemedia/dms "$SRPM" --nowait 2>&1)
BUILD_OUTPUT=$(copr-cli build "$COPR_PROJECT" "$SRPM" --nowait 2>&1)
echo "$BUILD_OUTPUT"
BUILD_ID=$(echo "$BUILD_OUTPUT" | grep -oP 'Build was added to.*\K[0-9]+' || echo "unknown")
if [ "$BUILD_ID" != "unknown" ]; then
echo "✅ Build submitted successfully!"
echo "🔗 https://copr.fedorainfracloud.org/coprs/avengemedia/dms/build/$BUILD_ID/"
echo "🔗 https://copr.fedorainfracloud.org/coprs/${COPR_PROJECT}/build/$BUILD_ID/"
else
echo "⚠️ Could not extract build ID, but upload may have succeeded"
fi
- name: Build summary
if: always()
run: |
echo "### 🎉 DMS Stable Build Summary" >> $GITHUB_STEP_SUMMARY
PACKAGE="${{ matrix.package }}"
COPR_PROJECT="${{ steps.copr_project.outputs.copr_project }}"
echo "### 🎉 ${PACKAGE} Stable Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Package:** ${PACKAGE}" >> $GITHUB_STEP_SUMMARY
echo "- **Version:** ${{ steps.version.outputs.version }}-${{ steps.version.outputs.release }}" >> $GITHUB_STEP_SUMMARY
echo "- **SRPM:** ${{ steps.build.outputs.srpm_name }}" >> $GITHUB_STEP_SUMMARY
echo "- **Project:** https://copr.fedorainfracloud.org/coprs/avengemedia/dms/" >> $GITHUB_STEP_SUMMARY
echo "- **Project:** https://copr.fedorainfracloud.org/coprs/${COPR_PROJECT}/" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Stable release has been built and uploaded to Copr!" >> $GITHUB_STEP_SUMMARY


@@ -4,107 +4,144 @@ on:
workflow_dispatch:
inputs:
package:
description: 'Package to update (dms, dms-git, or all)'
description: "Package to update (dms, dms-git, or all)"
required: false
default: 'all'
default: "all"
tag_version:
description: "Specific tag version for dms stable (e.g., v1.0.2). Leave empty to auto-detect latest release."
required: false
default: ""
rebuild_release:
description: 'Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)'
description: "Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)"
required: false
default: ''
push:
tags:
- 'v*'
default: ""
schedule:
- cron: '0 */3 * * *' # Every 3 hours for dms-git builds
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
version: ${{ steps.check.outputs.version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install OSC
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Check for updates
id: check
env:
OBS_USERNAME: ${{ secrets.OBS_USERNAME }}
OBS_PASSWORD: ${{ secrets.OBS_PASSWORD }}
run: |
# Helper function to check dms-git commit
check_dms_git() {
local CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
local OBS_SPEC=$(curl -s -u "$OBS_USERNAME:$OBS_PASSWORD" "https://api.opensuse.org/source/home:AvengeMedia:dms-git/dms-git/dms-git.spec" 2>/dev/null || echo "")
local OBS_COMMIT=$(echo "$OBS_SPEC" | grep "^Version:" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$OBS_COMMIT" && "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
echo "📋 dms-git: Commit $CURRENT_COMMIT already exists, skipping"
return 1 # No update needed
else
echo "📋 dms-git: New commit $CURRENT_COMMIT (OBS has ${OBS_COMMIT:-none})"
return 0 # Update needed
fi
}
# Helper function to check dms stable tag
check_dms_stable() {
local LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "v\?\([^"]*\)".*/\1/' || echo "")
local OBS_SPEC=$(curl -s -u "$OBS_USERNAME:$OBS_PASSWORD" "https://api.opensuse.org/source/home:AvengeMedia:dms/dms/dms.spec" 2>/dev/null || echo "")
local OBS_VERSION=$(echo "$OBS_SPEC" | grep "^Version:" | awk '{print $2}' | xargs || echo "")
if [[ -n "$LATEST_TAG" && "$LATEST_TAG" == "$OBS_VERSION" ]]; then
echo "📋 dms: Tag $LATEST_TAG already exists, skipping"
return 1 # No update needed
else
echo "📋 dms: New tag ${LATEST_TAG:-unknown} (OBS has ${OBS_VERSION:-none})"
return 0 # Update needed
fi
}
# Main logic
REBUILD="${{ github.event.inputs.rebuild_release }}"
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag push - always update stable package
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION (always update)"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
# Scheduled run - check dms-git only
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "Checking if dms-git source has changed..."
if check_dms_git; then
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
# Get current commit hash (8 chars to match spec format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
# Manual workflow trigger
PKG="${{ github.event.inputs.package }}"
# Check OBS for last uploaded commit
OBS_BASE="$HOME/.cache/osc-checkouts"
mkdir -p "$OBS_BASE"
OBS_PROJECT="home:AvengeMedia:dms-git"
if [[ -n "$REBUILD" ]]; then
# Rebuild requested - always proceed
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🔄 Manual rebuild requested: $PKG (db$REBUILD)"
if [[ -d "$OBS_BASE/$OBS_PROJECT/dms-git" ]]; then
cd "$OBS_BASE/$OBS_PROJECT/dms-git"
osc up -q 2>/dev/null || true
elif [[ "$PKG" == "all" ]]; then
# Check each package and build list of those needing updates
PACKAGES_TO_UPDATE=()
check_dms_git && PACKAGES_TO_UPDATE+=("dms-git")
check_dms_stable && PACKAGES_TO_UPDATE+=("dms")
# Extract commit hash from spec Version line & format like; 0.6.2+git2264.a679be68
if [[ -f "dms-git.spec" ]]; then
OBS_COMMIT=$(grep "^Version:" "dms-git.spec" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$OBS_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already uploaded to OBS, skipping"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (OBS has $OBS_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract OBS commit, proceeding with update"
fi
else
if [[ ${#PACKAGES_TO_UPDATE[@]} -gt 0 ]]; then
echo "packages=${PACKAGES_TO_UPDATE[*]}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No spec file in OBS, proceeding with update"
echo "✓ Packages to update: ${PACKAGES_TO_UPDATE[*]}"
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "✓ All packages up to date"
fi
elif [[ "$PKG" == "dms-git" ]]; then
if check_dms_git; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ "$PKG" == "dms" ]]; then
if check_dms_stable; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
cd "${{ github.workspace }}"
else
# Unknown package - proceed anyway
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 First upload to OBS, update needed"
echo "Manual trigger: $PKG"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
# Fallback - proceed
echo "packages=all" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
@@ -113,119 +150,166 @@ jobs:
name: Upload to OBS
needs: check-updates
runs-on: ubuntu-latest
if: |
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
if: needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Determine packages to update
id: packages
run: |
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag push event - use the pushed tag
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
# Scheduled run - dms-git only
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: updating git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
# Manual workflow dispatch
# Determine version for dms stable
if [[ "${{ github.event.inputs.package }}" == "dms" ]]; then
# For explicit dms selection, require tag_version
if [[ -n "${{ github.event.inputs.tag_version }}" ]]; then
VERSION="${{ github.event.inputs.tag_version }}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Using specified tag: $VERSION"
else
echo "ERROR: tag_version is required when package=dms"
echo "Please specify a tag version (e.g., v1.0.2) or use package=all for auto-detection"
exit 1
fi
elif [[ "${{ github.event.inputs.package }}" == "all" ]]; then
# For "all", auto-detect if tag_version not specified
if [[ -n "${{ github.event.inputs.tag_version }}" ]]; then
VERSION="${{ github.event.inputs.tag_version }}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Using specified tag: $VERSION"
else
# Auto-detect latest release for "all"
LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "\([^"]*\)".*/\1/' || echo "")
if [[ -n "$LATEST_TAG" ]]; then
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
echo "Auto-detected latest release: $LATEST_TAG"
else
echo "ERROR: Could not auto-detect latest release"
exit 1
fi
fi
fi
# Use filtered packages from check-updates when package="all" and no rebuild/tag specified
if [[ "${{ github.event.inputs.package }}" == "all" ]] && [[ -z "${{ github.event.inputs.rebuild_release }}" ]] && [[ -z "${{ github.event.inputs.tag_version }}" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Manual trigger: all (filtered to: ${{ needs.check-updates.outputs.packages }})"
else
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
fi
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Update dms-git spec version
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "1.0.2")
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
# Update version in spec
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
# Add changelog entry
# Single changelog entry (git snapshots don't need history)
DATE_STR=$(date "+%a %b %d %Y")
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
sed -i "/%changelog/a\\$CHANGELOG_ENTRY" distro/opensuse/dms-git.spec
LOCAL_SPEC_HEAD=$(sed -n '1,/%changelog/{ /%changelog/d; p }' distro/opensuse/dms-git.spec)
{
echo "$LOCAL_SPEC_HEAD"
echo "%changelog"
echo "* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1"
echo "- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
} > distro/opensuse/dms-git.spec
- name: Update Debian dms-git changelog version
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
# Debian version format: 0.6.2+git2256.9162e314
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "1.0.2")
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
# Single changelog entry (git snapshots don't need history)
CHANGELOG_DATE=$(date -R)
CHANGELOG_FILE="distro/debian/dms-git/debian/changelog"
# Get current version from changelog
CURRENT_VERSION=$(head -1 "$CHANGELOG_FILE" | sed 's/.*(\([^)]*\)).*/\1/')
echo "Current Debian version: $CURRENT_VERSION"
echo "New version: $NEW_VERSION"
# Only update if version changed
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
# Create new changelog entry at top
TEMP_CHANGELOG=$(mktemp)
cat > "$TEMP_CHANGELOG" << EOF
dms-git ($NEW_VERSION) nightly; urgency=medium
* Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
EOF
# Prepend to existing changelog
cat "$CHANGELOG_FILE" >> "$TEMP_CHANGELOG"
mv "$TEMP_CHANGELOG" "$CHANGELOG_FILE"
echo "✓ Updated Debian changelog: $CURRENT_VERSION → $NEW_VERSION"
else
echo "✓ Debian changelog already at version $NEW_VERSION"
fi
{
echo "dms-git (${NEW_VERSION}db1) nightly; urgency=medium"
echo ""
echo " * Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
echo ""
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
} > "distro/debian/dms-git/debian/changelog"
- name: Update dms stable version
if: steps.packages.outputs.version != ''
run: |
VERSION="${{ steps.packages.outputs.version }}"
VERSION_NO_V="${VERSION#v}"
echo "Updating packaging to version $VERSION_NO_V"
# Update openSUSE dms spec (stable only)
echo "==> Updating packaging files to version: $VERSION_NO_V"
# Update spec file
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
# Update Debian _service files
# Verify the update
UPDATED_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1)
echo "✓ Spec file now shows Version: $UPDATED_VERSION"
# Single changelog entry (full history on OBS website)
DATE_STR=$(date "+%a %b %d %Y")
LOCAL_SPEC_HEAD=$(sed -n '1,/%changelog/{ /%changelog/d; p }' distro/opensuse/dms.spec)
{
echo "$LOCAL_SPEC_HEAD"
echo "%changelog"
echo "* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1"
echo "- Update to stable $VERSION release"
} > distro/opensuse/dms.spec
# Update Debian _service files (both tar_scm and download_url formats)
for service in distro/debian/*/_service; do
if [[ -f "$service" ]]; then
# Update tar_scm revision parameter (for dms-git)
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
# Update download_url paths (for dms stable)
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
fi
done
# Update Debian changelog for dms stable (single entry, history on OBS website)
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
CHANGELOG_DATE=$(date -R)
{
echo "dms (${VERSION_NO_V}db1) stable; urgency=medium"
echo ""
echo " * Update to $VERSION stable release"
echo ""
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
} > "distro/debian/dms/debian/changelog"
echo "✓ Updated Debian changelog to ${VERSION_NO_V}db1"
fi
- name: Install Go
uses: actions/setup-go@v5
with:
go-version: '1.24'
go-version: "1.24"
- name: Install OSC
run: |
@@ -245,32 +329,78 @@ jobs:
- name: Upload to OBS
env:
FORCE_REBUILD: ${{ github.event_name == 'workflow_dispatch' && 'true' || '' }}
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
TAG_VERSION: ${{ steps.packages.outputs.version }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ -z "$PACKAGES" ]]; then
echo "✓ No packages need uploading. All up to date!"
exit 0
fi
MESSAGE="Automated update from GitHub Actions"
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
echo "==> Version being uploaded: ${{ steps.packages.outputs.version }}"
fi
if [[ "$PACKAGES" == "all" ]]; then
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
fi
# PACKAGES can be space-separated list (e.g., "dms-git dms" from "all" check)
# Loop through each package and upload
for PKG in $PACKAGES; do
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PKG to OBS..."
if [[ -n "$REBUILD_RELEASE" ]]; then
echo "🔄 Using rebuild release number: db$REBUILD_RELEASE"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
if [[ "$PKG" == "dms-git" ]]; then
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
bash distro/scripts/obs-upload.sh "$PKG" "$MESSAGE"
fi
done
- name: Summary
if: always()
run: |
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
echo "### OBS Package Upload Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ -z "$PACKAGES" ]]; then
echo "**Status:** ✅ All packages up to date (no uploads needed)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "All packages are current. Run completed successfully." >> $GITHUB_STEP_SUMMARY
else
echo "**Packages Uploaded:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
for PKG in $PACKAGES; do
case "$PKG" in
dms)
echo "- ✅ **dms** → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms/dms)" >> $GITHUB_STEP_SUMMARY
;;
dms-git)
echo "- ✅ **dms-git** → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms-git/dms-git)" >> $GITHUB_STEP_SUMMARY
;;
esac
done
echo "" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ github.event.inputs.rebuild_release }}" ]]; then
echo "**Rebuild Number:** db${{ github.event.inputs.rebuild_release }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "**Version:** ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo "Monitor build progress on [OBS project page](https://build.opensuse.org/project/show/home:AvengeMedia)." >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY


@@ -4,15 +4,15 @@ on:
workflow_dispatch:
inputs:
package:
description: 'Package to upload (dms, dms-git, dms-greeter, or all)'
description: "Package to upload (dms, dms-git, dms-greeter, or all)"
required: false
default: 'dms-git'
default: "dms-git"
rebuild_release:
description: 'Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)'
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
required: false
default: ''
default: ""
schedule:
- cron: '0 */3 * * *' # Every 3 hours for dms-git builds
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
jobs:
check-updates:
@@ -32,41 +32,113 @@ jobs:
- name: Check for updates
id: check
run: |
if [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "Checking if dms-git source has changed..."
# Helper function to check dms-git commit
check_dms_git() {
local CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
local PPA_VERSION=$(curl -s "https://api.launchpad.net/1.0/~avengemedia/+archive/ubuntu/dms-git?ws.op=getPublishedSources&source_name=dms-git&status=Published" | grep -oP '"source_package_version":\s*"\K[^"]+' | head -1 || echo "")
local PPA_COMMIT=$(echo "$PPA_VERSION" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
# Get current commit hash (8 chars to match changelog format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Extract commit hash from changelog
# Format: dms-git (0.6.2+git2264.c5c5ce84) questing; urgency=medium
CHANGELOG_FILE="distro/ubuntu/dms-git/debian/changelog"
if [[ -f "$CHANGELOG_FILE" ]]; then
CHANGELOG_COMMIT=$(head -1 "$CHANGELOG_FILE" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$CHANGELOG_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$CHANGELOG_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already in changelog, skipping upload"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (changelog has $CHANGELOG_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract commit from changelog, proceeding with upload"
fi
if [[ -n "$PPA_COMMIT" && "$CURRENT_COMMIT" == "$PPA_COMMIT" ]]; then
echo "📋 dms-git: Commit $CURRENT_COMMIT already exists, skipping"
return 1 # No update needed
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No changelog file found, proceeding with upload"
echo "📋 dms-git: New commit $CURRENT_COMMIT (PPA has ${PPA_COMMIT:-none})"
return 0 # Update needed
fi
}
# Helper function to check stable package tag
check_stable_package() {
local PKG="$1"
local PPA_NAME="$2"
# Use git ls-remote to find the latest tag, sorted by version (descending)
local LATEST_TAG=$(git ls-remote --tags --refs --sort='-v:refname' https://github.com/AvengeMedia/DankMaterialShell.git | head -n1 | awk -F/ '{print $NF}' | sed 's/^v//')
local PPA_VERSION=$(curl -s "https://api.launchpad.net/1.0/~avengemedia/+archive/ubuntu/$PPA_NAME?ws.op=getPublishedSources&source_name=$PKG&status=Published" | grep -oP '"source_package_version":\s*"\K[^"]+' | head -1 || echo "")
local PPA_BASE_VERSION=$(echo "$PPA_VERSION" | sed 's/ppa[0-9]*$//')
if [[ -n "$LATEST_TAG" && "$LATEST_TAG" == "$PPA_BASE_VERSION" ]]; then
echo "📋 $PKG: Tag $LATEST_TAG already exists, skipping"
return 1 # No update needed
else
echo "📋 $PKG: New tag ${LATEST_TAG:-unknown} (PPA has ${PPA_BASE_VERSION:-none})"
return 0 # Update needed
fi
}
# Main logic
REBUILD="${{ github.event.inputs.rebuild_release }}"
if [[ "${{ github.event_name }}" == "schedule" ]]; then
# Scheduled run - check dms-git only
echo "packages=dms-git" >> $GITHUB_OUTPUT
if check_dms_git; then
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
# Manual workflow trigger
PKG="${{ github.event.inputs.package }}"
if [[ -n "$REBUILD" ]]; then
# Rebuild requested - always proceed
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🔄 Manual rebuild requested: $PKG (ppa$REBUILD)"
elif [[ "$PKG" == "all" ]]; then
# Check each package and build list of those needing updates
PACKAGES_TO_UPDATE=()
check_dms_git && PACKAGES_TO_UPDATE+=("dms-git")
check_stable_package "dms" "dms" && PACKAGES_TO_UPDATE+=("dms")
check_stable_package "dms-greeter" "danklinux" && PACKAGES_TO_UPDATE+=("dms-greeter")
if [[ ${#PACKAGES_TO_UPDATE[@]} -gt 0 ]]; then
echo "packages=${PACKAGES_TO_UPDATE[*]}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "✓ Packages to update: ${PACKAGES_TO_UPDATE[*]}"
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "✓ All packages up to date"
fi
elif [[ "$PKG" == "dms-git" ]]; then
if check_dms_git; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ "$PKG" == "dms" ]]; then
if check_stable_package "dms" "dms"; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ "$PKG" == "dms-greeter" ]]; then
if check_stable_package "dms-greeter" "danklinux"; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
else
# Unknown package - proceed anyway
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: $PKG"
fi
else
# Fallback
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
@@ -75,21 +147,19 @@ jobs:
name: Upload to PPA
needs: check-updates
runs-on: ubuntu-latest
if: |
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
if: needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: '1.24'
cache: false
go-version: "1.24"
cache: false
- name: Install build dependencies
run: |
@@ -102,7 +172,7 @@ jobs:
build-essential \
fakeroot \
dpkg-dev
- name: Configure GPG
env:
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
@@ -110,79 +180,101 @@ jobs:
echo "$GPG_KEY" | gpg --import
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
- name: Determine packages to upload
id: packages
run: |
# Use packages determined by check-updates job
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
if [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: uploading git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ needs.check-updates.outputs.packages }}"
fi
- name: Upload to PPA
env:
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ "$PACKAGES" == "all" ]]; then
REBUILD_RELEASE="${{ github.event.inputs.rebuild_release }}"
if [[ -z "$PACKAGES" ]]; then
echo "✓ No packages need uploading. All up to date!"
exit 0
fi
# Export REBUILD_RELEASE so ppa-build.sh can use it
if [[ -n "$REBUILD_RELEASE" ]]; then
export REBUILD_RELEASE
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
fi
# PACKAGES can be space-separated list (e.g., "dms-git dms" from "all" check)
# Loop through each package and upload
for PKG in $PACKAGES; do
# Map package to PPA name
case "$PKG" in
dms)
PPA_NAME="dms"
;;
dms-git)
PPA_NAME="dms-git"
;;
dms-greeter)
PPA_NAME="danklinux"
;;
*)
echo "⚠️ Unknown package: $PKG, skipping"
continue
;;
esac
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms to PPA..."
if [ -n "$REBUILD_RELEASE" ]; then
echo "Uploading $PKG to PPA $PPA_NAME..."
if [[ -n "$REBUILD_RELEASE" ]]; then
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms" dms questing
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-git to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-git" dms-git questing
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-greeter to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-greeter" danklinux questing
else
PPA_NAME="$PACKAGES"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PACKAGES to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/$PACKAGES" "$PPA_NAME" questing
fi
- name: Summary
run: |
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
bash distro/scripts/ppa-upload.sh "$PKG" "$PPA_NAME" questing ${REBUILD_RELEASE:+"$REBUILD_RELEASE"}
done
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
- name: Summary
if: always()
run: |
echo "### PPA Package Upload Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ "$PACKAGES" == "all" ]]; then
echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA danklinux**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-git" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
if [[ -z "$PACKAGES" ]]; then
echo "**Status:** ✅ All packages up to date (no uploads needed)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "All packages are current. Run will complete successfully." >> $GITHUB_STEP_SUMMARY
else
echo "**Packages Uploaded:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
for PKG in $PACKAGES; do
case "$PKG" in
dms)
echo "- ✅ **dms** → [View builds](https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages)" >> $GITHUB_STEP_SUMMARY
;;
dms-git)
echo "- ✅ **dms-git** → [View builds](https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages)" >> $GITHUB_STEP_SUMMARY
;;
dms-greeter)
echo "- ✅ **dms-greeter** → [View builds](https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages)" >> $GITHUB_STEP_SUMMARY
;;
esac
done
echo "" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ github.event.inputs.rebuild_release }}" ]]; then
echo "**Rebuild Number:** ppa${{ github.event.inputs.rebuild_release }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY

.github/workflows/stable.yml vendored Normal file

@@ -0,0 +1,19 @@
name: Update stable branch
on:
push:
tags:
- "v*"
jobs:
update-stable:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Push to stable branch
run: git push origin HEAD:refs/heads/stable --force


@@ -36,7 +36,7 @@ jobs:
run: |
set -euo pipefail
echo "Attempting nix build to get new vendorHash..."
if output=$(nix build .#dmsCli 2>&1); then
if output=$(nix build .#dms-shell 2>&1); then
echo "Build succeeded, no hash update needed"
exit 0
fi
@@ -46,7 +46,7 @@ jobs:
[ "$current_hash" = "$new_hash" ] && { echo "vendorHash already up to date"; exit 0; }
sed -i "s|vendorHash = \"$current_hash\"|vendorHash = \"$new_hash\"|" flake.nix
echo "Verifying build with new vendorHash..."
nix build .#dmsCli
nix build .#dms-shell
echo "vendorHash updated successfully!"
- name: Commit and push vendorHash update

.gitignore vendored

@@ -96,20 +96,15 @@ go.work
go.work.sum
# env file
.env
.env*
# Editor/IDE
# .idea/
# .vscode/
vim/
bin/
# Extracted source trees in Ubuntu package directories
distro/ubuntu/*/dms-git-repo/
distro/ubuntu/*/DankMaterialShell-*/
distro/ubuntu/danklinux/*/dsearch-*/
distro/ubuntu/danklinux/*/dgop-*/
# direnv
.envrc
.direnv/

.pre-commit-config.yaml Normal file

@@ -0,0 +1,12 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: check-yaml
- id: end-of-file-fixer
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck
args: [-e, SC2164, -e, SC2001, -e, SC2012, -e, SC2317]

CHANGELOG.MD Normal file

@@ -0,0 +1,17 @@
This file is more of a quick reference so I know what to account for before the next release.
# 1.2.0
- Added clipboard and clipboard history integration
- Added swipe-to-dismiss for notification popups and the notification center
- Added paste from clipboard history view - requires wtype
- Optimize surface damage of OSD & Toast
- Add monitor configurator (niri, Hyprland, MangoWC)
- **BREAKING** ghostty theme changed to ~/.config/ghostty/themes/danktheme
- requires intervention and doc update
- Added desktop widget plugins
- dev guidance available
- builtin clock & dgop widgets
- new IPC targets
- Initial RTL support/i18n
- Theme registry


@@ -6,10 +6,10 @@ To contribute fork this repository, make your changes, and open a pull request.
## Setup
Enable pre-commit hooks to catch CI failures before pushing:
Install [prek](https://prek.j178.dev/), then activate pre-commit hooks:
```bash
git config core.hooksPath .githooks
prek install
```
### Nix Development Shell
@@ -21,6 +21,7 @@ nix develop
```
This will provide:
- Go 1.24 toolchain (go, gopls, delve, go-tools) and GNU Make
- Quickshell and required QML packages
- Properly configured QML2_IMPORT_PATH
@@ -54,6 +55,20 @@ touch .qmlls.ini
5. Make your changes, test, and open a pull request.
### I18n/Localization
When adding user-facing strings, ensure they are wrapped in `I18n.tr()` with context for translators, for example:
```qml
import qs.Common
Text {
text: I18n.tr("Hello World", "<This is context for the translators, example> Hello world greeting that appears on the lock screen")
}
```
Preferably, keep new terms to a minimum and re-use existing terms where possible; see `quickshell/translations/en.json` for the current list. (This isn't always possible, but, for example, prefer the already-translated `Autoconnect` over introducing a new `Auto-connect` term.)
### GO (`core` directory)
1. Install the [Go Extension](https://code.visualstudio.com/docs/languages/go)


@@ -163,7 +163,7 @@ quickshell -p quickshell/
inputs.dms.url = "github:AvengeMedia/DankMaterialShell";
# Use in home-manager or NixOS configuration
imports = [ inputs.dms.homeModules.dankMaterialShell.default ];
imports = [ inputs.dms.homeModules.dank-material-shell ];
}
```


@@ -1,10 +1,10 @@
[Desktop Entry]
Type=Application
Name=DMS Application Picker
Name=DMS
Comment=Select an application to open links and files
Exec=dms open %u
Icon=danklogo
Terminal=false
NoDisplay=true
MimeType=x-scheme-handler/http;x-scheme-handler/https;text/html;application/xhtml+xml;
MimeType=x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/dms;text/html;application/xhtml+xml;
Categories=Utility;


@@ -9,9 +9,9 @@ Type=dbus
BusName=org.freedesktop.Notifications
ExecStart=/usr/bin/dms run --session
ExecReload=/usr/bin/pkill -USR1 -x dms
Restart=always
Restart=on-failure
RestartSec=1.23
TimeoutStopSec=10
[Install]
WantedBy=graphical-session.target
WantedBy=graphical-session.target


@@ -102,7 +102,11 @@ linters:
- linters:
- ineffassign
path: internal/proto/
# binary.Write to bytes.Buffer can't fail
# binary.Write/Read to bytes.Buffer can't fail
- linters:
- errcheck
text: "Error return value of `binary\\.Write` is not checked"
text: "Error return value of `binary\\.(Write|Read)` is not checked"
# bytes.Reader.Read can't fail (reads from memory)
- linters:
- errcheck
text: "Error return value of `buf\\.Read` is not checked"


@@ -56,3 +56,15 @@ packages:
outpkg: mocks_version
interfaces:
VersionFetcher:
github.com/AvengeMedia/DankMaterialShell/core/internal/server/wlcontext:
config:
dir: "internal/mocks/wlcontext"
outpkg: mocks_wlcontext
interfaces:
WaylandContext:
github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client:
config:
dir: "internal/mocks/wlclient"
outpkg: mocks_wlclient
interfaces:
WaylandDisplay:


@@ -0,0 +1,16 @@
repos:
- repo: https://github.com/golangci/golangci-lint
rev: v2.6.2
hooks:
- id: golangci-lint-fmt
require_serial: true
- id: golangci-lint-full
- id: golangci-lint-config-verify
- repo: local
hooks:
- id: go-test
name: go test
entry: go test ./...
language: system
pass_filenames: false
types: [go]


@@ -14,34 +14,63 @@ Distribution-aware installer with TUI for deploying DMS and compositor configura
## System Integration
**Wayland Protocols**
- `wlr-gamma-control-unstable-v1` - Night mode and gamma control
- `wlr-screencopy-unstable-v1` - Screen capture for color picker
- `wlr-layer-shell-unstable-v1` - Overlay surfaces for color picker
- `wp-viewporter` - Fractional scaling support
- `dwl-ipc-unstable-v2` - dwl/MangoWC workspace integration
- `ext-workspace-v1` - Workspace protocol support
- `wlr-output-management-unstable-v1` - Display configuration
### Wayland Protocols (Client)
**DBus Interfaces**
- NetworkManager/iwd - Network management
- logind - Session control and inhibit locks
- accountsservice - User account information
- CUPS - Printer management
- Custom IPC via unix socket (JSON API)
All Wayland protocols are consumed as a client - connecting to the compositor.
**Hardware Control**
- DDC/CI protocol - External monitor brightness control (like `ddcutil`)
- Backlight control - Internal display brightness via `login1` or sysfs
- LED control - Keyboard/device LED management
- evdev input monitoring - Keyboard state tracking (caps lock, etc.)
| Protocol | Purpose |
| ----------------------------------------- | ----------------------------------------------------------- |
| `wlr-gamma-control-unstable-v1` | Night mode color temperature control |
| `wlr-screencopy-unstable-v1` | Screen capture for color picker/screenshot |
| `wlr-layer-shell-unstable-v1` | Overlay surfaces for color picker UI/screenshot |
| `wlr-output-management-unstable-v1` | Display configuration |
| `wlr-output-power-management-unstable-v1` | DPMS on/off CLI |
| `wp-viewporter` | Fractional scaling support (color picker/screenshot UIs) |
| `keyboard-shortcuts-inhibit-unstable-v1` | Inhibit compositor shortcuts during color picker/screenshot |
| `ext-data-control-v1` | Clipboard history and persistence |
| `ext-workspace-v1` | Workspace integration |
| `dwl-ipc-unstable-v2` | dwl/MangoWC IPC for tags, outputs, etc. |
### DBus Interfaces
**Client (consuming external services):**
| Interface | Purpose |
| -------------------------------- | --------------------------------------------- |
| `org.bluez` | Bluetooth management with pairing agent |
| `org.freedesktop.NetworkManager` | Network management |
| `net.connman.iwd` | iwd Wi-Fi backend |
| `org.freedesktop.network1` | systemd-networkd integration |
| `org.freedesktop.login1` | Session control, sleep inhibitors, brightness |
| `org.freedesktop.Accounts` | User account information |
| `org.freedesktop.portal.Desktop` | Desktop appearance settings (color scheme) |
| CUPS via IPP + D-Bus | Printer management with job notifications |
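For a sense of what the client side of these interfaces involves, below is a minimal sketch that takes a `delay` sleep inhibitor from logind. It uses the godbus library purely for illustration and is not the project's actual implementation; the service/interface names are standard logind D-Bus names.

```go
package main

import (
	"fmt"
	"os"

	"github.com/godbus/dbus/v5"
)

func main() {
	// Connect to the system bus, where logind lives.
	conn, err := dbus.ConnectSystemBus()
	if err != nil {
		fmt.Fprintln(os.Stderr, "system bus:", err)
		os.Exit(1)
	}
	defer conn.Close()

	// Ask org.freedesktop.login1 for a delay inhibitor on sleep.
	// A shell would hold this fd while it brings up the lock screen,
	// then close it to let suspend proceed.
	login1 := conn.Object("org.freedesktop.login1", "/org/freedesktop/login1")
	var fd dbus.UnixFD
	err = login1.Call("org.freedesktop.login1.Manager.Inhibit", 0,
		"sleep", "example-shell", "lock before suspend", "delay").Store(&fd)
	if err != nil {
		fmt.Fprintln(os.Stderr, "inhibit:", err)
		os.Exit(1)
	}
	fmt.Println("holding sleep inhibitor, fd", fd)

	// Closing the file descriptor releases the inhibitor.
	_ = os.NewFile(uintptr(fd), "inhibitor").Close()
}
```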
**Server (implementing interfaces):**
| Interface | Purpose |
| ----------------------------- | -------------------------------------- |
| `org.freedesktop.ScreenSaver` | Screensaver inhibit for video playback |
Custom IPC via unix socket (JSON API) for shell communication.
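On the wire this is newline-delimited JSON: the server sends a capabilities line on connect, then each request carries `id`, `method`, and optional `params`. A minimal sketch follows (assuming the default `danklinux.sock` under `$XDG_RUNTIME_DIR`; the `dms` CLI wraps this in helper functions):

```go
// Send one raw IPC request to the dms server (illustrative sketch only).
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"net"
	"os"
	"path/filepath"
)

func main() {
	sock := filepath.Join(os.Getenv("XDG_RUNTIME_DIR"), "danklinux.sock")
	conn, err := net.Dial("unix", sock)
	if err != nil {
		fmt.Fprintln(os.Stderr, "dms server not running:", err)
		return
	}
	defer conn.Close()

	sc := bufio.NewScanner(conn)
	sc.Scan() // discard the initial capabilities line

	req, _ := json.Marshal(map[string]any{"id": 1, "method": "clipboard.getHistory"})
	conn.Write(append(req, '\n'))

	if sc.Scan() {
		fmt.Println(sc.Text()) // raw JSON response
	}
}
```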
### Hardware Control
| Subsystem | Method | Purpose |
| --------- | ------------------- | ---------------------------------- |
| DDC/CI | I2C direct | External monitor brightness |
| Backlight | logind or sysfs | Internal display brightness (sysfs sketch below) |
| evdev | `/dev/input/event*` | Keyboard state (caps lock LED) |
| udev | netlink monitor | Backlight device updates (for OSD) |
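The sysfs path in the backlight row is the generic Linux mechanism: read `max_brightness`, then write `brightness` under `/sys/class/backlight/<device>/`. A rough sketch (illustrative only, not dms's implementation; writing normally requires root or a udev rule):

```go
// Set the first backlight device to half brightness via sysfs (illustrative only).
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strconv"
	"strings"
)

func main() {
	devs, _ := filepath.Glob("/sys/class/backlight/*")
	if len(devs) == 0 {
		fmt.Println("no backlight devices found")
		return
	}
	dev := devs[0]

	raw, err := os.ReadFile(filepath.Join(dev, "max_brightness"))
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	max, _ := strconv.Atoi(strings.TrimSpace(string(raw)))
	half := strconv.Itoa(max / 2)

	// The mode is ignored for an existing sysfs node; write permission is what matters.
	if err := os.WriteFile(filepath.Join(dev, "brightness"), []byte(half), 0o644); err != nil {
		fmt.Fprintln(os.Stderr, "write brightness:", err)
		return
	}
	fmt.Printf("%s set to %s of %d\n", dev, half, max)
}
```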
### Plugin System
**Plugin System**
- Plugin registry integration
- Plugin lifecycle management
- Settings persistence
## CLI Commands
- `dms run [-d]` - Start shell (optionally as daemon)
- `dms restart` / `dms kill` - Manage running processes
- `dms ipc <command>` - Send IPC commands (toggle launcher, notifications, etc.)
@@ -70,6 +99,7 @@ The on-screen preview displays the selected format. JSON output includes hex, RG
Requires Go 1.24+
**Development build:**
```bash
make # Build dms CLI
make dankinstall # Build installer
@@ -77,6 +107,7 @@ make test # Run tests
```
**Distribution build:**
```bash
make dist # Build without update/greeter features
```
@@ -84,6 +115,7 @@ make dist # Build without update/greeter features
Produces `bin/dms-linux-amd64` and `bin/dms-linux-arm64`
**Installation:**
```bash
sudo make install # Install to /usr/local/bin/dms
```
@@ -91,6 +123,7 @@ sudo make install # Install to /usr/local/bin/dms
## Development
**Setup pre-commit hooks:**
```bash
git config core.hooksPath .githooks
```
@@ -98,6 +131,7 @@ git config core.hooksPath .githooks
This runs gofmt, golangci-lint, tests, and builds before each commit when `core/` files are staged.
**Regenerating Wayland Protocol Bindings:**
```bash
go install github.com/rajveermalviya/go-wayland/cmd/go-wayland-scanner@latest
go-wayland-scanner -i internal/proto/xml/wlr-gamma-control-unstable-v1.xml \
@@ -105,6 +139,7 @@ go-wayland-scanner -i internal/proto/xml/wlr-gamma-control-unstable-v1.xml \
```
**Module Structure:**
- `cmd/` - Binary entrypoints (dms, dankinstall)
- `internal/distros/` - Distribution-specific installation logic
- `internal/proto/` - Wayland protocol bindings
@@ -139,4 +174,4 @@ Most packages available in standard repos. Minimal building required.
**Gentoo**
Uses Portage with the GURU overlay and automatically configures USE flags. Success varies depending on system configuration.
See installer output for distribution-specific details during installation.
See installer output for distribution-specific details during installation.


@@ -8,7 +8,7 @@
<rect x="0" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="20" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="0" y="37" width="24" height="8" fill="#CCBEFF"/>
<!-- A -->
<rect x="36" y="5" width="20" height="8" fill="#CCBEFF"/>
<rect x="32" y="13" width="8" height="8" fill="#CCBEFF"/>
@@ -18,7 +18,7 @@
<rect x="52" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="32" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="52" y="37" width="8" height="8" fill="#CCBEFF"/>
<!-- N -->
<rect x="64" y="5" width="12" height="8" fill="#CCBEFF"/>
<rect x="92" y="5" width="8" height="8" fill="#CCBEFF"/>
@@ -32,7 +32,7 @@
<rect x="92" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="64" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="84" y="37" width="16" height="8" fill="#CCBEFF"/>
<!-- K -->
<rect x="104" y="5" width="8" height="8" fill="#CCBEFF"/>
<rect x="124" y="5" width="8" height="8" fill="#CCBEFF"/>
@@ -43,4 +43,4 @@
<rect x="120" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="104" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="124" y="37" width="8" height="8" fill="#CCBEFF"/>
</svg>
</svg>


@@ -179,7 +179,7 @@ func runBrightnessList(cmd *cobra.Command, args []string) {
fmt.Printf("%-*s %-12s %-*s %s\n", idPad, "Device", "Class", namePad, "Name", "Brightness")
sepLen := idPad + 2 + 12 + 2 + namePad + 2 + 15
for i := 0; i < sepLen; i++ {
for range sepLen {
fmt.Print("─")
}
fmt.Println()


@@ -0,0 +1,618 @@
package main
import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"os/signal"
"strconv"
"syscall"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/spf13/cobra"
)
var clipboardCmd = &cobra.Command{
Use: "clipboard",
Aliases: []string{"cl"},
Short: "Manage clipboard",
Long: "Interact with the clipboard manager",
}
var clipCopyCmd = &cobra.Command{
Use: "copy [text]",
Short: "Copy text to clipboard",
Long: "Copy text to clipboard. If no text provided, reads from stdin. Works without server.",
Run: runClipCopy,
}
var (
clipCopyForeground bool
clipCopyPasteOnce bool
clipCopyType string
clipJSONOutput bool
)
var clipPasteCmd = &cobra.Command{
Use: "paste",
Short: "Paste text from clipboard",
Long: "Paste text from clipboard to stdout. Works without server.",
Run: runClipPaste,
}
var clipWatchCmd = &cobra.Command{
Use: "watch [command]",
Short: "Watch clipboard for changes",
Long: `Watch clipboard for changes and optionally execute a command.
Works like wl-paste --watch. Does not require server.
If a command is provided, it will be executed each time the clipboard changes,
with the clipboard content piped to its stdin.
Examples:
dms cl watch # Print clipboard changes to stdout
dms cl watch cat # Same as above
dms cl watch notify-send # Send notification on clipboard change`,
Run: runClipWatch,
}
var clipHistoryCmd = &cobra.Command{
Use: "history",
Short: "Show clipboard history",
Long: "Show clipboard history with previews (requires server)",
Run: runClipHistory,
}
var clipGetCmd = &cobra.Command{
Use: "get <id>",
Short: "Get clipboard entry by ID",
Long: "Get full clipboard entry data by ID (requires server). Use --copy to copy it to clipboard.",
Args: cobra.ExactArgs(1),
Run: runClipGet,
}
var clipGetCopy bool
var clipDeleteCmd = &cobra.Command{
Use: "delete <id>",
Short: "Delete clipboard entry",
Long: "Delete a clipboard history entry by ID (requires server)",
Args: cobra.ExactArgs(1),
Run: runClipDelete,
}
var clipClearCmd = &cobra.Command{
Use: "clear",
Short: "Clear clipboard history",
Long: "Clear all clipboard history (requires server)",
Run: runClipClear,
}
var clipWatchStore bool
var clipSearchCmd = &cobra.Command{
Use: "search [query]",
Short: "Search clipboard history",
Long: "Search clipboard history with filters (requires server)",
Run: runClipSearch,
}
var (
clipSearchLimit int
clipSearchOffset int
clipSearchMimeType string
clipSearchImages bool
clipSearchText bool
)
var clipConfigCmd = &cobra.Command{
Use: "config",
Short: "Manage clipboard config",
Long: "Get or set clipboard configuration (requires server)",
}
var clipConfigGetCmd = &cobra.Command{
Use: "get",
Short: "Get clipboard config",
Run: runClipConfigGet,
}
var clipConfigSetCmd = &cobra.Command{
Use: "set",
Short: "Set clipboard config",
Long: `Set clipboard configuration options.
Examples:
dms cl config set --max-history 200
dms cl config set --auto-clear-days 7
dms cl config set --clear-at-startup`,
Run: runClipConfigSet,
}
var (
clipConfigMaxHistory int
clipConfigAutoClearDays int
clipConfigClearAtStartup bool
clipConfigNoClearStartup bool
clipConfigDisabled bool
clipConfigEnabled bool
clipConfigDisableHistory bool
clipConfigEnableHistory bool
)
func init() {
clipCopyCmd.Flags().BoolVarP(&clipCopyForeground, "foreground", "f", false, "Stay in foreground instead of forking")
clipCopyCmd.Flags().BoolVarP(&clipCopyPasteOnce, "paste-once", "o", false, "Exit after first paste")
clipCopyCmd.Flags().StringVarP(&clipCopyType, "type", "t", "text/plain;charset=utf-8", "MIME type")
clipWatchCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipHistoryCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipGetCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipGetCmd.Flags().BoolVarP(&clipGetCopy, "copy", "c", false, "Copy entry to clipboard")
clipSearchCmd.Flags().IntVarP(&clipSearchLimit, "limit", "l", 50, "Max results")
clipSearchCmd.Flags().IntVarP(&clipSearchOffset, "offset", "o", 0, "Result offset")
clipSearchCmd.Flags().StringVarP(&clipSearchMimeType, "mime", "m", "", "Filter by MIME type")
clipSearchCmd.Flags().BoolVar(&clipSearchImages, "images", false, "Only images")
clipSearchCmd.Flags().BoolVar(&clipSearchText, "text", false, "Only text")
clipSearchCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipConfigSetCmd.Flags().IntVar(&clipConfigMaxHistory, "max-history", 0, "Max history entries")
clipConfigSetCmd.Flags().IntVar(&clipConfigAutoClearDays, "auto-clear-days", -1, "Auto-clear entries older than N days (0 to disable)")
clipConfigSetCmd.Flags().BoolVar(&clipConfigClearAtStartup, "clear-at-startup", false, "Clear history on startup")
clipConfigSetCmd.Flags().BoolVar(&clipConfigNoClearStartup, "no-clear-at-startup", false, "Don't clear history on startup")
clipConfigSetCmd.Flags().BoolVar(&clipConfigDisabled, "disable", false, "Disable clipboard manager entirely")
clipConfigSetCmd.Flags().BoolVar(&clipConfigEnabled, "enable", false, "Enable clipboard manager")
clipConfigSetCmd.Flags().BoolVar(&clipConfigDisableHistory, "disable-history", false, "Disable clipboard history persistence")
clipConfigSetCmd.Flags().BoolVar(&clipConfigEnableHistory, "enable-history", false, "Enable clipboard history persistence")
clipWatchCmd.Flags().BoolVarP(&clipWatchStore, "store", "s", false, "Store clipboard changes to history (no server required)")
clipConfigCmd.AddCommand(clipConfigGetCmd, clipConfigSetCmd)
clipboardCmd.AddCommand(clipCopyCmd, clipPasteCmd, clipWatchCmd, clipHistoryCmd, clipGetCmd, clipDeleteCmd, clipClearCmd, clipSearchCmd, clipConfigCmd)
}
func runClipCopy(cmd *cobra.Command, args []string) {
var data []byte
if len(args) > 0 {
data = []byte(args[0])
} else {
var err error
data, err = io.ReadAll(os.Stdin)
if err != nil {
log.Fatalf("read stdin: %v", err)
}
}
if err := clipboard.CopyOpts(data, clipCopyType, clipCopyForeground, clipCopyPasteOnce); err != nil {
log.Fatalf("copy: %v", err)
}
}
func runClipPaste(cmd *cobra.Command, args []string) {
data, _, err := clipboard.Paste()
if err != nil {
log.Fatalf("paste: %v", err)
}
os.Stdout.Write(data)
}
func runClipWatch(cmd *cobra.Command, args []string) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
sigCh := make(chan os.Signal, 1)
signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM)
go func() {
<-sigCh
cancel()
}()
switch {
case len(args) > 0:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
runCommand(args, data)
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
case clipWatchStore:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
if err := clipboard.Store(data, mimeType); err != nil {
log.Errorf("store: %v", err)
}
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
case clipJSONOutput:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
out := map[string]any{
"data": string(data),
"mimeType": mimeType,
"timestamp": time.Now().Format(time.RFC3339),
"size": len(data),
}
j, _ := json.Marshal(out)
fmt.Println(string(j))
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
default:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
os.Stdout.Write(data)
os.Stdout.WriteString("\n")
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
}
}
func runCommand(args []string, stdin []byte) {
cmd := exec.Command(args[0], args[1:]...)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if len(stdin) == 0 {
cmd.Run()
return
}
r, w, err := os.Pipe()
if err != nil {
cmd.Run()
return
}
cmd.Stdin = r
go func() {
w.Write(stdin)
w.Close()
}()
cmd.Run()
}
func runClipHistory(cmd *cobra.Command, args []string) {
req := models.Request{
ID: 1,
Method: "clipboard.getHistory",
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to get clipboard history: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
if clipJSONOutput {
fmt.Println("[]")
} else {
fmt.Println("No clipboard history")
}
return
}
historyList, ok := (*resp.Result).([]any)
if !ok {
log.Fatal("Invalid response format")
}
if clipJSONOutput {
out, _ := json.MarshalIndent(historyList, "", " ")
fmt.Println(string(out))
return
}
if len(historyList) == 0 {
fmt.Println("No clipboard history")
return
}
fmt.Println("Clipboard History:")
fmt.Println()
for _, item := range historyList {
entry, ok := item.(map[string]any)
if !ok {
continue
}
id := uint64(entry["id"].(float64))
preview := entry["preview"].(string)
timestamp := entry["timestamp"].(string)
isImage := entry["isImage"].(bool)
typeStr := "text"
if isImage {
typeStr = "image"
}
fmt.Printf("ID: %d | %s | %s\n", id, typeStr, timestamp)
fmt.Printf(" %s\n", preview)
fmt.Println()
}
}
func runClipGet(cmd *cobra.Command, args []string) {
id, err := strconv.ParseUint(args[0], 10, 64)
if err != nil {
log.Fatalf("Invalid ID: %v", err)
}
if clipGetCopy {
req := models.Request{
ID: 1,
Method: "clipboard.copyEntry",
Params: map[string]any{
"id": id,
},
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to copy clipboard entry: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Printf("Copied entry %d to clipboard\n", id)
return
}
req := models.Request{
ID: 1,
Method: "clipboard.getEntry",
Params: map[string]any{
"id": id,
},
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to get clipboard entry: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
log.Fatal("Entry not found")
}
entry, ok := (*resp.Result).(map[string]any)
if !ok {
log.Fatal("Invalid response format")
}
switch {
case clipJSONOutput:
output, _ := json.MarshalIndent(entry, "", " ")
fmt.Println(string(output))
default:
if data, ok := entry["data"].(string); ok {
fmt.Print(data)
} else {
output, _ := json.MarshalIndent(entry, "", " ")
fmt.Println(string(output))
}
}
}
func runClipDelete(cmd *cobra.Command, args []string) {
id, err := strconv.ParseUint(args[0], 10, 64)
if err != nil {
log.Fatalf("Invalid ID: %v", err)
}
req := models.Request{
ID: 1,
Method: "clipboard.deleteEntry",
Params: map[string]any{
"id": id,
},
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to delete clipboard entry: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Printf("Deleted entry %d\n", id)
}
func runClipClear(cmd *cobra.Command, args []string) {
req := models.Request{
ID: 1,
Method: "clipboard.clearHistory",
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to clear clipboard history: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Println("Clipboard history cleared")
}
func runClipSearch(cmd *cobra.Command, args []string) {
params := map[string]any{
"limit": clipSearchLimit,
"offset": clipSearchOffset,
}
if len(args) > 0 {
params["query"] = args[0]
}
if clipSearchMimeType != "" {
params["mimeType"] = clipSearchMimeType
}
if clipSearchImages {
params["isImage"] = true
} else if clipSearchText {
params["isImage"] = false
}
req := models.Request{
ID: 1,
Method: "clipboard.search",
Params: params,
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to search clipboard: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
log.Fatal("No results")
}
result, ok := (*resp.Result).(map[string]any)
if !ok {
log.Fatal("Invalid response format")
}
if clipJSONOutput {
out, _ := json.MarshalIndent(result, "", " ")
fmt.Println(string(out))
return
}
entries, _ := result["entries"].([]any)
total := int(result["total"].(float64))
hasMore := result["hasMore"].(bool)
if len(entries) == 0 {
fmt.Println("No results found")
return
}
fmt.Printf("Results: %d of %d\n\n", len(entries), total)
for _, item := range entries {
entry, ok := item.(map[string]any)
if !ok {
continue
}
id := uint64(entry["id"].(float64))
preview := entry["preview"].(string)
timestamp := entry["timestamp"].(string)
isImage := entry["isImage"].(bool)
typeStr := "text"
if isImage {
typeStr = "image"
}
fmt.Printf("ID: %d | %s | %s\n", id, typeStr, timestamp)
fmt.Printf(" %s\n\n", preview)
}
if hasMore {
fmt.Printf("Use --offset %d to see more results\n", clipSearchOffset+clipSearchLimit)
}
}
func runClipConfigGet(cmd *cobra.Command, args []string) {
req := models.Request{
ID: 1,
Method: "clipboard.getConfig",
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to get config: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
log.Fatal("No config returned")
}
cfg, ok := (*resp.Result).(map[string]any)
if !ok {
log.Fatal("Invalid response format")
}
output, _ := json.MarshalIndent(cfg, "", " ")
fmt.Println(string(output))
}
func runClipConfigSet(cmd *cobra.Command, args []string) {
params := map[string]any{}
if cmd.Flags().Changed("max-history") {
params["maxHistory"] = clipConfigMaxHistory
}
if cmd.Flags().Changed("auto-clear-days") {
params["autoClearDays"] = clipConfigAutoClearDays
}
if clipConfigClearAtStartup {
params["clearAtStartup"] = true
}
if clipConfigNoClearStartup {
params["clearAtStartup"] = false
}
if clipConfigDisabled {
params["disabled"] = true
}
if clipConfigEnabled {
params["disabled"] = false
}
if clipConfigDisableHistory {
params["disableHistory"] = true
}
if clipConfigEnableHistory {
params["disableHistory"] = false
}
if len(params) == 0 {
fmt.Println("No config options specified")
return
}
req := models.Request{
ID: 1,
Method: "clipboard.setConfig",
Params: params,
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to set config: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Println("Config updated")
}


@@ -3,8 +3,8 @@ package main
import (
"fmt"
"os"
"os/exec"
"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard"
"github.com/AvengeMedia/DankMaterialShell/core/internal/colorpicker"
"github.com/spf13/cobra"
)
@@ -121,13 +121,7 @@ func runColorPick(cmd *cobra.Command, args []string) {
}
func copyToClipboard(text string) {
var cmd *exec.Cmd
if _, err := exec.LookPath("wl-copy"); err == nil {
cmd = exec.Command("wl-copy", text)
} else {
fmt.Fprintln(os.Stderr, "wl-copy not found, cannot copy to clipboard")
return
if err := clipboard.CopyText(text); err != nil {
fmt.Fprintln(os.Stderr, "clipboard copy failed:", err)
}
_ = cmd.Run()
}


@@ -171,7 +171,6 @@ var pluginsUpdateCmd = &cobra.Command{
}
func runVersion(cmd *cobra.Command, args []string) {
printASCII()
fmt.Printf("%s\n", formatVersion(Version))
}
@@ -220,7 +219,7 @@ func getBaseVersion() string {
}
// Fallback
return "0.6.2"
return "1.0.2"
}
func startDebugServer() error {
@@ -513,5 +512,6 @@ func getCommonCommands() []*cobra.Command {
screenshotCmd,
notifyActionCmd,
matugenCmd,
clipboardCmd,
}
}


@@ -22,6 +22,7 @@ func init() {
dank16Cmd.Flags().Bool("json", false, "Output in JSON format")
dank16Cmd.Flags().Bool("kitty", false, "Output in Kitty terminal format")
dank16Cmd.Flags().Bool("foot", false, "Output in Foot terminal format")
dank16Cmd.Flags().Bool("neovim", false, "Output in Neovim plugin format")
dank16Cmd.Flags().Bool("alacritty", false, "Output in Alacritty terminal format")
dank16Cmd.Flags().Bool("ghostty", false, "Output in Ghostty terminal format")
dank16Cmd.Flags().Bool("wezterm", false, "Output in Wezterm terminal format")
@@ -40,6 +41,7 @@ func runDank16(cmd *cobra.Command, args []string) {
isJson, _ := cmd.Flags().GetBool("json")
isKitty, _ := cmd.Flags().GetBool("kitty")
isFoot, _ := cmd.Flags().GetBool("foot")
isNeovim, _ := cmd.Flags().GetBool("neovim")
isAlacritty, _ := cmd.Flags().GetBool("alacritty")
isGhostty, _ := cmd.Flags().GetBool("ghostty")
isWezterm, _ := cmd.Flags().GetBool("wezterm")
@@ -116,6 +118,8 @@ func runDank16(cmd *cobra.Command, args []string) {
fmt.Print(dank16.GenerateGhosttyTheme(colors))
} else if isWezterm {
fmt.Print(dank16.GenerateWeztermTheme(colors))
} else if isNeovim {
fmt.Print(dank16.GenerateNeovimTheme(colors))
} else {
fmt.Print(dank16.GenerateGhosttyTheme(colors))
}


@@ -377,7 +377,7 @@ func updateDMSBinary() error {
}
version := ""
for _, line := range strings.Split(string(output), "\n") {
for line := range strings.SplitSeq(string(output), "\n") {
if strings.Contains(line, "\"tag_name\"") {
parts := strings.Split(line, "\"")
if len(parts) >= 4 {
@@ -443,7 +443,7 @@ func updateDMSBinary() error {
decompressedPath := filepath.Join(tempDir, "dms")
if err := os.Chmod(decompressedPath, 0755); err != nil {
if err := os.Chmod(decompressedPath, 0o755); err != nil {
return fmt.Errorf("failed to make binary executable: %w", err)
}


@@ -211,8 +211,8 @@ func checkGroupExists(groupName string) bool {
return false
}
lines := strings.Split(string(data), "\n")
for _, line := range lines {
lines := strings.SplitSeq(string(data), "\n")
for line := range lines {
if strings.HasPrefix(line, groupName+":") {
return true
}
@@ -521,7 +521,7 @@ func enableGreeter() error {
newConfig := strings.Join(finalLines, "\n")
tmpFile := "/tmp/greetd-config.toml"
if err := os.WriteFile(tmpFile, []byte(newConfig), 0644); err != nil {
if err := os.WriteFile(tmpFile, []byte(newConfig), 0o644); err != nil {
return fmt.Errorf("failed to write temp config: %w", err)
}
@@ -592,8 +592,8 @@ func checkGreeterStatus() error {
if data, err := os.ReadFile(configPath); err == nil {
configContent := string(data)
if strings.Contains(configContent, "dms-greeter") {
lines := strings.Split(configContent, "\n")
for _, line := range lines {
lines := strings.SplitSeq(configContent, "\n")
for line := range lines {
trimmed := strings.TrimSpace(line)
if strings.HasPrefix(trimmed, "command =") || strings.HasPrefix(trimmed, "command=") {
parts := strings.SplitN(trimmed, "=", 2)


@@ -2,15 +2,12 @@ package main
import (
"context"
"encoding/json"
"fmt"
"net"
"os"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/matugen"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/spf13/cobra"
)
@@ -49,6 +46,7 @@ func init() {
cmd.Flags().String("stock-colors", "", "Stock theme colors JSON")
cmd.Flags().Bool("sync-mode-with-portal", false, "Sync color scheme with GNOME portal")
cmd.Flags().Bool("terminals-always-dark", false, "Force terminal themes to dark variant")
cmd.Flags().String("skip-templates", "", "Comma-separated list of templates to skip")
}
matugenQueueCmd.Flags().Bool("wait", true, "Wait for completion")
@@ -68,6 +66,7 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
stockColors, _ := cmd.Flags().GetString("stock-colors")
syncModeWithPortal, _ := cmd.Flags().GetBool("sync-mode-with-portal")
terminalsAlwaysDark, _ := cmd.Flags().GetBool("terminals-always-dark")
skipTemplates, _ := cmd.Flags().GetString("skip-templates")
return matugen.Options{
StateDir: stateDir,
@@ -82,6 +81,7 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
StockColors: stockColors,
SyncModeWithPortal: syncModeWithPortal,
TerminalsAlwaysDark: terminalsAlwaysDark,
SkipTemplates: skipTemplates,
}
}
@@ -97,33 +97,10 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
wait, _ := cmd.Flags().GetBool("wait")
timeout, _ := cmd.Flags().GetDuration("timeout")
socketPath := os.Getenv("DMS_SOCKET")
if socketPath == "" {
var err error
socketPath, err = server.FindSocket()
if err != nil {
log.Info("No socket available, running synchronously")
if err := matugen.Run(opts); err != nil {
log.Fatalf("Theme generation failed: %v", err)
}
return
}
}
conn, err := net.Dial("unix", socketPath)
if err != nil {
log.Info("Socket connection failed, running synchronously")
if err := matugen.Run(opts); err != nil {
log.Fatalf("Theme generation failed: %v", err)
}
return
}
defer conn.Close()
request := map[string]any{
"id": 1,
"method": "matugen.queue",
"params": map[string]any{
request := models.Request{
ID: 1,
Method: "matugen.queue",
Params: map[string]any{
"stateDir": opts.StateDir,
"shellDir": opts.ShellDir,
"configDir": opts.ConfigDir,
@@ -136,15 +113,19 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
"stockColors": opts.StockColors,
"syncModeWithPortal": opts.SyncModeWithPortal,
"terminalsAlwaysDark": opts.TerminalsAlwaysDark,
"skipTemplates": opts.SkipTemplates,
"wait": wait,
},
}
if err := json.NewEncoder(conn).Encode(request); err != nil {
log.Fatalf("Failed to send request: %v", err)
}
if !wait {
if err := sendServerRequestFireAndForget(request); err != nil {
log.Info("Server unavailable, running synchronously")
if err := matugen.Run(opts); err != nil {
log.Fatalf("Theme generation failed: %v", err)
}
return
}
fmt.Println("Theme generation queued")
return
}
@@ -154,17 +135,18 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
resultCh := make(chan error, 1)
go func() {
var response struct {
ID int `json:"id"`
Result any `json:"result"`
Error string `json:"error"`
}
if err := json.NewDecoder(conn).Decode(&response); err != nil {
resultCh <- fmt.Errorf("failed to read response: %w", err)
resp, ok := tryServerRequest(request)
if !ok {
log.Info("Server unavailable, running synchronously")
if err := matugen.Run(opts); err != nil {
resultCh <- err
return
}
resultCh <- nil
return
}
if response.Error != "" {
resultCh <- fmt.Errorf("server error: %s", response.Error)
if resp.Error != "" {
resultCh <- fmt.Errorf("server error: %s", resp.Error)
return
}
resultCh <- nil


@@ -1,17 +1,14 @@
package main
import (
"encoding/json"
"fmt"
"mime"
"net"
"net/url"
"os"
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/spf13/cobra"
)
@@ -93,32 +90,6 @@ func mimeTypeToCategories(mimeType string) []string {
}
func runOpen(target string) {
socketPath, err := server.FindSocket()
if err != nil {
log.Warnf("DMS socket not found: %v", err)
fmt.Println("DMS is not running. Please start DMS first.")
os.Exit(1)
}
conn, err := net.Dial("unix", socketPath)
if err != nil {
log.Warnf("DMS socket connection failed: %v", err)
fmt.Println("DMS is not running. Please start DMS first.")
os.Exit(1)
}
defer conn.Close()
buf := make([]byte, 1)
for {
_, err := conn.Read(buf)
if err != nil {
return
}
if buf[0] == '\n' {
break
}
}
// Parse file:// URIs to extract the actual file path
actualTarget := target
detectedMimeType := openMimeType
@@ -160,6 +131,12 @@ func runOpen(target string) {
detectedRequestType = "url"
}
log.Infof("Detected HTTP(S) URL")
} else if strings.HasPrefix(target, "dms://") {
// Handle DMS internal URLs (theme/plugin install, etc.)
if detectedRequestType == "" {
detectedRequestType = "url"
}
log.Infof("Detected DMS internal URL")
} else if _, err := os.Stat(target); err == nil {
// Handle local file paths directly (not file:// URIs)
// Convert to absolute path
@@ -206,7 +183,7 @@ func runOpen(target string) {
}
method := "apppicker.open"
if detectedMimeType == "" && len(detectedCategories) == 0 && (strings.HasPrefix(target, "http://") || strings.HasPrefix(target, "https://")) {
if detectedMimeType == "" && len(detectedCategories) == 0 && (strings.HasPrefix(target, "http://") || strings.HasPrefix(target, "https://") || strings.HasPrefix(target, "dms://")) {
method = "browser.open"
params["url"] = target
}
@@ -219,8 +196,9 @@ func runOpen(target string) {
log.Infof("Sending request - Method: %s, Params: %+v", method, params)
if err := json.NewEncoder(conn).Encode(req); err != nil {
log.Fatalf("Failed to send request: %v", err)
if err := sendServerRequestFireAndForget(req); err != nil {
fmt.Println("DMS is not running. Please start DMS first.")
os.Exit(1)
}
log.Infof("Request sent successfully")


@@ -4,10 +4,10 @@ import (
"bytes"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard"
"github.com/AvengeMedia/DankMaterialShell/core/internal/screenshot"
"github.com/spf13/cobra"
)
@@ -257,9 +257,7 @@ func copyImageToClipboard(buf *screenshot.ShmBuffer, format screenshot.Format, q
}
}
cmd := exec.Command("wl-copy", "--type", mimeType)
cmd.Stdin = &data
return cmd.Run()
return clipboard.Copy(data.Bytes(), mimeType)
}
func writeImageToStdout(buf *screenshot.ShmBuffer, format screenshot.Format, quality int, pixelFormat uint32) error {


@@ -87,20 +87,14 @@ func newDPMSClient() (*dpmsClient, error) {
switch e.Interface {
case wlr_output_power.ZwlrOutputPowerManagerV1InterfaceName:
powerMgr := wlr_output_power.NewZwlrOutputPowerManagerV1(c.ctx)
version := e.Version
if version > 1 {
version = 1
}
version := min(e.Version, 1)
if err := registry.Bind(e.Name, e.Interface, version, powerMgr); err == nil {
c.powerMgr = powerMgr
}
case "wl_output":
output := wlclient.NewOutput(c.ctx)
version := e.Version
if version > 4 {
version = 4
}
version := min(e.Version, 4)
if err := registry.Bind(e.Name, e.Interface, version, output); err == nil {
outputID := fmt.Sprintf("output-%d", output.ID())
state := &outputState{


@@ -0,0 +1,114 @@
package main
import (
"bufio"
"encoding/json"
"fmt"
"net"
"os"
"path/filepath"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
)
func sendServerRequest(req models.Request) (*models.Response[any], error) {
socketPath := getServerSocketPath()
conn, err := net.Dial("unix", socketPath)
if err != nil {
return nil, fmt.Errorf("failed to connect to server (is it running?): %w", err)
}
defer conn.Close()
scanner := bufio.NewScanner(conn)
scanner.Scan() // discard initial capabilities message
reqData, err := json.Marshal(req)
if err != nil {
return nil, fmt.Errorf("failed to marshal request: %w", err)
}
if _, err := conn.Write(reqData); err != nil {
return nil, fmt.Errorf("failed to write request: %w", err)
}
if _, err := conn.Write([]byte("\n")); err != nil {
return nil, fmt.Errorf("failed to write newline: %w", err)
}
if !scanner.Scan() {
return nil, fmt.Errorf("failed to read response")
}
var resp models.Response[any]
if err := json.Unmarshal(scanner.Bytes(), &resp); err != nil {
return nil, fmt.Errorf("failed to unmarshal response: %w", err)
}
return &resp, nil
}
// sendServerRequestFireAndForget sends a request without waiting for a response.
// Useful for commands that trigger UI or async operations.
func sendServerRequestFireAndForget(req models.Request) error {
socketPath := getServerSocketPath()
conn, err := net.Dial("unix", socketPath)
if err != nil {
return fmt.Errorf("failed to connect to server (is it running?): %w", err)
}
defer conn.Close()
scanner := bufio.NewScanner(conn)
scanner.Scan() // discard initial capabilities message
reqData, err := json.Marshal(req)
if err != nil {
return fmt.Errorf("failed to marshal request: %w", err)
}
if _, err := conn.Write(reqData); err != nil {
return fmt.Errorf("failed to write request: %w", err)
}
if _, err := conn.Write([]byte("\n")); err != nil {
return fmt.Errorf("failed to write newline: %w", err)
}
return nil
}
// tryServerRequest attempts to send a request but returns false if server unavailable.
// Does not log errors - caller can decide what to do on failure.
func tryServerRequest(req models.Request) (*models.Response[any], bool) {
resp, err := sendServerRequest(req)
if err != nil {
return nil, false
}
return resp, true
}
func getServerSocketPath() string {
runtimeDir := os.Getenv("XDG_RUNTIME_DIR")
if runtimeDir == "" {
runtimeDir = os.TempDir()
}
entries, err := os.ReadDir(runtimeDir)
if err != nil {
return filepath.Join(runtimeDir, "danklinux.sock")
}
for _, entry := range entries {
name := entry.Name()
if name == "danklinux.sock" {
return filepath.Join(runtimeDir, name)
}
if len(name) > 10 && name[:10] == "danklinux-" && filepath.Ext(name) == ".sock" {
return filepath.Join(runtimeDir, name)
}
}
return server.GetSocketPath()
}
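
// Illustrative usage (a sketch, not part of this changeset): a caller that
// prefers the running server and degrades gracefully when no socket is
// reachable, the same pattern the matugen queue command above follows.
func exampleClipboardConfig() {
	req := models.Request{ID: 1, Method: "clipboard.getConfig"}
	resp, ok := tryServerRequest(req)
	if !ok {
		fmt.Println("dms server not running")
		return
	}
	if resp.Error != "" {
		fmt.Println("server error:", resp.Error)
		return
	}
	if resp.Result != nil {
		fmt.Println("config:", *resp.Result)
	}
}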


@@ -7,6 +7,7 @@ import (
"os/exec"
"os/signal"
"path/filepath"
"slices"
"strconv"
"strings"
"syscall"
@@ -18,6 +19,25 @@ import (
type ipcTargets map[string]map[string][]string
// getProcessExitCode returns the exit code from a ProcessState.
// For normal exits, returns the exit code directly.
// For signal termination, returns 128 + signal number (Unix convention).
func getProcessExitCode(state *os.ProcessState) int {
if state == nil {
return 1
}
if code := state.ExitCode(); code != -1 {
return code
}
// Process was killed by signal - extract signal number
if status, ok := state.Sys().(syscall.WaitStatus); ok {
if status.Signaled() {
return 128 + int(status.Signal())
}
}
return 1
}
var isSessionManaged bool
func execDetachedRestart(targetPID int) {
@@ -155,6 +175,7 @@ func runShellInteractive(session bool) {
errChan <- fmt.Errorf("server panic: %v", r)
}
}()
server.CLIVersion = Version
if err := server.Start(false); err != nil {
errChan <- fmt.Errorf("server error: %w", err)
}
@@ -179,6 +200,16 @@ func runShellInteractive(session bool) {
}
}
if os.Getenv("QT_QPA_PLATFORMTHEME") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME=gtk3")
}
if os.Getenv("QT_QPA_PLATFORMTHEME_QT6") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME_QT6=gtk3")
}
if os.Getenv("QT_QPA_PLATFORM") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORM=wayland")
}
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
@@ -213,14 +244,28 @@ func runShellInteractive(session bool) {
for {
select {
case sig := <-sigChan:
// Handle SIGUSR1 restart for non-session managed processes
if sig == syscall.SIGUSR1 && !isSessionManaged {
if sig == syscall.SIGUSR1 {
if isSessionManaged {
log.Infof("Received SIGUSR1, exiting for systemd restart...")
cancel()
cmd.Process.Signal(syscall.SIGTERM)
os.Remove(socketPath)
os.Exit(1)
}
log.Infof("Received SIGUSR1, spawning detached restart process...")
execDetachedRestart(os.Getpid())
// Exit immediately to avoid race conditions with detached restart
return
}
// Check if qs already crashed before we got SIGTERM (systemd sends SIGTERM when D-Bus name is released)
select {
case <-errChan:
cancel()
os.Remove(socketPath)
os.Exit(getProcessExitCode(cmd.ProcessState))
case <-time.After(500 * time.Millisecond):
}
log.Infof("\nReceived signal %v, shutting down...", sig)
cancel()
cmd.Process.Signal(syscall.SIGTERM)
@@ -234,7 +279,7 @@ func runShellInteractive(session bool) {
cmd.Process.Signal(syscall.SIGTERM)
}
os.Remove(socketPath)
os.Exit(1)
os.Exit(getProcessExitCode(cmd.ProcessState))
}
}
}
@@ -327,13 +372,7 @@ func killShell() {
func runShellDaemon(session bool) {
isSessionManaged = session
isDaemonChild := false
for _, arg := range os.Args {
if arg == "--daemon-child" {
isDaemonChild = true
break
}
}
isDaemonChild := slices.Contains(os.Args, "--daemon-child")
if !isDaemonChild {
fmt.Fprintf(os.Stderr, "dms %s\n", Version)
@@ -399,6 +438,16 @@ func runShellDaemon(session bool) {
}
}
if os.Getenv("QT_QPA_PLATFORMTHEME") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME=gtk3")
}
if os.Getenv("QT_QPA_PLATFORMTHEME_QT6") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME_QT6=gtk3")
}
if os.Getenv("QT_QPA_PLATFORM") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORM=wayland")
}
devNull, err := os.OpenFile("/dev/null", os.O_RDWR, 0)
if err != nil {
log.Fatalf("Error opening /dev/null: %v", err)
@@ -439,15 +488,28 @@ func runShellDaemon(session bool) {
for {
select {
case sig := <-sigChan:
// Handle SIGUSR1 restart for non-session managed processes
if sig == syscall.SIGUSR1 && !isSessionManaged {
if sig == syscall.SIGUSR1 {
if isSessionManaged {
log.Infof("Received SIGUSR1, exiting for systemd restart...")
cancel()
cmd.Process.Signal(syscall.SIGTERM)
os.Remove(socketPath)
os.Exit(1)
}
log.Infof("Received SIGUSR1, spawning detached restart process...")
execDetachedRestart(os.Getpid())
// Exit immediately to avoid race conditions with detached restart
return
}
// All other signals: clean shutdown
// Check if qs already crashed before we got SIGTERM (systemd sends SIGTERM when D-Bus name is released)
select {
case <-errChan:
cancel()
os.Remove(socketPath)
os.Exit(getProcessExitCode(cmd.ProcessState))
case <-time.After(500 * time.Millisecond):
}
cancel()
cmd.Process.Signal(syscall.SIGTERM)
os.Remove(socketPath)
@@ -459,7 +521,7 @@ func runShellDaemon(session bool) {
cmd.Process.Signal(syscall.SIGTERM)
}
os.Remove(socketPath)
os.Exit(1)
os.Exit(getProcessExitCode(cmd.ProcessState))
}
}
}
@@ -467,9 +529,9 @@ func runShellDaemon(session bool) {
func parseTargetsFromIPCShowOutput(output string) ipcTargets {
targets := make(ipcTargets)
var currentTarget string
for _, line := range strings.Split(output, "\n") {
if strings.HasPrefix(line, "target ") {
currentTarget = strings.TrimSpace(strings.TrimPrefix(line, "target "))
for line := range strings.SplitSeq(output, "\n") {
if after, ok := strings.CutPrefix(line, "target "); ok {
currentTarget = strings.TrimSpace(after)
targets[currentTarget] = make(map[string][]string)
}
if strings.HasPrefix(line, " function") && currentTarget != "" {


@@ -17,8 +17,8 @@ func getThemedASCII() string {
logo := `
██████╗ █████╗ ███╗ ██╗██╗ ██╗
██╔══██╗██╔══██╗████╗ ██║██║ ██╔╝
██║ ██║███████║██╔██╗ ██║█████╔╝
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
██║ ██║███████║██╔██╗ ██║█████╔╝
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
██████╔╝██║ ██║██║ ╚████║██║ ██╗
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝`


@@ -3,6 +3,7 @@ package main
import (
"fmt"
"os/exec"
"slices"
"strings"
)
@@ -36,13 +37,7 @@ func checkSystemdServiceEnabled(serviceName string) (string, bool, error) {
if err != nil {
knownStates := []string{"disabled", "masked", "masked-runtime", "not-found", "enabled", "enabled-runtime", "static", "indirect", "alias"}
isKnownState := false
for _, known := range knownStates {
if stateStr == known {
isKnownState = true
break
}
}
isKnownState := slices.Contains(knownStates, stateStr)
if !isKnownState {
return stateStr, false, fmt.Errorf("systemctl is-enabled failed: %w (output: %s)", err, stateStr)


@@ -15,7 +15,9 @@ require (
github.com/sblinch/kdl-go v0.0.0-20250930225324-bf4099d4614a
github.com/spf13/cobra v1.10.1
github.com/stretchr/testify v1.11.1
go.etcd.io/bbolt v1.4.3
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39
golang.org/x/image v0.34.0
)
require (
@@ -65,6 +67,6 @@ require (
github.com/spf13/pflag v1.0.10 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
golang.org/x/sys v0.38.0
golang.org/x/text v0.31.0
golang.org/x/text v0.32.0
gopkg.in/yaml.v3 v3.0.1 // indirect
)


@@ -131,20 +131,26 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo=
go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39 h1:DHNhtq3sNNzrvduZZIiFyXWOL9IWaDPHqTnLJp+rCBY=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=


@@ -1,4 +1,4 @@
#!/bin/sh
#!/usr/bin/env bash
set -e
@@ -9,8 +9,8 @@ NC='\033[0m' # No Color
# Check for root privileges
if [ "$(id -u)" == "0" ]; then
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
exit 1
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
exit 1
fi
# Check if running on Linux
@@ -22,17 +22,17 @@ fi
# Detect architecture
ARCH=$(uname -m)
case "$ARCH" in
x86_64)
ARCH="amd64"
;;
aarch64)
ARCH="arm64"
;;
*)
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
exit 1
;;
x86_64)
ARCH="amd64"
;;
aarch64)
ARCH="arm64"
;;
*)
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
exit 1
;;
esac
# Get the latest release version
@@ -55,7 +55,7 @@ curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LAT
curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LATEST_VERSION/dankinstall-$ARCH.gz.sha256" -o "expected.sha256"
# Get the expected checksum
EXPECTED_CHECKSUM=$(cat expected.sha256 | awk '{print $1}')
EXPECTED_CHECKSUM=$(awk '{print $1}' expected.sha256)
# Calculate actual checksum
printf "%bVerifying checksum...%b\n" "$GREEN" "$NC"
@@ -67,7 +67,7 @@ if [ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]; then
printf "Expected: %s\n" "$EXPECTED_CHECKSUM"
printf "Got: %s\n" "$ACTUAL_CHECKSUM"
printf "The downloaded file may be corrupted or tampered with\n"
cd - > /dev/null
cd - >/dev/null
rm -rf "$TEMP_DIR"
exit 1
fi
@@ -82,5 +82,5 @@ printf "%bRunning installer...%b\n" "$GREEN" "$NC"
./installer
# Cleanup
cd - > /dev/null
rm -rf "$TEMP_DIR"
cd - >/dev/null
rm -rf "$TEMP_DIR"


@@ -0,0 +1,332 @@
package clipboard
import (
"fmt"
"io"
"os"
"os/exec"
"syscall"
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/ext_data_control"
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
)
func Copy(data []byte, mimeType string) error {
return CopyOpts(data, mimeType, false, false)
}
func CopyOpts(data []byte, mimeType string, foreground, pasteOnce bool) error {
if !foreground {
return copyFork(data, mimeType, pasteOnce)
}
return copyServe(data, mimeType, pasteOnce)
}
func copyFork(data []byte, mimeType string, pasteOnce bool) error {
args := []string{os.Args[0], "cl", "copy", "--foreground"}
if pasteOnce {
args = append(args, "--paste-once")
}
args = append(args, "--type", mimeType)
cmd := exec.Command(args[0], args[1:]...)
cmd.Stdin = nil
cmd.Stdout = nil
cmd.Stderr = nil
cmd.SysProcAttr = &syscall.SysProcAttr{Setsid: true}
stdin, err := cmd.StdinPipe()
if err != nil {
return fmt.Errorf("stdin pipe: %w", err)
}
if err := cmd.Start(); err != nil {
return fmt.Errorf("start: %w", err)
}
if _, err := stdin.Write(data); err != nil {
stdin.Close()
return fmt.Errorf("write stdin: %w", err)
}
stdin.Close()
return nil
}
func copyServe(data []byte, mimeType string, pasteOnce bool) error {
display, err := wlclient.Connect("")
if err != nil {
return fmt.Errorf("wayland connect: %w", err)
}
defer display.Destroy()
ctx := display.Context()
registry, err := display.GetRegistry()
if err != nil {
return fmt.Errorf("get registry: %w", err)
}
defer registry.Destroy()
var dataControlMgr *ext_data_control.ExtDataControlManagerV1
var seat *wlclient.Seat
var bindErr error
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
switch e.Interface {
case "ext_data_control_manager_v1":
dataControlMgr = ext_data_control.NewExtDataControlManagerV1(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, dataControlMgr); err != nil {
bindErr = err
}
case "wl_seat":
if seat != nil {
return
}
seat = wlclient.NewSeat(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, seat); err != nil {
bindErr = err
}
}
})
display.Roundtrip()
display.Roundtrip()
if bindErr != nil {
return fmt.Errorf("registry bind: %w", bindErr)
}
if dataControlMgr == nil {
return fmt.Errorf("compositor does not support ext_data_control_manager_v1")
}
defer dataControlMgr.Destroy()
if seat == nil {
return fmt.Errorf("no seat available")
}
device, err := dataControlMgr.GetDataDevice(seat)
if err != nil {
return fmt.Errorf("get data device: %w", err)
}
defer device.Destroy()
source, err := dataControlMgr.CreateDataSource()
if err != nil {
return fmt.Errorf("create data source: %w", err)
}
if err := source.Offer(mimeType); err != nil {
return fmt.Errorf("offer mime type: %w", err)
}
if mimeType == "text/plain;charset=utf-8" || mimeType == "text/plain" {
if err := source.Offer("text/plain"); err != nil {
return fmt.Errorf("offer text/plain: %w", err)
}
if err := source.Offer("text/plain;charset=utf-8"); err != nil {
return fmt.Errorf("offer text/plain;charset=utf-8: %w", err)
}
if err := source.Offer("UTF8_STRING"); err != nil {
return fmt.Errorf("offer UTF8_STRING: %w", err)
}
if err := source.Offer("STRING"); err != nil {
return fmt.Errorf("offer STRING: %w", err)
}
if err := source.Offer("TEXT"); err != nil {
return fmt.Errorf("offer TEXT: %w", err)
}
}
cancelled := make(chan struct{})
pasted := make(chan struct{}, 1)
source.SetSendHandler(func(e ext_data_control.ExtDataControlSourceV1SendEvent) {
defer syscall.Close(e.Fd)
file := os.NewFile(uintptr(e.Fd), "pipe")
defer file.Close()
file.Write(data)
select {
case pasted <- struct{}{}:
default:
}
})
source.SetCancelledHandler(func(e ext_data_control.ExtDataControlSourceV1CancelledEvent) {
close(cancelled)
})
if err := device.SetSelection(source); err != nil {
return fmt.Errorf("set selection: %w", err)
}
display.Roundtrip()
for {
select {
case <-cancelled:
return nil
case <-pasted:
if pasteOnce {
return nil
}
default:
if err := ctx.Dispatch(); err != nil {
return nil
}
}
}
}
func CopyText(text string) error {
return Copy([]byte(text), "text/plain;charset=utf-8")
}
func Paste() ([]byte, string, error) {
display, err := wlclient.Connect("")
if err != nil {
return nil, "", fmt.Errorf("wayland connect: %w", err)
}
defer display.Destroy()
ctx := display.Context()
registry, err := display.GetRegistry()
if err != nil {
return nil, "", fmt.Errorf("get registry: %w", err)
}
defer registry.Destroy()
var dataControlMgr *ext_data_control.ExtDataControlManagerV1
var seat *wlclient.Seat
var bindErr error
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
switch e.Interface {
case "ext_data_control_manager_v1":
dataControlMgr = ext_data_control.NewExtDataControlManagerV1(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, dataControlMgr); err != nil {
bindErr = err
}
case "wl_seat":
if seat != nil {
return
}
seat = wlclient.NewSeat(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, seat); err != nil {
bindErr = err
}
}
})
display.Roundtrip()
display.Roundtrip()
if bindErr != nil {
return nil, "", fmt.Errorf("registry bind: %w", bindErr)
}
if dataControlMgr == nil {
return nil, "", fmt.Errorf("compositor does not support ext_data_control_manager_v1")
}
defer dataControlMgr.Destroy()
if seat == nil {
return nil, "", fmt.Errorf("no seat available")
}
device, err := dataControlMgr.GetDataDevice(seat)
if err != nil {
return nil, "", fmt.Errorf("get data device: %w", err)
}
defer device.Destroy()
offerMimeTypes := make(map[*ext_data_control.ExtDataControlOfferV1][]string)
device.SetDataOfferHandler(func(e ext_data_control.ExtDataControlDeviceV1DataOfferEvent) {
if e.Id == nil {
return
}
offerMimeTypes[e.Id] = nil
e.Id.SetOfferHandler(func(me ext_data_control.ExtDataControlOfferV1OfferEvent) {
offerMimeTypes[e.Id] = append(offerMimeTypes[e.Id], me.MimeType)
})
})
var selectionOffer *ext_data_control.ExtDataControlOfferV1
gotSelection := false
device.SetSelectionHandler(func(e ext_data_control.ExtDataControlDeviceV1SelectionEvent) {
selectionOffer = e.Id
gotSelection = true
})
display.Roundtrip()
display.Roundtrip()
if !gotSelection || selectionOffer == nil {
return nil, "", fmt.Errorf("no clipboard data")
}
mimeTypes := offerMimeTypes[selectionOffer]
selectedMime := selectPreferredMimeType(mimeTypes)
if selectedMime == "" {
return nil, "", fmt.Errorf("no supported mime type")
}
r, w, err := os.Pipe()
if err != nil {
return nil, "", fmt.Errorf("create pipe: %w", err)
}
defer r.Close()
if err := selectionOffer.Receive(selectedMime, int(w.Fd())); err != nil {
w.Close()
return nil, "", fmt.Errorf("receive: %w", err)
}
w.Close()
display.Roundtrip()
data, err := io.ReadAll(r)
if err != nil {
return nil, "", fmt.Errorf("read: %w", err)
}
return data, selectedMime, nil
}
func PasteText() (string, error) {
data, _, err := Paste()
if err != nil {
return "", err
}
return string(data), nil
}
func selectPreferredMimeType(mimes []string) string {
preferred := []string{
"text/plain;charset=utf-8",
"text/plain",
"UTF8_STRING",
"STRING",
"TEXT",
"image/png",
"image/jpeg",
}
for _, pref := range preferred {
for _, mime := range mimes {
if mime == pref {
return mime
}
}
}
if len(mimes) > 0 {
return mimes[0]
}
return ""
}
func IsImageMimeType(mime string) bool {
return len(mime) > 6 && mime[:6] == "image/"
}


@@ -0,0 +1,229 @@
package clipboard
import (
"bytes"
"encoding/binary"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"os"
"path/filepath"
"strings"
"time"
_ "golang.org/x/image/bmp"
_ "golang.org/x/image/tiff"
"hash/fnv"
bolt "go.etcd.io/bbolt"
)
type StoreConfig struct {
MaxHistory int
MaxEntrySize int64
}
func DefaultStoreConfig() StoreConfig {
return StoreConfig{
MaxHistory: 100,
MaxEntrySize: 5 * 1024 * 1024,
}
}
type Entry struct {
ID uint64
Data []byte
MimeType string
Preview string
Size int
Timestamp time.Time
IsImage bool
Hash uint64
}
func Store(data []byte, mimeType string) error {
return StoreWithConfig(data, mimeType, DefaultStoreConfig())
}
func StoreWithConfig(data []byte, mimeType string, cfg StoreConfig) error {
if len(data) == 0 {
return nil
}
if int64(len(data)) > cfg.MaxEntrySize {
return fmt.Errorf("data too large: %d > %d", len(data), cfg.MaxEntrySize)
}
dbPath, err := getDBPath()
if err != nil {
return fmt.Errorf("get db path: %w", err)
}
db, err := bolt.Open(dbPath, 0644, &bolt.Options{Timeout: 1 * time.Second})
if err != nil {
return fmt.Errorf("open db: %w", err)
}
defer db.Close()
entry := Entry{
Data: data,
MimeType: mimeType,
Size: len(data),
Timestamp: time.Now(),
IsImage: IsImageMimeType(mimeType),
Hash: computeHash(data),
}
switch {
case entry.IsImage:
entry.Preview = imagePreview(data, mimeType)
default:
entry.Preview = textPreview(data)
}
return db.Update(func(tx *bolt.Tx) error {
b, err := tx.CreateBucketIfNotExists([]byte("clipboard"))
if err != nil {
return err
}
if err := deduplicateInTx(b, entry.Hash); err != nil {
return err
}
id, err := b.NextSequence()
if err != nil {
return err
}
entry.ID = id
encoded, err := encodeEntry(entry)
if err != nil {
return err
}
if err := b.Put(itob(id), encoded); err != nil {
return err
}
return trimLengthInTx(b, cfg.MaxHistory)
})
}
func getDBPath() (string, error) {
cacheDir, err := os.UserCacheDir()
if err != nil {
homeDir, err := os.UserHomeDir()
if err != nil {
return "", err
}
cacheDir = filepath.Join(homeDir, ".cache")
}
dbDir := filepath.Join(cacheDir, "dms-clipboard")
if err := os.MkdirAll(dbDir, 0700); err != nil {
return "", err
}
return filepath.Join(dbDir, "db"), nil
}
func deduplicateInTx(b *bolt.Bucket, hash uint64) error {
c := b.Cursor()
for k, v := c.Last(); k != nil; k, v = c.Prev() {
if extractHash(v) != hash {
continue
}
if err := b.Delete(k); err != nil {
return err
}
}
return nil
}
func trimLengthInTx(b *bolt.Bucket, maxHistory int) error {
c := b.Cursor()
var count int
for k, _ := c.Last(); k != nil; k, _ = c.Prev() {
if count < maxHistory {
count++
continue
}
if err := b.Delete(k); err != nil {
return err
}
}
return nil
}
func encodeEntry(e Entry) ([]byte, error) {
buf := new(bytes.Buffer)
binary.Write(buf, binary.BigEndian, e.ID)
binary.Write(buf, binary.BigEndian, uint32(len(e.Data)))
buf.Write(e.Data)
binary.Write(buf, binary.BigEndian, uint32(len(e.MimeType)))
buf.WriteString(e.MimeType)
binary.Write(buf, binary.BigEndian, uint32(len(e.Preview)))
buf.WriteString(e.Preview)
binary.Write(buf, binary.BigEndian, int32(e.Size))
binary.Write(buf, binary.BigEndian, e.Timestamp.Unix())
if e.IsImage {
buf.WriteByte(1)
} else {
buf.WriteByte(0)
}
binary.Write(buf, binary.BigEndian, e.Hash)
return buf.Bytes(), nil
}
func itob(v uint64) []byte {
b := make([]byte, 8)
binary.BigEndian.PutUint64(b, v)
return b
}
func computeHash(data []byte) uint64 {
h := fnv.New64a()
h.Write(data)
return h.Sum64()
}
func extractHash(data []byte) uint64 {
if len(data) < 8 {
return 0
}
return binary.BigEndian.Uint64(data[len(data)-8:])
}
func textPreview(data []byte) string {
text := string(data)
text = strings.TrimSpace(text)
text = strings.Join(strings.Fields(text), " ")
if len(text) > 100 {
return text[:100] + "…"
}
return text
}
func imagePreview(data []byte, format string) string {
config, imgFmt, err := image.DecodeConfig(bytes.NewReader(data))
if err != nil {
return fmt.Sprintf("[[ image %s %s ]]", sizeStr(len(data)), format)
}
return fmt.Sprintf("[[ image %s %s %dx%d ]]", sizeStr(len(data)), imgFmt, config.Width, config.Height)
}
func sizeStr(size int) string {
units := []string{"B", "KiB", "MiB"}
var i int
fsize := float64(size)
for fsize >= 1024 && i < len(units)-1 {
fsize /= 1024
i++
}
return fmt.Sprintf("%.0f %s", fsize, units[i])
}


@@ -0,0 +1,160 @@
package clipboard
import (
"context"
"fmt"
"io"
"os"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/ext_data_control"
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
)
type ClipboardChange struct {
Data []byte
MimeType string
}
func Watch(ctx context.Context, callback func(data []byte, mimeType string)) error {
display, err := wlclient.Connect("")
if err != nil {
return fmt.Errorf("wayland connect: %w", err)
}
defer display.Destroy()
wlCtx := display.Context()
registry, err := display.GetRegistry()
if err != nil {
return fmt.Errorf("get registry: %w", err)
}
defer registry.Destroy()
var dataControlMgr *ext_data_control.ExtDataControlManagerV1
var seat *wlclient.Seat
var bindErr error
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
switch e.Interface {
case "ext_data_control_manager_v1":
dataControlMgr = ext_data_control.NewExtDataControlManagerV1(wlCtx)
if err := registry.Bind(e.Name, e.Interface, e.Version, dataControlMgr); err != nil {
bindErr = err
}
case "wl_seat":
if seat != nil {
return
}
seat = wlclient.NewSeat(wlCtx)
if err := registry.Bind(e.Name, e.Interface, e.Version, seat); err != nil {
bindErr = err
}
}
})
display.Roundtrip()
display.Roundtrip()
if bindErr != nil {
return fmt.Errorf("registry bind: %w", bindErr)
}
if dataControlMgr == nil {
return fmt.Errorf("compositor does not support ext_data_control_manager_v1")
}
defer dataControlMgr.Destroy()
if seat == nil {
return fmt.Errorf("no seat available")
}
device, err := dataControlMgr.GetDataDevice(seat)
if err != nil {
return fmt.Errorf("get data device: %w", err)
}
defer device.Destroy()
offerMimeTypes := make(map[*ext_data_control.ExtDataControlOfferV1][]string)
device.SetDataOfferHandler(func(e ext_data_control.ExtDataControlDeviceV1DataOfferEvent) {
if e.Id == nil {
return
}
offerMimeTypes[e.Id] = nil
e.Id.SetOfferHandler(func(me ext_data_control.ExtDataControlOfferV1OfferEvent) {
offerMimeTypes[e.Id] = append(offerMimeTypes[e.Id], me.MimeType)
})
})
device.SetSelectionHandler(func(e ext_data_control.ExtDataControlDeviceV1SelectionEvent) {
if e.Id == nil {
return
}
mimes := offerMimeTypes[e.Id]
selectedMime := selectPreferredMimeType(mimes)
if selectedMime == "" {
return
}
r, w, err := os.Pipe()
if err != nil {
return
}
if err := e.Id.Receive(selectedMime, int(w.Fd())); err != nil {
w.Close()
r.Close()
return
}
w.Close()
go func() {
defer r.Close()
data, err := io.ReadAll(r)
if err != nil || len(data) == 0 {
return
}
callback(data, selectedMime)
}()
})
display.Roundtrip()
display.Roundtrip()
for {
select {
case <-ctx.Done():
return ctx.Err()
default:
if err := wlCtx.SetReadDeadline(time.Now().Add(100 * time.Millisecond)); err != nil {
return fmt.Errorf("set read deadline: %w", err)
}
if err := wlCtx.Dispatch(); err != nil && err != os.ErrDeadlineExceeded {
return fmt.Errorf("dispatch: %w", err)
}
}
}
}
func WatchChan(ctx context.Context) (<-chan ClipboardChange, <-chan error) {
ch := make(chan ClipboardChange, 16)
errCh := make(chan error, 1)
go func() {
defer close(ch)
err := Watch(ctx, func(data []byte, mimeType string) {
select {
case ch <- ClipboardChange{Data: data, MimeType: mimeType}:
default:
}
})
if err != nil && err != context.Canceled {
errCh <- err
}
close(errCh)
}()
time.Sleep(50 * time.Millisecond)
return ch, errCh
}
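
A hedged caller-side sketch of WatchChan: the import path is inferred from the module path above (and, being internal, only compiles from inside this module), and the logging is purely illustrative. The error channel is set to nil once it closes so the select does not spin on a closed channel:

    package main

    import (
        "context"
        "log"
        "os/signal"
        "syscall"

        "github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard" // assumed path
    )

    func main() {
        ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
        defer stop()

        changes, errs := clipboard.WatchChan(ctx)
        for {
            select {
            case c, ok := <-changes:
                if !ok {
                    return // watcher stopped (context cancelled)
                }
                log.Printf("clipboard changed: %d bytes (%s)", len(c.Data), c.MimeType)
            case err, ok := <-errs:
                if !ok {
                    errs = nil // closed: stop selecting on it
                } else {
                    log.Fatal(err)
                }
            }
        }
    }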

View File

@@ -221,10 +221,7 @@ func (p *Picker) handleGlobal(e client.RegistryGlobalEvent) {
case client.OutputInterfaceName:
output := client.NewOutput(p.ctx)
version := e.Version
if version > 4 {
version = 4
}
version := min(e.Version, 4)
if err := p.registry.Bind(e.Name, e.Interface, version, output); err == nil {
p.outputsMu.Lock()
p.outputs[e.Name] = &Output{
@@ -239,20 +236,14 @@ func (p *Picker) handleGlobal(e client.RegistryGlobalEvent) {
case wlr_layer_shell.ZwlrLayerShellV1InterfaceName:
layerShell := wlr_layer_shell.NewZwlrLayerShellV1(p.ctx)
version := e.Version
if version > 4 {
version = 4
}
version := min(e.Version, 4)
if err := p.registry.Bind(e.Name, e.Interface, version, layerShell); err == nil {
p.layerShell = layerShell
}
case wlr_screencopy.ZwlrScreencopyManagerV1InterfaceName:
screencopy := wlr_screencopy.NewZwlrScreencopyManagerV1(p.ctx)
version := e.Version
if version > 3 {
version = 3
}
version := min(e.Version, 3)
if err := p.registry.Bind(e.Name, e.Interface, version, screencopy); err == nil {
p.screencopy = screencopy
}
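
These hunks come from the gopls modernizer pass in the commit list: the hand-rolled clamp of the advertised protocol version becomes the generic min builtin (Go 1.21), with identical results. A standalone sketch:

    package main

    import "fmt"

    func main() {
        for _, advertised := range []uint32{1, 4, 7} {
            // Never bind a higher protocol version than we implement (4 here);
            // prints 1, 4, 4 across the three iterations.
            fmt.Println(min(advertised, 4))
        }
    }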

View File

@@ -1157,7 +1157,7 @@ func drawGlyph(data []byte, stride, width, height, x, y int, r rune, col Color,
rOff, bOff = 2, 0
}
for row := 0; row < fontH; row++ {
for row := range fontH {
yy := y + row
if yy < 0 || yy >= height {
continue
@@ -1165,7 +1165,7 @@ func drawGlyph(data []byte, stride, width, height, x, y int, r rune, col Color,
rowPattern := g[row]
dstRowOff := yy * stride
for colIdx := 0; colIdx < fontW; colIdx++ {
for colIdx := range fontW {
if (rowPattern & (1 << (fontW - 1 - colIdx))) == 0 {
continue
}
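
Same modernizer rewrite here: Go 1.22's range-over-int form replaces the counted loops and still visits 0 through fontH-1 (and fontW-1). A standalone sketch:

    package main

    import "fmt"

    func main() {
        const fontH = 3
        // Equivalent to: for row := 0; row < fontH; row++ — prints 0, 1, 2.
        for row := range fontH {
            fmt.Println(row)
        }
    }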

View File

@@ -0,0 +1,314 @@
package colorpicker
import (
"sync"
"testing"
"github.com/stretchr/testify/assert"
)
func TestSurfaceState_ConcurrentPointerMotion(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
var wg sync.WaitGroup
const goroutines = 50
const iterations = 100
for i := range goroutines {
wg.Add(1)
go func(id int) {
defer wg.Done()
for j := range iterations {
s.OnPointerMotion(float64(id*10+j), float64(id*10+j))
}
}(i)
}
wg.Wait()
}
func TestSurfaceState_ConcurrentScaleAccess(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
var wg sync.WaitGroup
const goroutines = 30
const iterations = 100
for i := range goroutines / 2 {
wg.Add(1)
go func(id int) {
defer wg.Done()
for range iterations {
s.SetScale(int32(id%3 + 1))
}
}(i)
}
for range goroutines / 2 {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
scale := s.Scale()
assert.GreaterOrEqual(t, scale, int32(1))
}
}()
}
wg.Wait()
}
func TestSurfaceState_ConcurrentLogicalSize(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
var wg sync.WaitGroup
const goroutines = 20
const iterations = 100
for i := range goroutines / 2 {
wg.Add(1)
go func(id int) {
defer wg.Done()
for j := range iterations {
_ = s.OnLayerConfigure(1920+id, 1080+j)
}
}(i)
}
for range goroutines / 2 {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
w, h := s.LogicalSize()
_ = w
_ = h
}
}()
}
wg.Wait()
}
func TestSurfaceState_ConcurrentIsDone(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
var wg sync.WaitGroup
const goroutines = 30
const iterations = 100
for range goroutines / 3 {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
s.OnPointerButton(0x110, 1)
}
}()
}
for range goroutines / 3 {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
s.OnKey(1, 1)
}
}()
}
for range goroutines / 3 {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
picked, cancelled := s.IsDone()
_ = picked
_ = cancelled
}
}()
}
wg.Wait()
}
func TestSurfaceState_ConcurrentIsReady(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
var wg sync.WaitGroup
const goroutines = 20
const iterations = 100
for range goroutines {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
_ = s.IsReady()
}
}()
}
wg.Wait()
}
func TestSurfaceState_ConcurrentSwapBuffers(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
var wg sync.WaitGroup
const goroutines = 20
const iterations = 100
for range goroutines {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
s.SwapBuffers()
}
}()
}
wg.Wait()
}
func TestSurfaceState_ZeroScale(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
s.SetScale(0)
assert.Equal(t, int32(1), s.Scale())
}
func TestSurfaceState_NegativeScale(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
s.SetScale(-5)
assert.Equal(t, int32(1), s.Scale())
}
func TestSurfaceState_ZeroDimensionConfigure(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
err := s.OnLayerConfigure(0, 100)
assert.NoError(t, err)
err = s.OnLayerConfigure(100, 0)
assert.NoError(t, err)
err = s.OnLayerConfigure(-1, 100)
assert.NoError(t, err)
w, h := s.LogicalSize()
assert.Equal(t, 0, w)
assert.Equal(t, 0, h)
}
func TestSurfaceState_PickColorNilBuffer(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
color, ok := s.PickColor()
assert.False(t, ok)
assert.Equal(t, Color{}, color)
}
func TestSurfaceState_RedrawNilBuffer(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
buf := s.Redraw()
assert.Nil(t, buf)
}
func TestSurfaceState_RedrawScreenOnlyNilBuffer(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
buf := s.RedrawScreenOnly()
assert.Nil(t, buf)
}
func TestSurfaceState_FrontRenderBufferNil(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
buf := s.FrontRenderBuffer()
assert.Nil(t, buf)
}
func TestSurfaceState_ScreenBufferNil(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
buf := s.ScreenBuffer()
assert.Nil(t, buf)
}
func TestSurfaceState_DestroyMultipleTimes(t *testing.T) {
s := NewSurfaceState(FormatHex, false)
s.Destroy()
s.Destroy()
}
func TestClamp(t *testing.T) {
tests := []struct {
v, lo, hi, expected int
}{
{5, 0, 10, 5},
{-5, 0, 10, 0},
{15, 0, 10, 10},
{0, 0, 10, 0},
{10, 0, 10, 10},
}
for _, tt := range tests {
result := clamp(tt.v, tt.lo, tt.hi)
assert.Equal(t, tt.expected, result)
}
}
func TestClampF(t *testing.T) {
tests := []struct {
v, lo, hi, expected float64
}{
{5.0, 0.0, 10.0, 5.0},
{-5.0, 0.0, 10.0, 0.0},
{15.0, 0.0, 10.0, 10.0},
{0.0, 0.0, 10.0, 0.0},
{10.0, 0.0, 10.0, 10.0},
}
for _, tt := range tests {
result := clampF(tt.v, tt.lo, tt.hi)
assert.InDelta(t, tt.expected, result, 0.001)
}
}
func TestAbs(t *testing.T) {
tests := []struct {
v, expected int
}{
{5, 5},
{-5, 5},
{0, 0},
}
for _, tt := range tests {
result := abs(tt.v)
assert.Equal(t, tt.expected, result)
}
}
func TestBlendColors(t *testing.T) {
bg := Color{R: 0, G: 0, B: 0, A: 255}
fg := Color{R: 255, G: 255, B: 255, A: 255}
result := blendColors(bg, fg, 0.0)
assert.Equal(t, bg.R, result.R)
assert.Equal(t, bg.G, result.G)
assert.Equal(t, bg.B, result.B)
result = blendColors(bg, fg, 1.0)
assert.Equal(t, fg.R, result.R)
assert.Equal(t, fg.G, result.G)
assert.Equal(t, fg.B, result.B)
result = blendColors(bg, fg, 0.5)
assert.InDelta(t, 127, int(result.R), 1)
assert.InDelta(t, 127, int(result.G), 1)
assert.InDelta(t, 127, int(result.B), 1)
result = blendColors(bg, fg, -1.0)
assert.Equal(t, bg.R, result.R)
result = blendColors(bg, fg, 2.0)
assert.Equal(t, fg.R, result.R)
}

View File

@@ -213,6 +213,11 @@ func (cd *ConfigDeployer) deployNiriDmsConfigs(dmsDir, terminalCommand string) e
for _, cfg := range configs {
path := filepath.Join(dmsDir, cfg.name)
// Skip if file already exists to preserve user modifications
if _, err := os.Stat(path); err == nil {
cd.log(fmt.Sprintf("Skipping %s (already exists)", cfg.name))
continue
}
if err := os.WriteFile(path, []byte(cfg.content), 0644); err != nil {
return fmt.Errorf("failed to write %s: %w", cfg.name, err)
}
@@ -265,7 +270,13 @@ func (cd *ConfigDeployer) deployGhosttyConfig() ([]DeploymentResult, error) {
colorResult := DeploymentResult{
ConfigType: "Ghostty Colors",
Path: filepath.Join(os.Getenv("HOME"), ".config", "ghostty", "config-dankcolors"),
Path: filepath.Join(os.Getenv("HOME"), ".config", "ghostty", "themes", "dankcolors"),
}
themesDir := filepath.Dir(colorResult.Path)
if err := os.MkdirAll(themesDir, 0755); err != nil {
mainResult.Error = fmt.Errorf("failed to create themes directory: %w", err)
return []DeploymentResult{mainResult}, mainResult.Error
}
if err := os.WriteFile(colorResult.Path, []byte(GhosttyColorConfig), 0644); err != nil {
@@ -615,10 +626,11 @@ func (cd *ConfigDeployer) transformNiriConfigForNonSystemd(config, terminalComma
spawnDms := `spawn-at-startup "dms" "run"`
if !strings.Contains(config, spawnDms) {
config = strings.Replace(config,
`spawn-at-startup "bash" "-c" "wl-paste --watch cliphist store &"`,
`spawn-at-startup "bash" "-c" "wl-paste --watch cliphist store &"`+"\n"+spawnDms,
1)
// Insert spawn-at-startup for dms after the environment block
envBlockEnd := regexp.MustCompile(`environment \{[^}]*\}`)
if loc := envBlockEnd.FindStringIndex(config); loc != nil {
config = config[:loc[1]] + "\n" + spawnDms + config[loc[1]:]
}
}
return config
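
With the cliphist spawn line gone, the non-systemd transform can no longer anchor on it, so the dms spawn is inserted right after the environment block instead. A standalone sketch of that insertion on a toy config, using the same regex as above:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        config := "environment {\n    XDG_CURRENT_DESKTOP \"niri\"\n}"
        spawnDms := `spawn-at-startup "dms" "run"`
        envBlockEnd := regexp.MustCompile(`environment \{[^}]*\}`)
        if loc := envBlockEnd.FindStringIndex(config); loc != nil {
            config = config[:loc[1]] + "\n" + spawnDms + config[loc[1]:]
        }
        fmt.Println(config)
        // environment {
        //     XDG_CURRENT_DESKTOP "niri"
        // }
        // spawn-at-startup "dms" "run"
    }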

View File

@@ -468,7 +468,7 @@ func TestHyprlandConfigStructure(t *testing.T) {
func TestGhosttyConfigStructure(t *testing.T) {
assert.Contains(t, GhosttyConfig, "window-decoration = false")
assert.Contains(t, GhosttyConfig, "background-opacity = 1.0")
assert.Contains(t, GhosttyConfig, "config-file = ./config-dankcolors")
assert.Contains(t, GhosttyConfig, "theme = dankcolors")
}
func TestGhosttyColorConfigStructure(t *testing.T) {

View File

@@ -5,15 +5,13 @@ import (
"os"
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
)
func LocateDMSConfig() (string, error) {
var primaryPaths []string
configHome := utils.XDGConfigHome()
if configHome != "" {
configHome, err := os.UserConfigDir()
if err == nil && configHome != "" {
primaryPaths = append(primaryPaths, filepath.Join(configHome, "quickshell", "dms"))
}
@@ -23,7 +21,7 @@ func LocateDMSConfig() (string, error) {
dataDirs = "/usr/local/share:/usr/share"
}
for _, dir := range strings.Split(dataDirs, ":") {
for dir := range strings.SplitSeq(dataDirs, ":") {
if dir != "" {
primaryPaths = append(primaryPaths, filepath.Join(dir, "quickshell", "dms"))
}
@@ -35,7 +33,7 @@ func LocateDMSConfig() (string, error) {
configDirs = "/etc/xdg"
}
for _, dir := range strings.Split(configDirs, ":") {
for dir := range strings.SplitSeq(configDirs, ":") {
if dir != "" {
primaryPaths = append(primaryPaths, filepath.Join(dir, "quickshell", "dms"))
}
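
Two modernizations in this file: os.UserConfigDir replaces the custom XDG helper, and strings.SplitSeq (Go 1.24) iterates the colon-separated path lists without allocating an intermediate slice; the loop bodies are unchanged. A standalone sketch of the SplitSeq form:

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    func main() {
        dataDirs := "/usr/local/share:/usr/share"
        var paths []string
        for dir := range strings.SplitSeq(dataDirs, ":") {
            if dir != "" {
                paths = append(paths, filepath.Join(dir, "quickshell", "dms"))
            }
        }
        fmt.Println(paths) // [/usr/local/share/quickshell/dms /usr/share/quickshell/dms]
    }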

View File

@@ -48,4 +48,4 @@ keybind = shift+enter=text:\n
gtk-single-instance = true
# Dank color generation
config-file = ./config-dankcolors
theme = dankcolors

View File

@@ -12,7 +12,6 @@ monitor = , preferred,auto,auto
# ==================
exec-once = dbus-update-activation-environment --systemd --all
exec-once = systemctl --user start hyprland-session.target
exec-once = bash -c "wl-paste --watch cliphist store &"
# ==================
# INPUT CONFIG
@@ -277,4 +276,4 @@ bind = CTRL, Print, exec, dms screenshot full
bind = ALT, Print, exec, dms screenshot window
# === System Controls ===
bind = $mod SHIFT, P, dpms, off
bind = $mod SHIFT, P, dpms, toggle

View File

@@ -1,3 +1,8 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
recent-windows {
highlight {
corner-radius 12

View File

@@ -1,3 +1,8 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
binds {
// === System & Overview ===
Mod+D repeat=false { toggle-overview; }
@@ -192,4 +197,4 @@ binds {
// === System Controls ===
Mod+Escape allow-inhibiting=false { toggle-keyboard-shortcuts-inhibit; }
Mod+Shift+P { power-off-monitors; }
}
}

View File

@@ -1,3 +1,8 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
layout {
background-color "transparent"
@@ -33,4 +38,4 @@ recent-windows {
active-color "#124a73"
urgent-color "#ffb4ab"
}
}
}

View File

@@ -1,3 +1,8 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
layout {
gaps 4

View File

@@ -18,15 +18,64 @@ gestures {
input {
keyboard {
xkb {
// You can set rules, model, layout, variant and options.
// For more information, see xkeyboard-config(7).
// For example:
// layout "us,ru"
// options "grp:win_space_toggle,compose:ralt,ctrl:nocaps"
// If this section is empty, niri will fetch xkb settings
// from org.freedesktop.locale1. You can control these using
// localectl set-x11-keymap.
}
// Enable numlock on startup, omitting this setting disables it.
numlock
}
// Next sections include libinput settings.
// Omitting settings disables them, or leaves them at their default values.
// All commented-out settings here are examples, not defaults.
touchpad {
// off
tap
// dwt
// dwtp
// drag false
// drag-lock
natural-scroll
// accel-speed 0.2
// accel-profile "flat"
// scroll-method "two-finger"
// disabled-on-external-mouse
}
mouse {
// off
// natural-scroll
// accel-speed 0.2
// accel-profile "flat"
// scroll-method "no-scroll"
}
trackpoint {
// off
// natural-scroll
// accel-speed 0.2
// accel-profile "flat"
// scroll-method "on-button-down"
// scroll-button 273
// scroll-button-lock
// middle-emulation
}
// Uncomment this to make the mouse warp to the center of newly focused windows.
// warp-mouse-to-focus
// Focus windows and outputs automatically when moving the mouse into them.
// Setting max-scroll-amount="0%" makes it work only on windows already fully on screen.
// focus-follows-mouse max-scroll-amount="0%"
}
// You can configure outputs by their name, which you can find
// by running `niri msg outputs` while inside a niri instance.
@@ -109,7 +158,6 @@ overview {
// which may be more convenient to use.
// See the binds section below for more spawn examples.
// This line starts waybar, a commonly used bar for Wayland compositors.
spawn-at-startup "bash" "-c" "wl-paste --watch cliphist store &"
environment {
XDG_CURRENT_DESKTOP "niri"
}

View File

@@ -345,7 +345,7 @@ func EnsureContrastDPSLstar(hexColor, hexBg string, minLc float64, isLightMode b
}
step := 0.5
for i := 0; i < 120; i++ {
for range 120 {
Lf = math.Max(0, math.Min(100, Lf+dir*step))
cand := labToHex(Lf, af, bf)
if DeltaPhiStarContrast(cand, hexBg, isLightMode) >= minLc {

View File

@@ -658,7 +658,7 @@ func TestContrastAlgorithmComparison(t *testing.T) {
}
differentCount := 0
for i := 0; i < 16; i++ {
for i := range 16 {
if wcagColors[i].Hex != dpsColors[i].Hex {
differentCount++
}

View File

@@ -112,3 +112,24 @@ func GenerateWeztermTheme(p Palette) string {
p.Color12.Hex, p.Color13.Hex, p.Color14.Hex, p.Color15.Hex)
return result.String()
}
func GenerateNeovimTheme(p Palette) string {
var result strings.Builder
fmt.Fprintf(&result, "vim.g.terminal_color_0 = \"%s\"\n", p.Color0.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_1 = \"%s\"\n", p.Color1.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_2 = \"%s\"\n", p.Color2.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_3 = \"%s\"\n", p.Color3.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_4 = \"%s\"\n", p.Color4.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_5 = \"%s\"\n", p.Color5.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_6 = \"%s\"\n", p.Color6.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_7 = \"%s\"\n", p.Color7.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_8 = \"%s\"\n", p.Color8.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_9 = \"%s\"\n", p.Color9.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_10 = \"%s\"\n", p.Color10.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_11 = \"%s\"\n", p.Color11.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_12 = \"%s\"\n", p.Color12.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_13 = \"%s\"\n", p.Color13.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_14 = \"%s\"\n", p.Color14.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_15 = \"%s\"\n", p.Color15.Hex)
return result.String()
}
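
A hedged usage sketch for the new Neovim template. The palette construction is elided because the Palette and color types are not part of this hunk, and the output path is illustrative only (the snippet needs the os and path/filepath imports):

    lua := GenerateNeovimTheme(p) // p: a fully populated Palette
    path := filepath.Join(os.Getenv("HOME"), ".config", "nvim", "lua", "dms-colors.lua")
    if err := os.MkdirAll(filepath.Dir(path), 0o755); err == nil {
        _ = os.WriteFile(path, []byte(lua), 0o644)
    }
    // Sourcing the generated file sets vim.g.terminal_color_0 … _15,
    // which Neovim uses for its built-in :terminal palette.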

View File

@@ -7,6 +7,7 @@ import (
"os/exec"
"path/filepath"
"runtime"
"slices"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
@@ -103,10 +104,8 @@ func (a *ArchDistribution) DetectDependenciesWithTerminal(ctx context.Context, w
dependencies = append(dependencies, a.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, a.detectMatugen())
dependencies = append(dependencies, a.detectDgop())
dependencies = append(dependencies, a.detectClipboardTools()...)
return dependencies, nil
}
@@ -139,8 +138,6 @@ func (a *ArchDistribution) GetPackageMappingWithVariants(wm deps.WindowManager,
"ghostty": {Name: "ghostty", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"cliphist": {Name: "cliphist", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
}
@@ -344,7 +341,7 @@ func (a *ArchDistribution) InstallPackages(ctx context.Context, dependencies []d
a.log(fmt.Sprintf("Warning: failed to write window manager config: %v", err))
}
if err := a.EnableDMSService(ctx); err != nil {
if err := a.EnableDMSService(ctx, wm); err != nil {
a.log(fmt.Sprintf("Warning: failed to enable dms service: %v", err))
}
@@ -518,12 +515,9 @@ func (a *ArchDistribution) reorderAURPackages(packages []string) []string {
dmsShell = append(dmsShell, pkg)
} else {
isDep := false
for _, dep := range dmsDepencies {
if pkg == dep {
deps = append(deps, pkg)
isDep = true
break
}
if slices.Contains(dmsDepencies, pkg) {
deps = append(deps, pkg)
isDep = true
}
if !isDep {
others = append(others, pkg)
@@ -549,7 +543,7 @@ func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sud
a.log(fmt.Sprintf("Warning: failed to clean existing cache for %s: %v", pkg, err))
}
if err := os.MkdirAll(buildDir, 0755); err != nil {
if err := os.MkdirAll(buildDir, 0o755); err != nil {
return fmt.Errorf("failed to create build directory: %w", err)
}
defer func() {
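
The membership loop over dmsDepencies collapses into slices.Contains (Go 1.21); behaviour is unchanged, including the early exit the old break provided. A standalone sketch with illustrative package names:

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        dmsDependencies := []string{"quickshell", "matugen", "dgop"} // illustrative
        fmt.Println(slices.Contains(dmsDependencies, "dgop"))     // true
        fmt.Println(slices.Contains(dmsDependencies, "cliphist")) // false
    }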

View File

@@ -185,37 +185,6 @@ func (b *BaseDistribution) detectSpecificTerminal(terminal deps.Terminal) deps.D
}
}
func (b *BaseDistribution) detectClipboardTools() []deps.Dependency {
var dependencies []deps.Dependency
cliphist := deps.StatusMissing
if b.commandExists("cliphist") {
cliphist = deps.StatusInstalled
}
wlClipboard := deps.StatusMissing
if b.commandExists("wl-copy") && b.commandExists("wl-paste") {
wlClipboard = deps.StatusInstalled
}
dependencies = append(dependencies,
deps.Dependency{
Name: "cliphist",
Status: cliphist,
Description: "Wayland clipboard manager",
Required: true,
},
deps.Dependency{
Name: "wl-clipboard",
Status: wlClipboard,
Description: "Wayland clipboard utilities",
Required: true,
},
)
return dependencies
}
func (b *BaseDistribution) detectHyprlandTools() []deps.Dependency {
var dependencies []deps.Dependency
@@ -581,10 +550,7 @@ func (b *BaseDistribution) WriteEnvironmentConfig(terminal deps.Terminal) error
terminalCmd = "ghostty"
}
content := fmt.Sprintf(`QT_QPA_PLATFORM=wayland
ELECTRON_OZONE_PLATFORM_HINT=auto
QT_QPA_PLATFORMTHEME=gtk3
QT_QPA_PLATFORMTHEME_QT6=gtk3
content := fmt.Sprintf(`ELECTRON_OZONE_PLATFORM_HINT=auto
TERMINAL=%s
`, terminalCmd)
@@ -597,12 +563,18 @@ TERMINAL=%s
return nil
}
func (b *BaseDistribution) EnableDMSService(ctx context.Context) error {
cmd := exec.CommandContext(ctx, "systemctl", "--user", "enable", "--now", "dms")
if err := cmd.Run(); err != nil {
return fmt.Errorf("failed to enable dms service: %w", err)
func (b *BaseDistribution) EnableDMSService(ctx context.Context, wm deps.WindowManager) error {
switch wm {
case deps.WindowManagerNiri:
if err := exec.CommandContext(ctx, "systemctl", "--user", "add-wants", "niri.service", "dms").Run(); err != nil {
b.log("Warning: failed to add dms as a want for niri.service")
}
case deps.WindowManagerHyprland:
if err := exec.CommandContext(ctx, "systemctl", "--user", "add-wants", "hyprland-session.target", "dms").Run(); err != nil {
b.log("Warning: failed to add dms as a want for hyprland-session.target")
}
}
b.log("Enabled dms systemd user service")
return nil
}
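
EnableDMSService now ties dms to the compositor session instead of enabling and starting it unconditionally: systemctl --user add-wants installs a Wants= dependency on niri.service or hyprland-session.target. A hedged verification sketch — the symlink location follows systemctl's documented .wants/ layout and is an assumption, not something this code checks:

    // For the niri case above, add-wants should leave a symlink roughly like
    // ~/.config/systemd/user/niri.service.wants/dms.service
    link := filepath.Join(os.Getenv("HOME"), ".config", "systemd", "user",
        "niri.service.wants", "dms.service")
    if _, err := os.Lstat(link); err == nil {
        fmt.Println("dms will be started together with niri.service")
    }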

View File

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"os/exec"
"runtime"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
@@ -69,7 +70,6 @@ func (d *DebianDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, d.detectMatugen())
dependencies = append(dependencies, d.detectDgop())
dependencies = append(dependencies, d.detectClipboardTools()...)
return dependencies, nil
}
@@ -102,7 +102,6 @@ func (d *DebianDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"git": {Name: "git", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
@@ -111,7 +110,6 @@ func (d *DebianDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"quickshell": d.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "matugen", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
"dgop": {Name: "dgop", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
"cliphist": {Name: "cliphist", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
"ghostty": {Name: "ghostty", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
}
@@ -312,7 +310,7 @@ func (d *DebianDistribution) InstallPackages(ctx context.Context, dependencies [
d.log(fmt.Sprintf("Warning: failed to write window manager config: %v", err))
}
if err := d.EnableDMSService(ctx); err != nil {
if err := d.EnableDMSService(ctx, wm); err != nil {
d.log(fmt.Sprintf("Warning: failed to enable dms service: %v", err))
}
@@ -387,6 +385,8 @@ func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Packa
debianVersion := "Debian_13"
if osInfo.VersionID == "testing" {
debianVersion = "Debian_Testing"
} else if osInfo.VersionCodename == "sid" || osInfo.VersionID == "sid" || strings.Contains(strings.ToLower(osInfo.PrettyName), "sid") || strings.Contains(strings.ToLower(osInfo.PrettyName), "unstable") {
debianVersion = "Debian_Unstable"
}
for _, pkg := range obsPkgs {
@@ -430,7 +430,7 @@ func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Packa
}
// Add repository
repoLine := fmt.Sprintf("deb [signed-by=%s] %s/ /", keyringPath, baseURL)
repoLine := fmt.Sprintf("deb [signed-by=%s, arch=%s] %s/ /", keyringPath, runtime.GOARCH, baseURL)
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
@@ -549,7 +549,7 @@ func (d *DebianDistribution) installBuildDependencies(ctx context.Context, manua
if err := d.installRust(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Rust: %w", err)
}
case "cliphist", "dgop":
case "dgop":
if err := d.installGo(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Go: %w", err)
}

View File

@@ -88,10 +88,8 @@ func (f *FedoraDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, f.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, f.detectMatugen())
dependencies = append(dependencies, f.detectDgop())
dependencies = append(dependencies, f.detectClipboardTools()...)
return dependencies, nil
}
@@ -117,14 +115,12 @@ func (f *FedoraDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"ghostty": {Name: "ghostty", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
// COPR packages
"quickshell": f.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "matugen", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
"cliphist": {Name: "cliphist", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
"dms (DankMaterialShell)": f.getDmsMapping(variants["dms (DankMaterialShell)"]),
"dgop": {Name: "dgop", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
}
@@ -353,7 +349,7 @@ func (f *FedoraDistribution) InstallPackages(ctx context.Context, dependencies [
f.log(fmt.Sprintf("Warning: failed to write window manager config: %v", err))
}
if err := f.EnableDMSService(ctx); err != nil {
if err := f.EnableDMSService(ctx, wm); err != nil {
f.log(fmt.Sprintf("Warning: failed to enable dms service: %v", err))
}

View File

@@ -107,7 +107,6 @@ func (g *GentooDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, g.detectMatugen())
dependencies = append(dependencies, g.detectDgop())
dependencies = append(dependencies, g.detectClipboardTools()...)
return dependencies, nil
}
@@ -140,7 +139,6 @@ func (g *GentooDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"git": {Name: "dev-vcs/git", Repository: RepoTypeSystem},
"kitty": {Name: "x11-terms/kitty", Repository: RepoTypeSystem, UseFlags: "X wayland"},
"alacritty": {Name: "x11-terms/alacritty", Repository: RepoTypeSystem, UseFlags: "X wayland"},
"wl-clipboard": {Name: "gui-apps/wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "sys-apps/xdg-desktop-portal-gtk", Repository: RepoTypeSystem, UseFlags: "wayland X"},
"accountsservice": {Name: "sys-apps/accountsservice", Repository: RepoTypeSystem},
@@ -151,7 +149,6 @@ func (g *GentooDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"quickshell": g.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "x11-misc/matugen", Repository: RepoTypeGURU, AcceptKeywords: archKeyword},
"cliphist": {Name: "app-misc/cliphist", Repository: RepoTypeGURU, AcceptKeywords: archKeyword},
"dms (DankMaterialShell)": g.getDmsMapping(variants["dms (DankMaterialShell)"]),
"dgop": {Name: "dgop", Repository: RepoTypeManual, BuildFunc: "installDgop"},
}
@@ -410,7 +407,7 @@ func (g *GentooDistribution) InstallPackages(ctx context.Context, dependencies [
g.log(fmt.Sprintf("Warning: failed to write window manager config: %v", err))
}
if err := g.EnableDMSService(ctx); err != nil {
if err := g.EnableDMSService(ctx, wm); err != nil {
g.log(fmt.Sprintf("Warning: failed to enable dms service: %v", err))
}

View File

@@ -18,8 +18,8 @@ type ManualPackageInstaller struct {
// parseLatestTagFromGitOutput parses git ls-remote output and returns the latest tag
func (m *ManualPackageInstaller) parseLatestTagFromGitOutput(output string) string {
lines := strings.Split(output, "\n")
for _, line := range lines {
lines := strings.SplitSeq(output, "\n")
for line := range lines {
if strings.Contains(line, "refs/tags/") && !strings.Contains(line, "^{}") {
parts := strings.Split(line, "refs/tags/")
if len(parts) > 1 {
@@ -74,10 +74,6 @@ func (m *ManualPackageInstaller) InstallManualPackages(ctx context.Context, pack
if err := m.installHyprland(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install hyprland: %w", err)
}
case "hyprpicker":
if err := m.installHyprpicker(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install hyprpicker: %w", err)
}
case "ghostty":
if err := m.installGhostty(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install ghostty: %w", err)
@@ -86,10 +82,6 @@ func (m *ManualPackageInstaller) InstallManualPackages(ctx context.Context, pack
if err := m.installMatugen(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install matugen: %w", err)
}
case "cliphist":
if err := m.installCliphist(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install cliphist: %w", err)
}
case "xwayland-satellite":
if err := m.installXwaylandSatellite(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install xwayland-satellite: %w", err)
@@ -111,12 +103,12 @@ func (m *ManualPackageInstaller) installDgop(ctx context.Context, sudoPassword s
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0755); err != nil {
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "dgop-build")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -168,10 +160,10 @@ func (m *ManualPackageInstaller) installNiri(ctx context.Context, sudoPassword s
homeDir, _ := os.UserHomeDir()
buildDir := filepath.Join(homeDir, ".cache", "dankinstall", "niri-build")
tmpDir := filepath.Join(homeDir, ".cache", "dankinstall", "tmp")
if err := os.MkdirAll(buildDir, 0755); err != nil {
if err := os.MkdirAll(buildDir, 0o755); err != nil {
return fmt.Errorf("failed to create build directory: %w", err)
}
if err := os.MkdirAll(tmpDir, 0755); err != nil {
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer func() {
@@ -245,12 +237,12 @@ func (m *ManualPackageInstaller) installQuickshell(ctx context.Context, variant
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0755); err != nil {
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "quickshell-build")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -281,7 +273,7 @@ func (m *ManualPackageInstaller) installQuickshell(ctx context.Context, variant
}
buildDir := tmpDir + "/build"
if err := os.MkdirAll(buildDir, 0755); err != nil {
if err := os.MkdirAll(buildDir, 0o755); err != nil {
return fmt.Errorf("failed to create build directory: %w", err)
}
@@ -351,12 +343,12 @@ func (m *ManualPackageInstaller) installHyprland(ctx context.Context, sudoPasswo
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0755); err != nil {
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "hyprland-build")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -405,184 +397,6 @@ func (m *ManualPackageInstaller) installHyprland(ctx context.Context, sudoPasswo
return nil
}
func (m *ManualPackageInstaller) installHyprpicker(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing hyprpicker from source...")
homeDir := os.Getenv("HOME")
if homeDir == "" {
return fmt.Errorf("HOME environment variable not set")
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
// Install hyprutils first
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.05,
Step: "Building hyprutils dependency...",
IsComplete: false,
CommandInfo: "git clone https://github.com/hyprwm/hyprutils.git",
}
hyprutilsDir := filepath.Join(cacheDir, "hyprutils-build")
if err := os.MkdirAll(hyprutilsDir, 0755); err != nil {
return fmt.Errorf("failed to create hyprutils directory: %w", err)
}
defer os.RemoveAll(hyprutilsDir)
cloneUtilsCmd := exec.CommandContext(ctx, "git", "clone", "https://github.com/hyprwm/hyprutils.git", hyprutilsDir)
if err := cloneUtilsCmd.Run(); err != nil {
return fmt.Errorf("failed to clone hyprutils: %w", err)
}
configureUtilsCmd := exec.CommandContext(ctx, "cmake",
"--no-warn-unused-cli",
"-DCMAKE_BUILD_TYPE:STRING=Release",
"-DCMAKE_INSTALL_PREFIX:PATH=/usr",
"-DBUILD_TESTING=off",
"-S", ".",
"-B", "./build")
configureUtilsCmd.Dir = hyprutilsDir
configureUtilsCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(configureUtilsCmd, progressChan, PhaseSystemPackages, 0.05, 0.1, "Configuring hyprutils..."); err != nil {
return fmt.Errorf("failed to configure hyprutils: %w", err)
}
buildUtilsCmd := exec.CommandContext(ctx, "cmake", "--build", "./build", "--config", "Release", "--target", "all")
buildUtilsCmd.Dir = hyprutilsDir
buildUtilsCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(buildUtilsCmd, progressChan, PhaseSystemPackages, 0.1, 0.2, "Building hyprutils..."); err != nil {
return fmt.Errorf("failed to build hyprutils: %w", err)
}
installUtilsCmd := ExecSudoCommand(ctx, sudoPassword, "cmake --install ./build")
installUtilsCmd.Dir = hyprutilsDir
if err := installUtilsCmd.Run(); err != nil {
return fmt.Errorf("failed to install hyprutils: %w", err)
}
// Install hyprwayland-scanner
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.2,
Step: "Building hyprwayland-scanner dependency...",
IsComplete: false,
CommandInfo: "git clone https://github.com/hyprwm/hyprwayland-scanner.git",
}
scannerDir := filepath.Join(cacheDir, "hyprwayland-scanner-build")
if err := os.MkdirAll(scannerDir, 0755); err != nil {
return fmt.Errorf("failed to create scanner directory: %w", err)
}
defer os.RemoveAll(scannerDir)
cloneScannerCmd := exec.CommandContext(ctx, "git", "clone", "https://github.com/hyprwm/hyprwayland-scanner.git", scannerDir)
if err := cloneScannerCmd.Run(); err != nil {
return fmt.Errorf("failed to clone hyprwayland-scanner: %w", err)
}
configureScannerCmd := exec.CommandContext(ctx, "cmake",
"-DCMAKE_INSTALL_PREFIX=/usr",
"-B", "build")
configureScannerCmd.Dir = scannerDir
configureScannerCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(configureScannerCmd, progressChan, PhaseSystemPackages, 0.2, 0.25, "Configuring hyprwayland-scanner..."); err != nil {
return fmt.Errorf("failed to configure hyprwayland-scanner: %w", err)
}
buildScannerCmd := exec.CommandContext(ctx, "cmake", "--build", "build", "-j")
buildScannerCmd.Dir = scannerDir
buildScannerCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(buildScannerCmd, progressChan, PhaseSystemPackages, 0.25, 0.35, "Building hyprwayland-scanner..."); err != nil {
return fmt.Errorf("failed to build hyprwayland-scanner: %w", err)
}
installScannerCmd := ExecSudoCommand(ctx, sudoPassword, "cmake --install build")
installScannerCmd.Dir = scannerDir
if err := installScannerCmd.Run(); err != nil {
return fmt.Errorf("failed to install hyprwayland-scanner: %w", err)
}
// Now build hyprpicker
tmpDir := filepath.Join(cacheDir, "hyprpicker-build")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.35,
Step: "Cloning hyprpicker repository...",
IsComplete: false,
CommandInfo: "git clone https://github.com/hyprwm/hyprpicker.git",
}
cloneCmd := exec.CommandContext(ctx, "git", "clone", "https://github.com/hyprwm/hyprpicker.git", tmpDir)
if err := cloneCmd.Run(); err != nil {
return fmt.Errorf("failed to clone hyprpicker: %w", err)
}
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.45,
Step: "Configuring hyprpicker build...",
IsComplete: false,
CommandInfo: "cmake -B build -S . -DCMAKE_BUILD_TYPE=Release",
}
configureCmd := exec.CommandContext(ctx, "cmake",
"--no-warn-unused-cli",
"-DCMAKE_BUILD_TYPE:STRING=Release",
"-DCMAKE_INSTALL_PREFIX:PATH=/usr",
"-S", ".",
"-B", "./build")
configureCmd.Dir = tmpDir
configureCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
output, err := configureCmd.CombinedOutput()
if err != nil {
m.log(fmt.Sprintf("cmake configure failed. Output:\n%s", string(output)))
return fmt.Errorf("failed to configure hyprpicker: %w\nCMake output:\n%s", err, string(output))
}
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.55,
Step: "Building hyprpicker...",
IsComplete: false,
CommandInfo: "cmake --build build --target hyprpicker",
}
buildCmd := exec.CommandContext(ctx, "cmake", "--build", "./build", "--config", "Release", "--target", "hyprpicker")
buildCmd.Dir = tmpDir
buildCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(buildCmd, progressChan, PhaseSystemPackages, 0.55, 0.8, "Building hyprpicker..."); err != nil {
return fmt.Errorf("failed to build hyprpicker: %w", err)
}
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.8,
Step: "Installing hyprpicker...",
IsComplete: false,
NeedsSudo: true,
CommandInfo: "sudo cmake --install build",
}
installCmd := ExecSudoCommand(ctx, sudoPassword, "cmake --install ./build")
installCmd.Dir = tmpDir
if err := installCmd.Run(); err != nil {
return fmt.Errorf("failed to install hyprpicker: %w", err)
}
m.log("hyprpicker installed successfully from source")
return nil
}
func (m *ManualPackageInstaller) installGhostty(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing Ghostty from source...")
@@ -592,12 +406,12 @@ func (m *ManualPackageInstaller) installGhostty(ctx context.Context, sudoPasswor
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0755); err != nil {
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "ghostty-build")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -714,7 +528,7 @@ func (m *ManualPackageInstaller) installDankMaterialShell(ctx context.Context, v
}
configDir := filepath.Dir(dmsPath)
if err := os.MkdirAll(configDir, 0755); err != nil {
if err := os.MkdirAll(configDir, 0o755); err != nil {
return fmt.Errorf("failed to create quickshell config directory: %w", err)
}
@@ -803,52 +617,6 @@ func (m *ManualPackageInstaller) installDankMaterialShell(ctx context.Context, v
return nil
}
func (m *ManualPackageInstaller) installCliphist(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing cliphist from source...")
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.1,
Step: "Installing cliphist via go install...",
IsComplete: false,
CommandInfo: "go install go.senan.xyz/cliphist@latest",
}
installCmd := exec.CommandContext(ctx, "go", "install", "go.senan.xyz/cliphist@latest")
if err := m.runWithProgressStep(installCmd, progressChan, PhaseSystemPackages, 0.1, 0.7, "Building cliphist..."); err != nil {
return fmt.Errorf("failed to install cliphist: %w", err)
}
homeDir := os.Getenv("HOME")
sourcePath := filepath.Join(homeDir, "go", "bin", "cliphist")
targetPath := "/usr/local/bin/cliphist"
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.7,
Step: "Installing cliphist binary to system...",
IsComplete: false,
NeedsSudo: true,
CommandInfo: fmt.Sprintf("sudo cp %s %s", sourcePath, targetPath),
}
copyCmd := exec.CommandContext(ctx, "sudo", "-S", "cp", sourcePath, targetPath)
copyCmd.Stdin = strings.NewReader(sudoPassword + "\n")
if err := copyCmd.Run(); err != nil {
return fmt.Errorf("failed to copy cliphist to /usr/local/bin: %w", err)
}
// Make it executable
chmodCmd := exec.CommandContext(ctx, "sudo", "-S", "chmod", "+x", targetPath)
chmodCmd.Stdin = strings.NewReader(sudoPassword + "\n")
if err := chmodCmd.Run(); err != nil {
return fmt.Errorf("failed to make cliphist executable: %w", err)
}
m.log("cliphist installed successfully from source")
return nil
}
func (m *ManualPackageInstaller) installXwaylandSatellite(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing xwayland-satellite from source...")

View File

@@ -15,6 +15,12 @@ func init() {
Register("opensuse-tumbleweed", "#73BA25", FamilySUSE, func(config DistroConfig, logChan chan<- string) Distribution {
return NewOpenSUSEDistribution(config, logChan)
})
Register("opensuse-leap", "#73BA25", FamilySUSE, func(config DistroConfig, logChan chan<- string) Distribution {
return NewOpenSUSEDistribution(config, logChan)
})
Register("opensuse-slowroll", "#73BA25", FamilySUSE, func(config DistroConfig, logChan chan<- string) Distribution {
return NewOpenSUSEDistribution(config, logChan)
})
}
type OpenSUSEDistribution struct {
@@ -78,10 +84,8 @@ func (o *OpenSUSEDistribution) DetectDependenciesWithTerminal(ctx context.Contex
dependencies = append(dependencies, o.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, o.detectMatugen())
dependencies = append(dependencies, o.detectDgop())
dependencies = append(dependencies, o.detectClipboardTools()...)
return dependencies, nil
}
@@ -107,10 +111,8 @@ func (o *OpenSUSEDistribution) GetPackageMappingWithVariants(wm deps.WindowManag
"ghostty": {Name: "ghostty", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
"cliphist": {Name: "cliphist", Repository: RepoTypeSystem},
// DMS packages from OBS
"dms (DankMaterialShell)": o.getDmsMapping(variants["dms (DankMaterialShell)"]),
@@ -371,7 +373,7 @@ func (o *OpenSUSEDistribution) InstallPackages(ctx context.Context, dependencies
o.log(fmt.Sprintf("Warning: failed to write window manager config: %v", err))
}
if err := o.EnableDMSService(ctx); err != nil {
if err := o.EnableDMSService(ctx, wm); err != nil {
o.log(fmt.Sprintf("Warning: failed to enable dms service: %v", err))
}
@@ -438,6 +440,19 @@ func (o *OpenSUSEDistribution) extractPackageNames(packages []PackageMapping) []
func (o *OpenSUSEDistribution) enableOBSRepos(ctx context.Context, obsPkgs []PackageMapping, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
enabledRepos := make(map[string]bool)
osInfo, err := GetOSInfo()
if err != nil {
return fmt.Errorf("failed to get OS info: %w", err)
}
obsDistroVersion := "openSUSE_Tumbleweed"
switch osInfo.Distribution.ID {
case "opensuse-leap":
obsDistroVersion = fmt.Sprintf("openSUSE_Leap_%s", osInfo.VersionID)
case "opensuse-slowroll":
obsDistroVersion = "openSUSE_Slowroll"
}
for _, pkg := range obsPkgs {
if pkg.RepoURL != "" && !enabledRepos[pkg.RepoURL] {
o.log(fmt.Sprintf("Enabling OBS repository: %s", pkg.RepoURL))
@@ -445,8 +460,8 @@ func (o *OpenSUSEDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Pac
// RepoURL format: "home:AvengeMedia:danklinux"
repoPath := strings.ReplaceAll(pkg.RepoURL, ":", ":/")
repoName := strings.ReplaceAll(pkg.RepoURL, ":", "-")
repoURL := fmt.Sprintf("https://download.opensuse.org/repositories/%s/openSUSE_Tumbleweed/%s.repo",
repoPath, pkg.RepoURL)
repoURL := fmt.Sprintf("https://download.opensuse.org/repositories/%s/%s/%s.repo",
repoPath, obsDistroVersion, pkg.RepoURL)
checkCmd := exec.CommandContext(ctx, "zypper", "repos", repoName)
if checkCmd.Run() == nil {
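
With obsDistroVersion in the path, the generated .repo URL now tracks the detected openSUSE release instead of being hard-coded to Tumbleweed. A standalone sketch of the resulting string for a hypothetical Leap 15.6 host:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        repo := "home:AvengeMedia:danklinux"
        obsDistroVersion := "openSUSE_Leap_15.6" // built from osInfo.VersionID; illustrative
        repoPath := strings.ReplaceAll(repo, ":", ":/")
        fmt.Printf("https://download.opensuse.org/repositories/%s/%s/%s.repo\n",
            repoPath, obsDistroVersion, repo)
        // https://download.opensuse.org/repositories/home:/AvengeMedia:/danklinux/openSUSE_Leap_15.6/home:AvengeMedia:danklinux.repo
    }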

View File

@@ -19,11 +19,12 @@ type DistroInfo struct {
// OSInfo contains complete OS information
type OSInfo struct {
Distribution DistroInfo
Version string
VersionID string
PrettyName string
Architecture string
Distribution DistroInfo
Version string
VersionID string
VersionCodename string
PrettyName string
Architecture string
}
// GetOSInfo detects the current OS and returns information about it
@@ -72,6 +73,8 @@ func GetOSInfo() (*OSInfo, error) {
info.VersionID = value
case "VERSION":
info.Version = value
case "VERSION_CODENAME":
info.VersionCodename = value
case "PRETTY_NAME":
info.PrettyName = value
}
@@ -100,6 +103,10 @@ func IsUnsupportedDistro(distroID, versionID string) bool {
}
if distroID == "debian" {
// unstable/sid support
if versionID == "sid" {
return false
}
if versionID == "" {
// debian testing/sid have no version ID
return false

View File

@@ -76,10 +76,8 @@ func (u *UbuntuDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, u.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, u.detectMatugen())
dependencies = append(dependencies, u.detectDgop())
dependencies = append(dependencies, u.detectClipboardTools()...)
return dependencies, nil
}
@@ -112,7 +110,6 @@ func (u *UbuntuDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"git": {Name: "git", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
@@ -121,7 +118,6 @@ func (u *UbuntuDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"quickshell": u.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "matugen", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
"dgop": {Name: "dgop", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
"cliphist": {Name: "cliphist", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
"ghostty": {Name: "ghostty", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
}
@@ -331,7 +327,7 @@ func (u *UbuntuDistribution) InstallPackages(ctx context.Context, dependencies [
u.log(fmt.Sprintf("Warning: failed to write window manager config: %v", err))
}
if err := u.EnableDMSService(ctx); err != nil {
if err := u.EnableDMSService(ctx, wm); err != nil {
u.log(fmt.Sprintf("Warning: failed to enable dms service: %v", err))
}
@@ -539,8 +535,6 @@ func (u *UbuntuDistribution) installBuildDependencies(ctx context.Context, manua
buildDeps["libpam0g-dev"] = true
case "matugen":
buildDeps["curl"] = true
case "cliphist":
// Go will be installed separately with PPA
}
}
@@ -550,7 +544,7 @@ func (u *UbuntuDistribution) installBuildDependencies(ctx context.Context, manua
if err := u.installRust(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Rust: %w", err)
}
case "cliphist", "dgop":
case "dgop":
if err := u.installGo(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Go: %w", err)
}

View File

@@ -518,7 +518,7 @@ func (m Model) categorizeDependencies() map[string][]DependencyInfo {
categories["Hyprland Components"] = append(categories["Hyprland Components"], dep)
case "niri":
categories["Niri Components"] = append(categories["Niri Components"], dep)
case "kitty", "alacritty", "ghostty", "hyprpicker":
case "kitty", "alacritty", "ghostty":
categories["Shared Components"] = append(categories["Shared Components"], dep)
default:
categories["Shared Components"] = append(categories["Shared Components"], dep)

View File

@@ -16,14 +16,14 @@ type DiscoveryConfig struct {
func DefaultDiscoveryConfig() *DiscoveryConfig {
var searchPaths []string
configHome := utils.XDGConfigHome()
if configHome != "" {
searchPaths = append(searchPaths, filepath.Join(configHome, "DankMaterialShell", "cheatsheets"))
configDir, err := os.UserConfigDir()
if err == nil && configDir != "" {
searchPaths = append(searchPaths, filepath.Join(configDir, "DankMaterialShell", "cheatsheets"))
}
configDirs := os.Getenv("XDG_CONFIG_DIRS")
if configDirs != "" {
for _, dir := range strings.Split(configDirs, ":") {
for dir := range strings.SplitSeq(configDirs, ":") {
if dir != "" {
searchPaths = append(searchPaths, filepath.Join(dir, "DankMaterialShell", "cheatsheets"))
}

View File

@@ -12,7 +12,7 @@ func TestNewJSONFileProvider(t *testing.T) {
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.json")
if err := os.WriteFile(testFile, []byte("{}"), 0644); err != nil {
if err := os.WriteFile(testFile, []byte("{}"), 0o644); err != nil {
t.Fatalf("Failed to create test file: %v", err)
}
@@ -81,7 +81,7 @@ func TestJSONFileProviderGetCheatSheet(t *testing.T) {
}
}`
if err := os.WriteFile(testFile, []byte(content), 0644); err != nil {
if err := os.WriteFile(testFile, []byte(content), 0o644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
@@ -135,7 +135,7 @@ func TestJSONFileProviderGetCheatSheetNoProvider(t *testing.T) {
"binds": {}
}`
if err := os.WriteFile(testFile, []byte(content), 0644); err != nil {
if err := os.WriteFile(testFile, []byte(content), 0o644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
@@ -181,7 +181,7 @@ func TestJSONFileProviderFlatArrayBackwardsCompat(t *testing.T) {
]
}`
if err := os.WriteFile(testFile, []byte(content), 0644); err != nil {
if err := os.WriteFile(testFile, []byte(content), 0o644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
@@ -216,7 +216,7 @@ func TestJSONFileProviderInvalidJSON(t *testing.T) {
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "invalid.json")
if err := os.WriteFile(testFile, []byte("not valid json"), 0644); err != nil {
if err := os.WriteFile(testFile, []byte("not valid json"), 0o644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}

View File

@@ -6,10 +6,10 @@ import (
"os/exec"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/keybinds"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/sblinch/kdl-go"
"github.com/sblinch/kdl-go/document"
)
@@ -30,7 +30,11 @@ func NewNiriProvider(configDir string) *NiriProvider {
}
func defaultNiriConfigDir() string {
return filepath.Join(utils.XDGConfigHome(), "niri")
configDir, err := os.UserConfigDir()
if err != nil {
return ""
}
return filepath.Join(configDir, "niri")
}
func (n *NiriProvider) Name() string {
@@ -153,6 +157,7 @@ func (n *NiriProvider) convertKeybind(kb *NiriKeyBinding, subcategory string, co
Subcategory: subcategory,
Source: source,
HideOnOverlay: kb.HideOnOverlay,
CooldownMs: kb.CooldownMs,
}
if source == "dms" && conflicts != nil {
@@ -310,7 +315,9 @@ func (n *NiriProvider) extractOptions(node *document.Node) map[string]any {
opts["repeat"] = val.String() == "true"
}
if val, ok := node.Properties.Get("cooldown-ms"); ok {
opts["cooldown-ms"] = val.String()
if ms, err := strconv.Atoi(val.String()); err == nil {
opts["cooldown-ms"] = ms
}
}
if val, ok := node.Properties.Get("allow-when-locked"); ok {
opts["allow-when-locked"] = val.String() == "true"
@@ -336,7 +343,14 @@ func (n *NiriProvider) buildBindNode(bind *overrideBind) *document.Node {
node.AddProperty("repeat", false, "")
}
if v, ok := bind.Options["cooldown-ms"]; ok {
node.AddProperty("cooldown-ms", v, "")
switch val := v.(type) {
case int:
node.AddProperty("cooldown-ms", val, "")
case string:
if ms, err := strconv.Atoi(val); err == nil {
node.AddProperty("cooldown-ms", ms, "")
}
}
}
if v, ok := bind.Options["allow-when-locked"]; ok && v == true {
node.AddProperty("allow-when-locked", true, "")
@@ -447,9 +461,16 @@ func (n *NiriProvider) getBindSortPriority(action string) int {
}
}
const dmsWarningHeader = `// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
`
func (n *NiriProvider) generateBindsContent(binds map[string]*overrideBind) string {
if len(binds) == 0 {
return "binds {}\n"
return dmsWarningHeader + "binds {}\n"
}
var regularBinds, recentWindowsBinds []*overrideBind
@@ -476,6 +497,7 @@ func (n *NiriProvider) generateBindsContent(binds map[string]*overrideBind) stri
var sb strings.Builder
sb.WriteString(dmsWarningHeader)
sb.WriteString("binds {\n")
for _, bind := range regularBinds {
n.writeBindNode(&sb, bind, " ")
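
The cooldown-ms option is now parsed to an int on read and converted back from legacy string values before writing, presumably so the regenerated KDL carries a numeric property rather than a quoted one. The conversion itself is plain strconv.Atoi; a minimal sketch:

    raw := "150" // cooldown-ms value as read from an existing config
    opts := map[string]any{}
    if ms, err := strconv.Atoi(raw); err == nil {
        opts["cooldown-ms"] = ms // stored as int; buildBindNode writes it back as a number
    }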

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/sblinch/kdl-go"
@@ -17,6 +18,7 @@ type NiriKeyBinding struct {
Args []string
Description string
HideOnOverlay bool
CooldownMs int
Source string
}
@@ -275,6 +277,7 @@ func (p *NiriParser) parseKeybindNode(node *document.Node, _ string) *NiriKeyBin
var description string
var hideOnOverlay bool
var cooldownMs int
if node.Properties != nil {
if val, ok := node.Properties.Get("hotkey-overlay-title"); ok {
switch val.ValueString() {
@@ -284,6 +287,9 @@ func (p *NiriParser) parseKeybindNode(node *document.Node, _ string) *NiriKeyBin
description = val.ValueString()
}
}
if val, ok := node.Properties.Get("cooldown-ms"); ok {
cooldownMs, _ = strconv.Atoi(val.String())
}
}
return &NiriKeyBinding{
@@ -293,6 +299,7 @@ func (p *NiriParser) parseKeybindNode(node *document.Node, _ string) *NiriKeyBin
Args: args,
Description: description,
HideOnOverlay: hideOnOverlay,
CooldownMs: cooldownMs,
Source: p.currentSource,
}
}

View File

@@ -6,6 +6,13 @@ import (
"testing"
)
const testHeader = `// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
`
func TestNiriProviderName(t *testing.T) {
provider := NewNiriProvider("")
if provider.Name() != "niri" {
@@ -197,7 +204,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
{
name: "empty binds",
binds: map[string]*overrideBind{},
expected: "binds {}\n",
expected: testHeader + "binds {}\n",
},
{
name: "simple spawn bind",
@@ -208,7 +215,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Description: "Open Terminal",
},
},
expected: `binds {
expected: testHeader + `binds {
Mod+T hotkey-overlay-title="Open Terminal" { spawn "kitty"; }
}
`,
@@ -222,7 +229,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Description: "Application Launcher",
},
},
expected: `binds {
expected: testHeader + `binds {
Mod+Space hotkey-overlay-title="Application Launcher" { spawn "dms" "ipc" "call" "spotlight" "toggle"; }
}
`,
@@ -236,7 +243,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Options: map[string]any{"allow-when-locked": true},
},
},
expected: `binds {
expected: testHeader + `binds {
XF86AudioMute allow-when-locked=true { spawn "dms" "ipc" "call" "audio" "mute"; }
}
`,
@@ -250,7 +257,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Description: "Close Window",
},
},
expected: `binds {
expected: testHeader + `binds {
Mod+Q hotkey-overlay-title="Close Window" { close-window; }
}
`,
@@ -263,7 +270,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Action: "next-window",
},
},
expected: `binds {
expected: testHeader + `binds {
}
recent-windows {
@@ -415,7 +422,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Focus Workspace 1",
},
},
expected: `binds {
expected: testHeader + `binds {
Mod+1 hotkey-overlay-title="Focus Workspace 1" { focus-workspace 1; }
}
`,
@@ -429,7 +436,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Focus Workspace 10",
},
},
expected: `binds {
expected: testHeader + `binds {
Mod+0 hotkey-overlay-title="Focus Workspace 10" { focus-workspace 10; }
}
`,
@@ -443,7 +450,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Adjust Column Width -10%",
},
},
expected: `binds {
expected: testHeader + `binds {
Super+Minus hotkey-overlay-title="Adjust Column Width -10%" { set-column-width "-10%"; }
}
`,
@@ -457,7 +464,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Adjust Column Width +10%",
},
},
expected: `binds {
expected: testHeader + `binds {
Super+Equal hotkey-overlay-title="Adjust Column Width +10%" { set-column-width "+10%"; }
}
`,
@@ -486,7 +493,7 @@ func TestNiriGenerateActionWithUnquotedPercentArg(t *testing.T) {
}
content := provider.generateBindsContent(binds)
expected := `binds {
expected := testHeader + `binds {
Super+Equal hotkey-overlay-title="Adjust Window Height +10%" { set-window-height "+10%"; }
}
`
@@ -507,7 +514,7 @@ func TestNiriGenerateSpawnWithNumericArgs(t *testing.T) {
}
content := provider.generateBindsContent(binds)
expected := `binds {
expected := testHeader + `binds {
XF86AudioLowerVolume allow-when-locked=true { spawn "dms" "ipc" "call" "audio" "decrement" "3"; }
}
`
@@ -528,7 +535,7 @@ func TestNiriGenerateSpawnNumericArgFromCLI(t *testing.T) {
}
content := provider.generateBindsContent(binds)
expected := `binds {
expected := testHeader + `binds {
XF86AudioLowerVolume allow-when-locked=true { spawn "dms" "ipc" "call" "audio" "decrement" "3"; }
}
`

View File

@@ -7,6 +7,7 @@ type Keybind struct {
Subcategory string `json:"subcat,omitempty"`
Source string `json:"source,omitempty"`
HideOnOverlay bool `json:"hideOnOverlay,omitempty"`
CooldownMs int `json:"cooldownMs,omitempty"`
Conflict *Keybind `json:"conflict,omitempty"`
}

View File

@@ -34,6 +34,7 @@ type Options struct {
StockColors string
SyncModeWithPortal bool
TerminalsAlwaysDark bool
SkipTemplates string
}
type ColorsOutput struct {
@@ -47,6 +48,18 @@ func (o *Options) ColorsOutput() string {
return filepath.Join(o.StateDir, "dms-colors.json")
}
func (o *Options) ShouldSkipTemplate(name string) bool {
if o.SkipTemplates == "" {
return false
}
for _, skip := range strings.Split(o.SkipTemplates, ",") {
if strings.TrimSpace(skip) == name {
return true
}
}
return false
}
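// Usage sketch (not part of this diff): how a comma-separated SkipTemplates
// value is matched by ShouldSkipTemplate above. Whitespace around entries is
// trimmed and matching is exact, not prefix-based.
//
//	opts := Options{SkipTemplates: "gtk, kitty,nvim"}
//	opts.ShouldSkipTemplate("kitty") // true  ("kitty" is listed)
//	opts.ShouldSkipTemplate("qt5ct") // false (not listed)
//	opts.ShouldSkipTemplate("nvi")   // false (exact match only)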
func Run(opts Options) error {
if opts.StateDir == "" {
return fmt.Errorf("state-dir is required")
@@ -218,34 +231,74 @@ output_path = '%s'
`, opts.ShellDir, opts.ColorsOutput())
switch opts.Mode {
case "light":
appendConfig(opts, cfgFile, "skip", "gtk3-light.toml")
default:
appendConfig(opts, cfgFile, "skip", "gtk3-dark.toml")
if !opts.ShouldSkipTemplate("gtk") {
switch opts.Mode {
case "light":
appendConfig(opts, cfgFile, "skip", "gtk3-light.toml")
default:
appendConfig(opts, cfgFile, "skip", "gtk3-dark.toml")
}
}
appendConfig(opts, cfgFile, "niri", "niri.toml")
appendConfig(opts, cfgFile, "qt5ct", "qt5ct.toml")
appendConfig(opts, cfgFile, "qt6ct", "qt6ct.toml")
appendConfig(opts, cfgFile, "firefox", "firefox.toml")
appendConfig(opts, cfgFile, "pywalfox", "pywalfox.toml")
appendConfig(opts, cfgFile, "vesktop", "vesktop.toml")
if !opts.ShouldSkipTemplate("niri") {
appendConfig(opts, cfgFile, "niri", "niri.toml")
}
if !opts.ShouldSkipTemplate("qt5ct") {
appendConfig(opts, cfgFile, "qt5ct", "qt5ct.toml")
}
if !opts.ShouldSkipTemplate("qt6ct") {
appendConfig(opts, cfgFile, "qt6ct", "qt6ct.toml")
}
if !opts.ShouldSkipTemplate("firefox") {
appendConfig(opts, cfgFile, "firefox", "firefox.toml")
}
if !opts.ShouldSkipTemplate("pywalfox") {
appendConfig(opts, cfgFile, "pywalfox", "pywalfox.toml")
}
if !opts.ShouldSkipTemplate("zenbrowser") {
appendConfig(opts, cfgFile, "zen", "zenbrowser.toml")
}
if !opts.ShouldSkipTemplate("vesktop") {
appendConfig(opts, cfgFile, "vesktop", "vesktop.toml")
}
if !opts.ShouldSkipTemplate("equibop") {
appendConfig(opts, cfgFile, "equibop", "equibop.toml")
}
if !opts.ShouldSkipTemplate("ghostty") {
appendTerminalConfig(opts, cfgFile, tmpDir, "ghostty", "ghostty.toml")
}
if !opts.ShouldSkipTemplate("kitty") {
appendTerminalConfig(opts, cfgFile, tmpDir, "kitty", "kitty.toml")
}
if !opts.ShouldSkipTemplate("foot") {
appendTerminalConfig(opts, cfgFile, tmpDir, "foot", "foot.toml")
}
if !opts.ShouldSkipTemplate("alacritty") {
appendTerminalConfig(opts, cfgFile, tmpDir, "alacritty", "alacritty.toml")
}
if !opts.ShouldSkipTemplate("wezterm") {
appendTerminalConfig(opts, cfgFile, tmpDir, "wezterm", "wezterm.toml")
}
if !opts.ShouldSkipTemplate("nvim") {
appendTerminalConfig(opts, cfgFile, tmpDir, "nvim", "neovim.toml")
}
appendTerminalConfig(opts, cfgFile, tmpDir, "ghostty", "ghostty.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "kitty", "kitty.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "foot", "foot.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "alacritty", "alacritty.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "wezterm", "wezterm.toml")
if !opts.ShouldSkipTemplate("dgop") {
appendConfig(opts, cfgFile, "dgop", "dgop.toml")
}
appendConfig(opts, cfgFile, "dgop", "dgop.toml")
if !opts.ShouldSkipTemplate("kcolorscheme") {
appendConfig(opts, cfgFile, "skip", "kcolorscheme.toml")
}
homeDir, _ := os.UserHomeDir()
appendVSCodeConfig(cfgFile, "vscode", filepath.Join(homeDir, ".vscode/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codium", filepath.Join(homeDir, ".vscode-oss/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codeoss", filepath.Join(homeDir, ".config/Code - OSS/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "cursor", filepath.Join(homeDir, ".cursor/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "windsurf", filepath.Join(homeDir, ".windsurf/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
if !opts.ShouldSkipTemplate("vscode") {
homeDir, _ := os.UserHomeDir()
appendVSCodeConfig(cfgFile, "vscode", filepath.Join(homeDir, ".vscode/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codium", filepath.Join(homeDir, ".vscode-oss/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codeoss", filepath.Join(homeDir, ".config/Code - OSS/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "cursor", filepath.Join(homeDir, ".cursor/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "windsurf", filepath.Join(homeDir, ".windsurf/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
}
if opts.RunUserTemplates {
if data, err := os.ReadFile(userConfigPath); err == nil {

View File

@@ -0,0 +1,229 @@
// Code generated by mockery v2.53.5. DO NOT EDIT.
package mocks_wlclient
import (
client "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
mock "github.com/stretchr/testify/mock"
)
// MockWaylandDisplay is an autogenerated mock type for the WaylandDisplay type
type MockWaylandDisplay struct {
mock.Mock
}
type MockWaylandDisplay_Expecter struct {
mock *mock.Mock
}
func (_m *MockWaylandDisplay) EXPECT() *MockWaylandDisplay_Expecter {
return &MockWaylandDisplay_Expecter{mock: &_m.Mock}
}
// Context provides a mock function with no fields
func (_m *MockWaylandDisplay) Context() *client.Context {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Context")
}
var r0 *client.Context
if rf, ok := ret.Get(0).(func() *client.Context); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*client.Context)
}
}
return r0
}
// MockWaylandDisplay_Context_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Context'
type MockWaylandDisplay_Context_Call struct {
*mock.Call
}
// Context is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) Context() *MockWaylandDisplay_Context_Call {
return &MockWaylandDisplay_Context_Call{Call: _e.mock.On("Context")}
}
func (_c *MockWaylandDisplay_Context_Call) Run(run func()) *MockWaylandDisplay_Context_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_Context_Call) Return(_a0 *client.Context) *MockWaylandDisplay_Context_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandDisplay_Context_Call) RunAndReturn(run func() *client.Context) *MockWaylandDisplay_Context_Call {
_c.Call.Return(run)
return _c
}
// Destroy provides a mock function with no fields
func (_m *MockWaylandDisplay) Destroy() error {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Destroy")
}
var r0 error
if rf, ok := ret.Get(0).(func() error); ok {
r0 = rf()
} else {
r0 = ret.Error(0)
}
return r0
}
// MockWaylandDisplay_Destroy_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Destroy'
type MockWaylandDisplay_Destroy_Call struct {
*mock.Call
}
// Destroy is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) Destroy() *MockWaylandDisplay_Destroy_Call {
return &MockWaylandDisplay_Destroy_Call{Call: _e.mock.On("Destroy")}
}
func (_c *MockWaylandDisplay_Destroy_Call) Run(run func()) *MockWaylandDisplay_Destroy_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_Destroy_Call) Return(_a0 error) *MockWaylandDisplay_Destroy_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandDisplay_Destroy_Call) RunAndReturn(run func() error) *MockWaylandDisplay_Destroy_Call {
_c.Call.Return(run)
return _c
}
// GetRegistry provides a mock function with no fields
func (_m *MockWaylandDisplay) GetRegistry() (*client.Registry, error) {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for GetRegistry")
}
var r0 *client.Registry
var r1 error
if rf, ok := ret.Get(0).(func() (*client.Registry, error)); ok {
return rf()
}
if rf, ok := ret.Get(0).(func() *client.Registry); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*client.Registry)
}
}
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// MockWaylandDisplay_GetRegistry_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRegistry'
type MockWaylandDisplay_GetRegistry_Call struct {
*mock.Call
}
// GetRegistry is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) GetRegistry() *MockWaylandDisplay_GetRegistry_Call {
return &MockWaylandDisplay_GetRegistry_Call{Call: _e.mock.On("GetRegistry")}
}
func (_c *MockWaylandDisplay_GetRegistry_Call) Run(run func()) *MockWaylandDisplay_GetRegistry_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_GetRegistry_Call) Return(_a0 *client.Registry, _a1 error) *MockWaylandDisplay_GetRegistry_Call {
_c.Call.Return(_a0, _a1)
return _c
}
func (_c *MockWaylandDisplay_GetRegistry_Call) RunAndReturn(run func() (*client.Registry, error)) *MockWaylandDisplay_GetRegistry_Call {
_c.Call.Return(run)
return _c
}
// Roundtrip provides a mock function with no fields
func (_m *MockWaylandDisplay) Roundtrip() error {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Roundtrip")
}
var r0 error
if rf, ok := ret.Get(0).(func() error); ok {
r0 = rf()
} else {
r0 = ret.Error(0)
}
return r0
}
// MockWaylandDisplay_Roundtrip_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Roundtrip'
type MockWaylandDisplay_Roundtrip_Call struct {
*mock.Call
}
// Roundtrip is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) Roundtrip() *MockWaylandDisplay_Roundtrip_Call {
return &MockWaylandDisplay_Roundtrip_Call{Call: _e.mock.On("Roundtrip")}
}
func (_c *MockWaylandDisplay_Roundtrip_Call) Run(run func()) *MockWaylandDisplay_Roundtrip_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_Roundtrip_Call) Return(_a0 error) *MockWaylandDisplay_Roundtrip_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandDisplay_Roundtrip_Call) RunAndReturn(run func() error) *MockWaylandDisplay_Roundtrip_Call {
_c.Call.Return(run)
return _c
}
// NewMockWaylandDisplay creates a new instance of MockWaylandDisplay. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockWaylandDisplay(t interface {
mock.TestingT
Cleanup(func())
}) *MockWaylandDisplay {
mock := &MockWaylandDisplay{}
mock.Mock.Test(t)
t.Cleanup(func() { mock.AssertExpectations(t) })
return mock
}
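// Test sketch (not part of the generated file): exercising the expecter API
// above from a _test.go file in this package; the test name is illustrative
// and "testing" is assumed to be imported there.
//
//	func TestRoundtrip(t *testing.T) {
//	    m := NewMockWaylandDisplay(t)
//	    m.EXPECT().Roundtrip().Return(nil)
//	    if err := m.Roundtrip(); err != nil {
//	        t.Fatalf("unexpected error: %v", err)
//	    }
//	}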

View File

@@ -0,0 +1,226 @@
// Code generated by mockery v2.53.5. DO NOT EDIT.
package mocks_wlcontext
import (
client "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
mock "github.com/stretchr/testify/mock"
)
// MockWaylandContext is an autogenerated mock type for the WaylandContext type
type MockWaylandContext struct {
mock.Mock
}
type MockWaylandContext_Expecter struct {
mock *mock.Mock
}
func (_m *MockWaylandContext) EXPECT() *MockWaylandContext_Expecter {
return &MockWaylandContext_Expecter{mock: &_m.Mock}
}
// Close provides a mock function with no fields
func (_m *MockWaylandContext) Close() {
_m.Called()
}
// MockWaylandContext_Close_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Close'
type MockWaylandContext_Close_Call struct {
*mock.Call
}
// Close is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) Close() *MockWaylandContext_Close_Call {
return &MockWaylandContext_Close_Call{Call: _e.mock.On("Close")}
}
func (_c *MockWaylandContext_Close_Call) Run(run func()) *MockWaylandContext_Close_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_Close_Call) Return() *MockWaylandContext_Close_Call {
_c.Call.Return()
return _c
}
func (_c *MockWaylandContext_Close_Call) RunAndReturn(run func()) *MockWaylandContext_Close_Call {
_c.Run(run)
return _c
}
// Display provides a mock function with no fields
func (_m *MockWaylandContext) Display() *client.Display {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Display")
}
var r0 *client.Display
if rf, ok := ret.Get(0).(func() *client.Display); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*client.Display)
}
}
return r0
}
// MockWaylandContext_Display_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Display'
type MockWaylandContext_Display_Call struct {
*mock.Call
}
// Display is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) Display() *MockWaylandContext_Display_Call {
return &MockWaylandContext_Display_Call{Call: _e.mock.On("Display")}
}
func (_c *MockWaylandContext_Display_Call) Run(run func()) *MockWaylandContext_Display_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_Display_Call) Return(_a0 *client.Display) *MockWaylandContext_Display_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandContext_Display_Call) RunAndReturn(run func() *client.Display) *MockWaylandContext_Display_Call {
_c.Call.Return(run)
return _c
}
// FatalError provides a mock function with no fields
func (_m *MockWaylandContext) FatalError() <-chan error {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for FatalError")
}
var r0 <-chan error
if rf, ok := ret.Get(0).(func() <-chan error); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(<-chan error)
}
}
return r0
}
// MockWaylandContext_FatalError_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FatalError'
type MockWaylandContext_FatalError_Call struct {
*mock.Call
}
// FatalError is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) FatalError() *MockWaylandContext_FatalError_Call {
return &MockWaylandContext_FatalError_Call{Call: _e.mock.On("FatalError")}
}
func (_c *MockWaylandContext_FatalError_Call) Run(run func()) *MockWaylandContext_FatalError_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_FatalError_Call) Return(_a0 <-chan error) *MockWaylandContext_FatalError_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandContext_FatalError_Call) RunAndReturn(run func() <-chan error) *MockWaylandContext_FatalError_Call {
_c.Call.Return(run)
return _c
}
// Post provides a mock function with given fields: fn
func (_m *MockWaylandContext) Post(fn func()) {
_m.Called(fn)
}
// MockWaylandContext_Post_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Post'
type MockWaylandContext_Post_Call struct {
*mock.Call
}
// Post is a helper method to define mock.On call
// - fn func()
func (_e *MockWaylandContext_Expecter) Post(fn interface{}) *MockWaylandContext_Post_Call {
return &MockWaylandContext_Post_Call{Call: _e.mock.On("Post", fn)}
}
func (_c *MockWaylandContext_Post_Call) Run(run func(fn func())) *MockWaylandContext_Post_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(func()))
})
return _c
}
func (_c *MockWaylandContext_Post_Call) Return() *MockWaylandContext_Post_Call {
_c.Call.Return()
return _c
}
func (_c *MockWaylandContext_Post_Call) RunAndReturn(run func(func())) *MockWaylandContext_Post_Call {
_c.Run(run)
return _c
}
// Start provides a mock function with no fields
func (_m *MockWaylandContext) Start() {
_m.Called()
}
// MockWaylandContext_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start'
type MockWaylandContext_Start_Call struct {
*mock.Call
}
// Start is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) Start() *MockWaylandContext_Start_Call {
return &MockWaylandContext_Start_Call{Call: _e.mock.On("Start")}
}
func (_c *MockWaylandContext_Start_Call) Run(run func()) *MockWaylandContext_Start_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_Start_Call) Return() *MockWaylandContext_Start_Call {
_c.Call.Return()
return _c
}
func (_c *MockWaylandContext_Start_Call) RunAndReturn(run func()) *MockWaylandContext_Start_Call {
_c.Run(run)
return _c
}
// NewMockWaylandContext creates a new instance of MockWaylandContext. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockWaylandContext(t interface {
mock.TestingT
Cleanup(func())
}) *MockWaylandContext {
mock := &MockWaylandContext{}
mock.Mock.Test(t)
t.Cleanup(func() { mock.AssertExpectations(t) })
return mock
}

View File

@@ -9,7 +9,7 @@ import (
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/spf13/afero"
)
@@ -33,7 +33,12 @@ func NewManagerWithFs(fs afero.Fs) (*Manager, error) {
}
func getPluginsDir() string {
return filepath.Join(utils.XDGConfigHome(), "DankMaterialShell", "plugins")
configDir, err := os.UserConfigDir()
if err != nil {
log.Error("failed to get user config dir", "err", err)
return ""
}
return filepath.Join(configDir, "DankMaterialShell", "plugins")
}
func (m *Manager) IsInstalled(plugin Plugin) (bool, error) {

View File

@@ -0,0 +1,695 @@
// Generated by go-wayland-scanner
// https://github.com/yaslama/go-wayland/cmd/go-wayland-scanner
// XML file : internal/proto/xml/ext-data-control-v1.xml
//
// ext_data_control_v1 Protocol Copyright:
//
// Copyright © 2018 Simon Ser
// Copyright © 2019 Ivan Molodetskikh
// Copyright © 2024 Neal Gompa
//
// Permission to use, copy, modify, distribute, and sell this
// software and its documentation for any purpose is hereby granted
// without fee, provided that the above copyright notice appear in
// all copies and that both that copyright notice and this permission
// notice appear in supporting documentation, and that the name of
// the copyright holders not be used in advertising or publicity
// pertaining to distribution of the software without specific,
// written prior permission. The copyright holders make no
// representations about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
//
// THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS
// SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
// SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
// AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
// ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
// THIS SOFTWARE.
package ext_data_control
import (
"github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
"golang.org/x/sys/unix"
)
// ExtDataControlManagerV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlManagerV1InterfaceName = "ext_data_control_manager_v1"
// ExtDataControlManagerV1 : manager to control data devices
//
// This interface is a manager that allows creating per-seat data device
// controls.
type ExtDataControlManagerV1 struct {
client.BaseProxy
}
// NewExtDataControlManagerV1 : manager to control data devices
//
// This interface is a manager that allows creating per-seat data device
// controls.
func NewExtDataControlManagerV1(ctx *client.Context) *ExtDataControlManagerV1 {
extDataControlManagerV1 := &ExtDataControlManagerV1{}
ctx.Register(extDataControlManagerV1)
return extDataControlManagerV1
}
// CreateDataSource : create a new data source
//
// Create a new data source.
func (i *ExtDataControlManagerV1) CreateDataSource() (*ExtDataControlSourceV1, error) {
id := NewExtDataControlSourceV1(i.Context())
const opcode = 0
const _reqBufLen = 8 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutUint32(_reqBuf[l:l+4], id.ID())
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return id, err
}
// GetDataDevice : get a data device for a seat
//
// Create a data device that can be used to manage a seat's selection.
func (i *ExtDataControlManagerV1) GetDataDevice(seat *client.Seat) (*ExtDataControlDeviceV1, error) {
id := NewExtDataControlDeviceV1(i.Context())
const opcode = 1
const _reqBufLen = 8 + 4 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutUint32(_reqBuf[l:l+4], id.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], seat.ID())
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return id, err
}
// GetDataDeviceWithProxy : get a data device for a seat using a pre-created proxy
//
// Like GetDataDevice, but uses a pre-created ExtDataControlDeviceV1 proxy.
// This allows setting up event handlers before the request is sent.
func (i *ExtDataControlManagerV1) GetDataDeviceWithProxy(device *ExtDataControlDeviceV1, seat *client.Seat) error {
const opcode = 1
const _reqBufLen = 8 + 4 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutUint32(_reqBuf[l:l+4], device.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], seat.ID())
l += 4
return i.Context().WriteMsg(_reqBuf[:], nil)
}
// Destroy : destroy the manager
//
// All objects created by the manager will still remain valid, until their
// appropriate destroy request has been called.
func (i *ExtDataControlManagerV1) Destroy() error {
defer i.MarkZombie()
const opcode = 2
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// ExtDataControlDeviceV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlDeviceV1InterfaceName = "ext_data_control_device_v1"
// ExtDataControlDeviceV1 : manage a data device for a seat
//
// This interface allows a client to manage a seat's selection.
//
// When the seat is destroyed, this object becomes inert.
type ExtDataControlDeviceV1 struct {
client.BaseProxy
dataOfferHandler ExtDataControlDeviceV1DataOfferHandlerFunc
selectionHandler ExtDataControlDeviceV1SelectionHandlerFunc
finishedHandler ExtDataControlDeviceV1FinishedHandlerFunc
primarySelectionHandler ExtDataControlDeviceV1PrimarySelectionHandlerFunc
}
// NewExtDataControlDeviceV1 : manage a data device for a seat
//
// This interface allows a client to manage a seat's selection.
//
// When the seat is destroyed, this object becomes inert.
func NewExtDataControlDeviceV1(ctx *client.Context) *ExtDataControlDeviceV1 {
extDataControlDeviceV1 := &ExtDataControlDeviceV1{}
ctx.Register(extDataControlDeviceV1)
return extDataControlDeviceV1
}
// SetSelection : copy data to the selection
//
// This request asks the compositor to set the selection to the data from
// the source on behalf of the client.
//
// The given source may not be used in any further set_selection or
// set_primary_selection requests. Attempting to use a previously used
// source triggers the used_source protocol error.
//
// To unset the selection, set the source to NULL.
func (i *ExtDataControlDeviceV1) SetSelection(source *ExtDataControlSourceV1) error {
const opcode = 0
const _reqBufLen = 8 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
if source == nil {
client.PutUint32(_reqBuf[l:l+4], 0)
l += 4
} else {
client.PutUint32(_reqBuf[l:l+4], source.ID())
l += 4
}
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// Destroy : destroy this data device
//
// Destroys the data device object.
func (i *ExtDataControlDeviceV1) Destroy() error {
defer i.MarkZombie()
const opcode = 1
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// SetPrimarySelection : copy data to the primary selection
//
// This request asks the compositor to set the primary selection to the
// data from the source on behalf of the client.
//
// The given source may not be used in any further set_selection or
// set_primary_selection requests. Attempting to use a previously used
// source triggers the used_source protocol error.
//
// To unset the primary selection, set the source to NULL.
//
// The compositor will ignore this request if it does not support primary
// selection.
func (i *ExtDataControlDeviceV1) SetPrimarySelection(source *ExtDataControlSourceV1) error {
const opcode = 2
const _reqBufLen = 8 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
if source == nil {
client.PutUint32(_reqBuf[l:l+4], 0)
l += 4
} else {
client.PutUint32(_reqBuf[l:l+4], source.ID())
l += 4
}
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
type ExtDataControlDeviceV1Error uint32
// ExtDataControlDeviceV1Error :
const (
// ExtDataControlDeviceV1ErrorUsedSource : source given to set_selection or set_primary_selection was already used before
ExtDataControlDeviceV1ErrorUsedSource ExtDataControlDeviceV1Error = 1
)
func (e ExtDataControlDeviceV1Error) Name() string {
switch e {
case ExtDataControlDeviceV1ErrorUsedSource:
return "used_source"
default:
return ""
}
}
func (e ExtDataControlDeviceV1Error) Value() string {
switch e {
case ExtDataControlDeviceV1ErrorUsedSource:
return "1"
default:
return ""
}
}
func (e ExtDataControlDeviceV1Error) String() string {
return e.Name() + "=" + e.Value()
}
// ExtDataControlDeviceV1DataOfferEvent : introduce a new ext_data_control_offer
//
// The data_offer event introduces a new ext_data_control_offer object,
// which will subsequently be used in either the
// ext_data_control_device.selection event (for the regular clipboard
// selections) or the ext_data_control_device.primary_selection event (for
// the primary clipboard selections). Immediately following the
// ext_data_control_device.data_offer event, the new data_offer object
// will send out ext_data_control_offer.offer events to describe the MIME
// types it offers.
type ExtDataControlDeviceV1DataOfferEvent struct {
Id *ExtDataControlOfferV1
}
type ExtDataControlDeviceV1DataOfferHandlerFunc func(ExtDataControlDeviceV1DataOfferEvent)
// SetDataOfferHandler : sets handler for ExtDataControlDeviceV1DataOfferEvent
func (i *ExtDataControlDeviceV1) SetDataOfferHandler(f ExtDataControlDeviceV1DataOfferHandlerFunc) {
i.dataOfferHandler = f
}
// ExtDataControlDeviceV1SelectionEvent : advertise new selection
//
// The selection event is sent out to notify the client of a new
// ext_data_control_offer for the selection for this device. The
// ext_data_control_device.data_offer and the ext_data_control_offer.offer
// events are sent out immediately before this event to introduce the data
// offer object. The selection event is sent to a client when a new
// selection is set. The ext_data_control_offer is valid until a new
// ext_data_control_offer or NULL is received. The client must destroy the
// previous selection ext_data_control_offer, if any, upon receiving this
// event. Regardless, the previous selection will be ignored once a new
// selection ext_data_control_offer is received.
//
// The first selection event is sent upon binding the
// ext_data_control_device object.
type ExtDataControlDeviceV1SelectionEvent struct {
Id *ExtDataControlOfferV1
OfferId uint32 // Raw object ID for external registry lookups
}
type ExtDataControlDeviceV1SelectionHandlerFunc func(ExtDataControlDeviceV1SelectionEvent)
// SetSelectionHandler : sets handler for ExtDataControlDeviceV1SelectionEvent
func (i *ExtDataControlDeviceV1) SetSelectionHandler(f ExtDataControlDeviceV1SelectionHandlerFunc) {
i.selectionHandler = f
}
// ExtDataControlDeviceV1FinishedEvent : this data control is no longer valid
//
// This data control object is no longer valid and should be destroyed by
// the client.
type ExtDataControlDeviceV1FinishedEvent struct{}
type ExtDataControlDeviceV1FinishedHandlerFunc func(ExtDataControlDeviceV1FinishedEvent)
// SetFinishedHandler : sets handler for ExtDataControlDeviceV1FinishedEvent
func (i *ExtDataControlDeviceV1) SetFinishedHandler(f ExtDataControlDeviceV1FinishedHandlerFunc) {
i.finishedHandler = f
}
// ExtDataControlDeviceV1PrimarySelectionEvent : advertise new primary selection
//
// The primary_selection event is sent out to notify the client of a new
// ext_data_control_offer for the primary selection for this device. The
// ext_data_control_device.data_offer and the ext_data_control_offer.offer
// events are sent out immediately before this event to introduce the data
// offer object. The primary_selection event is sent to a client when a
// new primary selection is set. The ext_data_control_offer is valid until
// a new ext_data_control_offer or NULL is received. The client must
// destroy the previous primary selection ext_data_control_offer, if any,
// upon receiving this event. Regardless, the previous primary selection
// will be ignored once a new primary selection ext_data_control_offer is
// received.
//
// If the compositor supports primary selection, the first
// primary_selection event is sent upon binding the
// ext_data_control_device object.
type ExtDataControlDeviceV1PrimarySelectionEvent struct {
Id *ExtDataControlOfferV1
OfferId uint32 // Raw object ID for external registry lookups
}
type ExtDataControlDeviceV1PrimarySelectionHandlerFunc func(ExtDataControlDeviceV1PrimarySelectionEvent)
// SetPrimarySelectionHandler : sets handler for ExtDataControlDeviceV1PrimarySelectionEvent
func (i *ExtDataControlDeviceV1) SetPrimarySelectionHandler(f ExtDataControlDeviceV1PrimarySelectionHandlerFunc) {
i.primarySelectionHandler = f
}
func (i *ExtDataControlDeviceV1) Dispatch(opcode uint32, fd int, data []byte) {
switch opcode {
case 0:
// data_offer event: server creates a new object (new_id)
if i.dataOfferHandler == nil {
return
}
var e ExtDataControlDeviceV1DataOfferEvent
l := 0
newID := client.Uint32(data[l : l+4])
l += 4
ctx := i.Context()
offer := &ExtDataControlOfferV1{}
offer.SetContext(ctx)
offer.SetID(newID)
ctx.RegisterWithID(offer, newID)
e.Id = offer
i.dataOfferHandler(e)
case 1:
// selection event: nullable object reference
if i.selectionHandler == nil {
return
}
var e ExtDataControlDeviceV1SelectionEvent
l := 0
objID := client.Uint32(data[l : l+4])
l += 4
e.OfferId = objID
if objID != 0 {
if p := i.Context().GetProxy(objID); p != nil {
e.Id = p.(*ExtDataControlOfferV1)
}
}
i.selectionHandler(e)
case 2:
if i.finishedHandler == nil {
return
}
var e ExtDataControlDeviceV1FinishedEvent
i.finishedHandler(e)
case 3:
// primary_selection event: nullable object reference
if i.primarySelectionHandler == nil {
return
}
var e ExtDataControlDeviceV1PrimarySelectionEvent
l := 0
objID := client.Uint32(data[l : l+4])
l += 4
e.OfferId = objID
if objID != 0 {
if p := i.Context().GetProxy(objID); p != nil {
e.Id = p.(*ExtDataControlOfferV1)
}
}
i.primarySelectionHandler(e)
}
}
// ExtDataControlSourceV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlSourceV1InterfaceName = "ext_data_control_source_v1"
// ExtDataControlSourceV1 : offer to transfer data
//
// The ext_data_control_source object is the source side of a
// ext_data_control_offer. It is created by the source client in a data
// transfer and provides a way to describe the offered data and a way to
// respond to requests to transfer the data.
type ExtDataControlSourceV1 struct {
client.BaseProxy
sendHandler ExtDataControlSourceV1SendHandlerFunc
cancelledHandler ExtDataControlSourceV1CancelledHandlerFunc
}
// NewExtDataControlSourceV1 : offer to transfer data
//
// The ext_data_control_source object is the source side of a
// ext_data_control_offer. It is created by the source client in a data
// transfer and provides a way to describe the offered data and a way to
// respond to requests to transfer the data.
func NewExtDataControlSourceV1(ctx *client.Context) *ExtDataControlSourceV1 {
extDataControlSourceV1 := &ExtDataControlSourceV1{}
ctx.Register(extDataControlSourceV1)
return extDataControlSourceV1
}
// Offer : add an offered MIME type
//
// This request adds a MIME type to the set of MIME types advertised to
// targets. Can be called several times to offer multiple types.
//
// Calling this after ext_data_control_device.set_selection is a protocol
// error.
//
// mimeType: MIME type offered by the data source
func (i *ExtDataControlSourceV1) Offer(mimeType string) error {
const opcode = 0
mimeTypeLen := client.PaddedLen(len(mimeType) + 1)
_reqBufLen := 8 + (4 + mimeTypeLen)
_reqBuf := make([]byte, _reqBufLen)
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutString(_reqBuf[l:l+(4+mimeTypeLen)], mimeType)
l += (4 + mimeTypeLen)
err := i.Context().WriteMsg(_reqBuf, nil)
return err
}
// Destroy : destroy this source
//
// Destroys the data source object.
func (i *ExtDataControlSourceV1) Destroy() error {
defer i.MarkZombie()
const opcode = 1
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
type ExtDataControlSourceV1Error uint32
// ExtDataControlSourceV1Error :
const (
// ExtDataControlSourceV1ErrorInvalidOffer : offer sent after ext_data_control_device.set_selection
ExtDataControlSourceV1ErrorInvalidOffer ExtDataControlSourceV1Error = 1
)
func (e ExtDataControlSourceV1Error) Name() string {
switch e {
case ExtDataControlSourceV1ErrorInvalidOffer:
return "invalid_offer"
default:
return ""
}
}
func (e ExtDataControlSourceV1Error) Value() string {
switch e {
case ExtDataControlSourceV1ErrorInvalidOffer:
return "1"
default:
return ""
}
}
func (e ExtDataControlSourceV1Error) String() string {
return e.Name() + "=" + e.Value()
}
// ExtDataControlSourceV1SendEvent : send the data
//
// Request for data from the client. Send the data as the specified MIME
// type over the passed file descriptor, then close it.
type ExtDataControlSourceV1SendEvent struct {
MimeType string
Fd int
}
type ExtDataControlSourceV1SendHandlerFunc func(ExtDataControlSourceV1SendEvent)
// SetSendHandler : sets handler for ExtDataControlSourceV1SendEvent
func (i *ExtDataControlSourceV1) SetSendHandler(f ExtDataControlSourceV1SendHandlerFunc) {
i.sendHandler = f
}
// ExtDataControlSourceV1CancelledEvent : selection was cancelled
//
// This data source is no longer valid. The data source has been replaced
// by another data source.
//
// The client should clean up and destroy this data source.
type ExtDataControlSourceV1CancelledEvent struct{}
type ExtDataControlSourceV1CancelledHandlerFunc func(ExtDataControlSourceV1CancelledEvent)
// SetCancelledHandler : sets handler for ExtDataControlSourceV1CancelledEvent
func (i *ExtDataControlSourceV1) SetCancelledHandler(f ExtDataControlSourceV1CancelledHandlerFunc) {
i.cancelledHandler = f
}
func (i *ExtDataControlSourceV1) Dispatch(opcode uint32, fd int, data []byte) {
switch opcode {
case 0:
if i.sendHandler == nil {
if fd != -1 {
unix.Close(fd)
}
return
}
var e ExtDataControlSourceV1SendEvent
l := 0
mimeTypeLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
l += 4
e.MimeType = client.String(data[l : l+mimeTypeLen])
l += mimeTypeLen
e.Fd = fd
i.sendHandler(e)
case 1:
if i.cancelledHandler == nil {
return
}
var e ExtDataControlSourceV1CancelledEvent
i.cancelledHandler(e)
}
}
// ExtDataControlOfferV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlOfferV1InterfaceName = "ext_data_control_offer_v1"
// ExtDataControlOfferV1 : offer to transfer data
//
// A ext_data_control_offer represents a piece of data offered for transfer
// by another client (the source client). The offer describes the different
// MIME types that the data can be converted to and provides the mechanism
// for transferring the data directly from the source client.
type ExtDataControlOfferV1 struct {
client.BaseProxy
offerHandler ExtDataControlOfferV1OfferHandlerFunc
}
// NewExtDataControlOfferV1 : offer to transfer data
//
// A ext_data_control_offer represents a piece of data offered for transfer
// by another client (the source client). The offer describes the different
// MIME types that the data can be converted to and provides the mechanism
// for transferring the data directly from the source client.
func NewExtDataControlOfferV1(ctx *client.Context) *ExtDataControlOfferV1 {
extDataControlOfferV1 := &ExtDataControlOfferV1{}
ctx.Register(extDataControlOfferV1)
return extDataControlOfferV1
}
// Receive : request that the data is transferred
//
// To transfer the offered data, the client issues this request and
// indicates the MIME type it wants to receive. The transfer happens
// through the passed file descriptor (typically created with the pipe
// system call). The source client writes the data in the MIME type
// representation requested and then closes the file descriptor.
//
// The receiving client reads from the read end of the pipe until EOF and
// then closes its end, at which point the transfer is complete.
//
// This request may happen multiple times for different MIME types.
//
// mimeType: MIME type desired by receiver
// fd: file descriptor for data transfer
func (i *ExtDataControlOfferV1) Receive(mimeType string, fd int) error {
const opcode = 0
mimeTypeLen := client.PaddedLen(len(mimeType) + 1)
_reqBufLen := 8 + (4 + mimeTypeLen)
_reqBuf := make([]byte, _reqBufLen)
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutString(_reqBuf[l:l+(4+mimeTypeLen)], mimeType)
l += (4 + mimeTypeLen)
oob := unix.UnixRights(int(fd))
err := i.Context().WriteMsg(_reqBuf, oob)
return err
}
// Destroy : destroy this offer
//
// Destroys the data offer object.
func (i *ExtDataControlOfferV1) Destroy() error {
defer i.MarkZombie()
const opcode = 1
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// ExtDataControlOfferV1OfferEvent : advertise offered MIME type
//
// Sent immediately after creating the ext_data_control_offer object.
// One event per offered MIME type.
type ExtDataControlOfferV1OfferEvent struct {
MimeType string
}
type ExtDataControlOfferV1OfferHandlerFunc func(ExtDataControlOfferV1OfferEvent)
// SetOfferHandler : sets handler for ExtDataControlOfferV1OfferEvent
func (i *ExtDataControlOfferV1) SetOfferHandler(f ExtDataControlOfferV1OfferHandlerFunc) {
i.offerHandler = f
}
func (i *ExtDataControlOfferV1) Dispatch(opcode uint32, fd int, data []byte) {
switch opcode {
case 0:
if i.offerHandler == nil {
return
}
var e ExtDataControlOfferV1OfferEvent
l := 0
mimeTypeLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
l += 4
e.MimeType = client.String(data[l : l+mimeTypeLen])
l += mimeTypeLen
i.offerHandler(e)
}
}
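// Usage sketch (not part of the generated bindings): wiring selection
// listeners with the types above. Obtaining the *client.Context and the
// *client.Seat (display connect plus registry bind) is assumed to happen
// elsewhere; only calls defined in this file are used, and the function
// name is illustrative.
//
//	func watchSelection(ctx *client.Context, seat *client.Seat) error {
//	    mgr := NewExtDataControlManagerV1(ctx) // normally bound via the registry
//	    dev, err := mgr.GetDataDevice(seat)
//	    if err != nil {
//	        return err
//	    }
//	    dev.SetDataOfferHandler(func(e ExtDataControlDeviceV1DataOfferEvent) {
//	        e.Id.SetOfferHandler(func(o ExtDataControlOfferV1OfferEvent) {
//	            _ = o.MimeType // each offer advertises its MIME types first
//	        })
//	    })
//	    dev.SetSelectionHandler(func(e ExtDataControlDeviceV1SelectionEvent) {
//	        if e.Id == nil {
//	            return // selection was cleared
//	        }
//	        // e.Id.Receive(mimeType, pipeWriteFd) starts the actual transfer
//	    })
//	    return nil
//	}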

View File

@@ -238,7 +238,7 @@ func (i *ZwlrOutputManagerV1) Dispatch(opcode uint32, fd int, data []byte) {
l := 0
objectID := client.Uint32(data[l : l+4])
proxy := i.Context().GetProxy(objectID)
if proxy == nil {
if proxy == nil || proxy.IsZombie() {
head := &ZwlrOutputHeadV1{}
head.SetContext(i.Context())
head.SetID(objectID)
@@ -723,7 +723,7 @@ func (i *ZwlrOutputHeadV1) Dispatch(opcode uint32, fd int, data []byte) {
l := 0
objectID := client.Uint32(data[l : l+4])
proxy := i.Context().GetProxy(objectID)
if proxy == nil {
if proxy == nil || proxy.IsZombie() {
mode := &ZwlrOutputModeV1{}
mode.SetContext(i.Context())
mode.SetID(objectID)
@@ -761,8 +761,8 @@ func (i *ZwlrOutputHeadV1) Dispatch(opcode uint32, fd int, data []byte) {
l := 0
objectID := client.Uint32(data[l : l+4])
proxy := i.Context().GetProxy(objectID)
if proxy == nil {
// Mode not yet registered, create it
if proxy == nil || proxy.IsZombie() {
// Mode not yet registered or zombie, create fresh
mode := &ZwlrOutputModeV1{}
mode.SetContext(i.Context())
mode.SetID(objectID)

View File

@@ -0,0 +1,276 @@
<?xml version="1.0" encoding="UTF-8"?>
<protocol name="ext_data_control_v1">
<copyright>
Copyright © 2018 Simon Ser
Copyright © 2019 Ivan Molodetskikh
Copyright © 2024 Neal Gompa
Permission to use, copy, modify, distribute, and sell this
software and its documentation for any purpose is hereby granted
without fee, provided that the above copyright notice appear in
all copies and that both that copyright notice and this permission
notice appear in supporting documentation, and that the name of
the copyright holders not be used in advertising or publicity
pertaining to distribution of the software without specific,
written prior permission. The copyright holders make no
representations about the suitability of this software for any
purpose. It is provided "as is" without express or implied
warranty.
THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS
SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
</copyright>
<description summary="control data devices">
This protocol allows a privileged client to control data devices. In
particular, the client will be able to manage the current selection and take
the role of a clipboard manager.
Warning! The protocol described in this file is currently in the testing
phase. Backward compatible changes may be added together with the
corresponding interface version bump. Backward incompatible changes can
only be done by creating a new major version of the extension.
</description>
<interface name="ext_data_control_manager_v1" version="1">
<description summary="manager to control data devices">
This interface is a manager that allows creating per-seat data device
controls.
</description>
<request name="create_data_source">
<description summary="create a new data source">
Create a new data source.
</description>
<arg name="id" type="new_id" interface="ext_data_control_source_v1"
summary="data source to create"/>
</request>
<request name="get_data_device">
<description summary="get a data device for a seat">
Create a data device that can be used to manage a seat's selection.
</description>
<arg name="id" type="new_id" interface="ext_data_control_device_v1"/>
<arg name="seat" type="object" interface="wl_seat"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy the manager">
All objects created by the manager will still remain valid, until their
appropriate destroy request has been called.
</description>
</request>
</interface>
<interface name="ext_data_control_device_v1" version="1">
<description summary="manage a data device for a seat">
This interface allows a client to manage a seat's selection.
When the seat is destroyed, this object becomes inert.
</description>
<request name="set_selection">
<description summary="copy data to the selection">
This request asks the compositor to set the selection to the data from
the source on behalf of the client.
The given source may not be used in any further set_selection or
set_primary_selection requests. Attempting to use a previously used
source triggers the used_source protocol error.
To unset the selection, set the source to NULL.
</description>
<arg name="source" type="object" interface="ext_data_control_source_v1"
allow-null="true"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy this data device">
Destroys the data device object.
</description>
</request>
<event name="data_offer">
<description summary="introduce a new ext_data_control_offer">
The data_offer event introduces a new ext_data_control_offer object,
which will subsequently be used in either the
ext_data_control_device.selection event (for the regular clipboard
selections) or the ext_data_control_device.primary_selection event (for
the primary clipboard selections). Immediately following the
ext_data_control_device.data_offer event, the new data_offer object
will send out ext_data_control_offer.offer events to describe the MIME
types it offers.
</description>
<arg name="id" type="new_id" interface="ext_data_control_offer_v1"/>
</event>
<event name="selection">
<description summary="advertise new selection">
The selection event is sent out to notify the client of a new
ext_data_control_offer for the selection for this device. The
ext_data_control_device.data_offer and the ext_data_control_offer.offer
events are sent out immediately before this event to introduce the data
offer object. The selection event is sent to a client when a new
selection is set. The ext_data_control_offer is valid until a new
ext_data_control_offer or NULL is received. The client must destroy the
previous selection ext_data_control_offer, if any, upon receiving this
event. Regardless, the previous selection will be ignored once a new
selection ext_data_control_offer is received.
The first selection event is sent upon binding the
ext_data_control_device object.
</description>
<arg name="id" type="object" interface="ext_data_control_offer_v1"
allow-null="true"/>
</event>
<event name="finished">
<description summary="this data control is no longer valid">
This data control object is no longer valid and should be destroyed by
the client.
</description>
</event>
<event name="primary_selection">
<description summary="advertise new primary selection">
The primary_selection event is sent out to notify the client of a new
ext_data_control_offer for the primary selection for this device. The
ext_data_control_device.data_offer and the ext_data_control_offer.offer
events are sent out immediately before this event to introduce the data
offer object. The primary_selection event is sent to a client when a
new primary selection is set. The ext_data_control_offer is valid until
a new ext_data_control_offer or NULL is received. The client must
destroy the previous primary selection ext_data_control_offer, if any,
upon receiving this event. Regardless, the previous primary selection
will be ignored once a new primary selection ext_data_control_offer is
received.
If the compositor supports primary selection, the first
primary_selection event is sent upon binding the
ext_data_control_device object.
</description>
<arg name="id" type="object" interface="ext_data_control_offer_v1"
allow-null="true"/>
</event>
<request name="set_primary_selection">
<description summary="copy data to the primary selection">
This request asks the compositor to set the primary selection to the
data from the source on behalf of the client.
The given source may not be used in any further set_selection or
set_primary_selection requests. Attempting to use a previously used
source triggers the used_source protocol error.
To unset the primary selection, set the source to NULL.
The compositor will ignore this request if it does not support primary
selection.
</description>
<arg name="source" type="object" interface="ext_data_control_source_v1"
allow-null="true"/>
</request>
<enum name="error">
<entry name="used_source" value="1"
summary="source given to set_selection or set_primary_selection was already used before"/>
</enum>
</interface>
<interface name="ext_data_control_source_v1" version="1">
<description summary="offer to transfer data">
The ext_data_control_source object is the source side of a
ext_data_control_offer. It is created by the source client in a data
transfer and provides a way to describe the offered data and a way to
respond to requests to transfer the data.
</description>
<enum name="error">
<entry name="invalid_offer" value="1"
summary="offer sent after ext_data_control_device.set_selection"/>
</enum>
<request name="offer">
<description summary="add an offered MIME type">
This request adds a MIME type to the set of MIME types advertised to
targets. Can be called several times to offer multiple types.
Calling this after ext_data_control_device.set_selection is a protocol
error.
</description>
<arg name="mime_type" type="string"
summary="MIME type offered by the data source"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy this source">
Destroys the data source object.
</description>
</request>
<event name="send">
<description summary="send the data">
Request for data from the client. Send the data as the specified MIME
type over the passed file descriptor, then close it.
</description>
<arg name="mime_type" type="string" summary="MIME type for the data"/>
<arg name="fd" type="fd" summary="file descriptor for the data"/>
</event>
<event name="cancelled">
<description summary="selection was cancelled">
This data source is no longer valid. The data source has been replaced
by another data source.
The client should clean up and destroy this data source.
</description>
</event>
</interface>
<interface name="ext_data_control_offer_v1" version="1">
<description summary="offer to transfer data">
A ext_data_control_offer represents a piece of data offered for transfer
by another client (the source client). The offer describes the different
MIME types that the data can be converted to and provides the mechanism
for transferring the data directly from the source client.
</description>
<request name="receive">
<description summary="request that the data is transferred">
To transfer the offered data, the client issues this request and
indicates the MIME type it wants to receive. The transfer happens
through the passed file descriptor (typically created with the pipe
system call). The source client writes the data in the MIME type
representation requested and then closes the file descriptor.
The receiving client reads from the read end of the pipe until EOF and
then closes its end, at which point the transfer is complete.
This request may happen multiple times for different MIME types.
</description>
<arg name="mime_type" type="string"
summary="MIME type desired by receiver"/>
<arg name="fd" type="fd" summary="file descriptor for data transfer"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy this offer">
Destroys the data offer object.
</description>
</request>
<event name="offer">
<description summary="advertise offered MIME type">
Sent immediately after creating the ext_data_control_offer object.
One event per offered MIME type.
</description>
<arg name="mime_type" type="string" summary="offered MIME type"/>
</event>
</interface>
</protocol>

View File

@@ -73,7 +73,7 @@
</description>
<arg name="workspace" type="new_id" interface="ext_workspace_handle_v1"/>
</event>
<request name="commit">
<description summary="all requests about the workspaces have been sent">
The client must send this request after it has finished sending other
@@ -242,7 +242,7 @@
- a list of states, conveyed to the client with the state event
- and optionally a set of coordinates, conveyed to the client with the
coordinates event
The client may request that the compositor activate or deactivate the workspace.
Each workspace can belong to only a single workspace group.

View File

@@ -12,6 +12,7 @@ import (
"strings"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
)
@@ -119,7 +120,12 @@ func GetOutputDir() string {
}
func getXDGPicturesDir() string {
userDirsFile := filepath.Join(utils.XDGConfigHome(), "user-dirs.dirs")
userConfigDir, err := os.UserConfigDir()
if err != nil {
log.Error("failed to get user config dir", "err", err)
return ""
}
userDirsFile := filepath.Join(userConfigDir, "user-dirs.dirs")
data, err := os.ReadFile(userDirsFile)
if err != nil {
return ""

View File

@@ -7,7 +7,7 @@ import (
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
)
type ThemeColors struct {
@@ -74,7 +74,12 @@ func loadColorsFile() *ColorScheme {
}
func getColorsFilePath() string {
return filepath.Join(utils.XDGCacheHome(), "DankMaterialShell", "dms-colors.json")
cacheDir, err := os.UserCacheDir()
if err != nil {
log.Error("Failed to get user cache dir", "err", err)
return ""
}
return filepath.Join(cacheDir, "DankMaterialShell", "dms-colors.json")
}
func isLightMode() bool {

View File

@@ -0,0 +1,235 @@
package clipboard
import (
"encoding/json"
"fmt"
"net"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/params"
)
func HandleRequest(conn net.Conn, req models.Request, m *Manager) {
switch req.Method {
case "clipboard.getState":
handleGetState(conn, req, m)
case "clipboard.getHistory":
handleGetHistory(conn, req, m)
case "clipboard.getEntry":
handleGetEntry(conn, req, m)
case "clipboard.deleteEntry":
handleDeleteEntry(conn, req, m)
case "clipboard.clearHistory":
handleClearHistory(conn, req, m)
case "clipboard.copy":
handleCopy(conn, req, m)
case "clipboard.copyEntry":
handleCopyEntry(conn, req, m)
case "clipboard.paste":
handlePaste(conn, req, m)
case "clipboard.subscribe":
handleSubscribe(conn, req, m)
case "clipboard.search":
handleSearch(conn, req, m)
case "clipboard.getConfig":
handleGetConfig(conn, req, m)
case "clipboard.setConfig":
handleSetConfig(conn, req, m)
case "clipboard.store":
handleStore(conn, req, m)
default:
models.RespondError(conn, req.ID, "unknown method: "+req.Method)
}
}
func handleGetState(conn net.Conn, req models.Request, m *Manager) {
models.Respond(conn, req.ID, m.GetState())
}
func handleGetHistory(conn net.Conn, req models.Request, m *Manager) {
history := m.GetHistory()
for i := range history {
history[i].Data = nil
}
models.Respond(conn, req.ID, history)
}
func handleGetEntry(conn net.Conn, req models.Request, m *Manager) {
id, err := params.Int(req.Params, "id")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
entry, err := m.GetEntry(uint64(id))
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, entry)
}
func handleDeleteEntry(conn net.Conn, req models.Request, m *Manager) {
id, err := params.Int(req.Params, "id")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
if err := m.DeleteEntry(uint64(id)); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "entry deleted"})
}
func handleClearHistory(conn net.Conn, req models.Request, m *Manager) {
m.ClearHistory()
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "history cleared"})
}
func handleCopy(conn net.Conn, req models.Request, m *Manager) {
text, err := params.String(req.Params, "text")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
if err := m.CopyText(text); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "copied to clipboard"})
}
func handleCopyEntry(conn net.Conn, req models.Request, m *Manager) {
id, err := params.Int(req.Params, "id")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
entry, err := m.GetEntry(uint64(id))
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
if err := m.SetClipboard(entry.Data, entry.MimeType); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "copied to clipboard"})
}
func handlePaste(conn net.Conn, req models.Request, m *Manager) {
text, err := m.PasteText()
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, map[string]string{"text": text})
}
func handleSubscribe(conn net.Conn, req models.Request, m *Manager) {
clientID := fmt.Sprintf("clipboard-%d", req.ID)
ch := m.Subscribe(clientID)
defer m.Unsubscribe(clientID)
initialState := m.GetState()
if err := json.NewEncoder(conn).Encode(models.Response[State]{
ID: req.ID,
Result: &initialState,
}); err != nil {
return
}
for state := range ch {
if err := json.NewEncoder(conn).Encode(models.Response[State]{
ID: req.ID,
Result: &state,
}); err != nil {
return
}
}
}
func handleSearch(conn net.Conn, req models.Request, m *Manager) {
p := SearchParams{
Query: params.StringOpt(req.Params, "query", ""),
MimeType: params.StringOpt(req.Params, "mimeType", ""),
Limit: params.IntOpt(req.Params, "limit", 50),
Offset: params.IntOpt(req.Params, "offset", 0),
}
if img, ok := req.Params["isImage"].(bool); ok {
p.IsImage = &img
}
if b, ok := req.Params["before"].(float64); ok {
v := int64(b)
p.Before = &v
}
if a, ok := req.Params["after"].(float64); ok {
v := int64(a)
p.After = &v
}
models.Respond(conn, req.ID, m.Search(p))
}
func handleGetConfig(conn net.Conn, req models.Request, m *Manager) {
models.Respond(conn, req.ID, m.GetConfig())
}
func handleSetConfig(conn net.Conn, req models.Request, m *Manager) {
cfg := m.GetConfig()
if _, ok := req.Params["maxHistory"]; ok {
cfg.MaxHistory = params.IntOpt(req.Params, "maxHistory", cfg.MaxHistory)
}
if _, ok := req.Params["maxEntrySize"]; ok {
cfg.MaxEntrySize = int64(params.IntOpt(req.Params, "maxEntrySize", int(cfg.MaxEntrySize)))
}
if _, ok := req.Params["autoClearDays"]; ok {
cfg.AutoClearDays = params.IntOpt(req.Params, "autoClearDays", cfg.AutoClearDays)
}
if v, ok := req.Params["clearAtStartup"].(bool); ok {
cfg.ClearAtStartup = v
}
if v, ok := req.Params["disabled"].(bool); ok {
cfg.Disabled = v
}
if v, ok := req.Params["disableHistory"].(bool); ok {
cfg.DisableHistory = v
}
if err := m.SetConfig(cfg); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "config updated"})
}
func handleStore(conn net.Conn, req models.Request, m *Manager) {
data, err := params.String(req.Params, "data")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
mimeType := params.StringOpt(req.Params, "mimeType", "text/plain;charset=utf-8")
if err := m.StoreData([]byte(data), mimeType); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "stored"})
}
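
For context, the dispatcher above is driven by JSON requests over the daemon's IPC socket. A hedged client-side sketch, assuming the wire fields are the lowercase id/method/params names and that socketPath points at the DMS socket (both assumptions for illustration; the method string and params match handleCopy):

package dmsclient // hypothetical example package, not part of this changeset

import (
	"encoding/json"
	"net"
)

// copyText sends a clipboard.copy request and waits for the response.
func copyText(socketPath, text string) error {
	conn, err := net.Dial("unix", socketPath)
	if err != nil {
		return err
	}
	defer conn.Close()
	req := map[string]any{
		"id":     1,
		"method": "clipboard.copy",
		"params": map[string]any{"text": text},
	}
	if err := json.NewEncoder(conn).Encode(req); err != nil {
		return err
	}
	var resp map[string]any
	return json.NewDecoder(conn).Decode(&resp) // expect a SuccessResult-style payload
}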

File diff suppressed because it is too large.

View File

@@ -0,0 +1,532 @@
package clipboard
import (
"sync"
"sync/atomic"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
mocks_wlcontext "github.com/AvengeMedia/DankMaterialShell/core/internal/mocks/wlcontext"
)
func TestEncodeDecodeEntry_Roundtrip(t *testing.T) {
original := Entry{
ID: 12345,
Data: []byte("hello world"),
MimeType: "text/plain;charset=utf-8",
Preview: "hello world",
Size: 11,
Timestamp: time.Now().Truncate(time.Second),
IsImage: false,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.ID, decoded.ID)
assert.Equal(t, original.Data, decoded.Data)
assert.Equal(t, original.MimeType, decoded.MimeType)
assert.Equal(t, original.Preview, decoded.Preview)
assert.Equal(t, original.Size, decoded.Size)
assert.Equal(t, original.Timestamp.Unix(), decoded.Timestamp.Unix())
assert.Equal(t, original.IsImage, decoded.IsImage)
}
func TestEncodeDecodeEntry_Image(t *testing.T) {
original := Entry{
ID: 99999,
Data: []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A},
MimeType: "image/png",
Preview: "[[ image 8 B png 100x100 ]]",
Size: 8,
Timestamp: time.Now().Truncate(time.Second),
IsImage: true,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.ID, decoded.ID)
assert.Equal(t, original.Data, decoded.Data)
assert.True(t, decoded.IsImage)
assert.Equal(t, original.Preview, decoded.Preview)
}
func TestEncodeDecodeEntry_EmptyData(t *testing.T) {
original := Entry{
ID: 1,
Data: []byte{},
MimeType: "text/plain",
Preview: "",
Size: 0,
Timestamp: time.Now().Truncate(time.Second),
IsImage: false,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.ID, decoded.ID)
assert.Empty(t, decoded.Data)
}
func TestEncodeDecodeEntry_LargeData(t *testing.T) {
largeData := make([]byte, 100000)
for i := range largeData {
largeData[i] = byte(i % 256)
}
original := Entry{
ID: 777,
Data: largeData,
MimeType: "application/octet-stream",
Preview: "binary data...",
Size: len(largeData),
Timestamp: time.Now().Truncate(time.Second),
IsImage: false,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.Data, decoded.Data)
assert.Equal(t, original.Size, decoded.Size)
}
func TestStateEqual_BothNil(t *testing.T) {
assert.False(t, stateEqual(nil, nil))
}
func TestStateEqual_OneNil(t *testing.T) {
s := &State{Enabled: true}
assert.False(t, stateEqual(s, nil))
assert.False(t, stateEqual(nil, s))
}
func TestStateEqual_EnabledDiffers(t *testing.T) {
a := &State{Enabled: true, History: []Entry{}}
b := &State{Enabled: false, History: []Entry{}}
assert.False(t, stateEqual(a, b))
}
func TestStateEqual_HistoryLengthDiffers(t *testing.T) {
a := &State{Enabled: true, History: []Entry{{ID: 1}}}
b := &State{Enabled: true, History: []Entry{}}
assert.False(t, stateEqual(a, b))
}
func TestStateEqual_BothEqual(t *testing.T) {
a := &State{Enabled: true, History: []Entry{{ID: 1}, {ID: 2}}}
b := &State{Enabled: true, History: []Entry{{ID: 3}, {ID: 4}}}
assert.True(t, stateEqual(a, b))
}
func TestManager_ConcurrentSubscriberAccess(t *testing.T) {
m := &Manager{
subscribers: make(map[string]chan State),
dirty: make(chan struct{}, 1),
}
var wg sync.WaitGroup
const goroutines = 20
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
subID := string(rune('a' + id))
ch := m.Subscribe(subID)
assert.NotNil(t, ch)
time.Sleep(time.Millisecond)
m.Unsubscribe(subID)
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentGetState(t *testing.T) {
m := &Manager{
state: &State{
Enabled: true,
History: []Entry{{ID: 1}, {ID: 2}},
},
}
var wg sync.WaitGroup
const goroutines = 50
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
s := m.GetState()
_ = s.Enabled
_ = len(s.History)
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.stateMutex.Lock()
m.state = &State{
Enabled: j%2 == 0,
History: []Entry{{ID: uint64(j)}},
}
m.stateMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentConfigAccess(t *testing.T) {
m := &Manager{
config: DefaultConfig(),
}
var wg sync.WaitGroup
const goroutines = 30
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
cfg := m.getConfig()
_ = cfg.MaxHistory
_ = cfg.MaxEntrySize
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.configMutex.Lock()
m.config.MaxHistory = 50 + j
m.config.MaxEntrySize = int64(1024 * j)
m.configMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_NotifySubscribersNonBlocking(t *testing.T) {
m := &Manager{
dirty: make(chan struct{}, 1),
}
for i := 0; i < 10; i++ {
m.notifySubscribers()
}
assert.Len(t, m.dirty, 1)
}
func TestManager_ConcurrentOfferAccess(t *testing.T) {
m := &Manager{
offerMimeTypes: make(map[any][]string),
offerRegistry: make(map[uint32]any),
}
var wg sync.WaitGroup
const goroutines = 20
const iterations = 50
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
key := uint32(id)
for j := 0; j < iterations; j++ {
m.offerMutex.Lock()
m.offerRegistry[key] = struct{}{}
m.offerMimeTypes[key] = []string{"text/plain"}
m.offerMutex.Unlock()
m.offerMutex.RLock()
_ = m.offerRegistry[key]
_ = m.offerMimeTypes[key]
m.offerMutex.RUnlock()
m.offerMutex.Lock()
delete(m.offerRegistry, key)
delete(m.offerMimeTypes, key)
m.offerMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentPersistAccess(t *testing.T) {
m := &Manager{
persistData: make(map[string][]byte),
persistMimeTypes: []string{},
}
var wg sync.WaitGroup
const goroutines = 20
const iterations = 50
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.persistMutex.RLock()
_ = m.persistData
_ = m.persistMimeTypes
m.persistMutex.RUnlock()
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.persistMutex.Lock()
m.persistMimeTypes = []string{"text/plain", "text/html"}
m.persistData = map[string][]byte{
"text/plain": []byte("test"),
}
m.persistMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentOwnerAccess(t *testing.T) {
m := &Manager{}
var wg sync.WaitGroup
const goroutines = 30
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.ownerLock.Lock()
_ = m.isOwner
m.ownerLock.Unlock()
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.ownerLock.Lock()
m.isOwner = j%2 == 0
m.ownerLock.Unlock()
}
}()
}
wg.Wait()
}
func TestItob(t *testing.T) {
tests := []struct {
input uint64
expected []byte
}{
{0, []byte{0, 0, 0, 0, 0, 0, 0, 0}},
{1, []byte{0, 0, 0, 0, 0, 0, 0, 1}},
{256, []byte{0, 0, 0, 0, 0, 0, 1, 0}},
{0xFFFFFFFFFFFFFFFF, []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}},
}
for _, tt := range tests {
result := itob(tt.input)
assert.Equal(t, tt.expected, result)
}
}
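
The itob helper under test is outside this excerpt; an implementation consistent with these cases is the usual fixed-width big-endian key encoding used with bbolt:

package clipboard

import "encoding/binary"

// itob sketch (not part of this diff): encode an entry ID as an 8-byte
// big-endian key so keys sort in numeric (insertion) order.
func itob(v uint64) []byte {
	b := make([]byte, 8)
	binary.BigEndian.PutUint64(b, v)
	return b
}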
func TestSizeStr(t *testing.T) {
tests := []struct {
input int
expected string
}{
{0, "0 B"},
{100, "100 B"},
{1024, "1 KiB"},
{2048, "2 KiB"},
{1048576, "1 MiB"},
{5242880, "5 MiB"},
}
for _, tt := range tests {
result := sizeStr(tt.input)
assert.Equal(t, tt.expected, result)
}
}
func TestSelectMimeType(t *testing.T) {
m := &Manager{}
tests := []struct {
mimes []string
expected string
}{
{[]string{"text/plain;charset=utf-8", "text/html"}, "text/plain;charset=utf-8"},
{[]string{"text/html", "text/plain"}, "text/plain"},
{[]string{"text/html", "image/png"}, "image/png"},
{[]string{"image/png", "image/jpeg"}, "image/png"},
{[]string{"image/png"}, "image/png"},
{[]string{"application/octet-stream"}, "application/octet-stream"},
{[]string{}, ""},
}
for _, tt := range tests {
result := m.selectMimeType(tt.mimes)
assert.Equal(t, tt.expected, result)
}
}
func TestIsImageMimeType(t *testing.T) {
m := &Manager{}
assert.True(t, m.isImageMimeType("image/png"))
assert.True(t, m.isImageMimeType("image/jpeg"))
assert.True(t, m.isImageMimeType("image/gif"))
assert.False(t, m.isImageMimeType("text/plain"))
assert.False(t, m.isImageMimeType("application/json"))
}
func TestTextPreview(t *testing.T) {
m := &Manager{}
short := m.textPreview([]byte("hello world"))
assert.Equal(t, "hello world", short)
withWhitespace := m.textPreview([]byte(" hello world "))
assert.Equal(t, "hello world", withWhitespace)
longText := make([]byte, 200)
for i := range longText {
longText[i] = 'a'
}
preview := m.textPreview(longText)
assert.True(t, len(preview) > 100)
assert.Contains(t, preview, "…")
}
func TestDefaultConfig(t *testing.T) {
cfg := DefaultConfig()
assert.Equal(t, 100, cfg.MaxHistory)
assert.Equal(t, int64(5*1024*1024), cfg.MaxEntrySize)
assert.Equal(t, 0, cfg.AutoClearDays)
assert.False(t, cfg.ClearAtStartup)
assert.False(t, cfg.Disabled)
assert.False(t, cfg.DisableHistory)
}
func TestManager_PostDelegatesToWlContext(t *testing.T) {
mockCtx := mocks_wlcontext.NewMockWaylandContext(t)
var called atomic.Bool
mockCtx.EXPECT().Post(mock.AnythingOfType("func()")).Run(func(fn func()) {
called.Store(true)
fn()
}).Once()
m := &Manager{
wlCtx: mockCtx,
}
executed := false
m.post(func() {
executed = true
})
assert.True(t, called.Load())
assert.True(t, executed)
}
func TestManager_PostExecutesFunctionViaContext(t *testing.T) {
mockCtx := mocks_wlcontext.NewMockWaylandContext(t)
var capturedFn func()
mockCtx.EXPECT().Post(mock.AnythingOfType("func()")).Run(func(fn func()) {
capturedFn = fn
}).Times(3)
m := &Manager{
wlCtx: mockCtx,
}
counter := 0
m.post(func() { counter++ })
m.post(func() { counter += 10 })
m.post(func() { counter += 100 })
assert.NotNil(t, capturedFn)
capturedFn()
assert.Equal(t, 100, counter)
}
func TestManager_ConcurrentPostWithMock(t *testing.T) {
mockCtx := mocks_wlcontext.NewMockWaylandContext(t)
var postCount atomic.Int32
mockCtx.EXPECT().Post(mock.AnythingOfType("func()")).Run(func(fn func()) {
postCount.Add(1)
}).Times(100)
m := &Manager{
wlCtx: mockCtx,
}
var wg sync.WaitGroup
for i := 0; i < 10; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < 10; j++ {
m.post(func() {})
}
}()
}
wg.Wait()
assert.Equal(t, int32(100), postCount.Load())
}
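
The Manager.post method exercised by these mock-based tests is not shown in this excerpt; the expectations above only pin down that post forwards the closure to the injected Wayland context, i.e. roughly:

func (m *Manager) post(fn func()) {
	m.wlCtx.Post(fn) // run on the Wayland event-loop goroutine
}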

View File

@@ -0,0 +1,192 @@
package clipboard
import (
"encoding/json"
"os"
"path/filepath"
"sync"
"time"
bolt "go.etcd.io/bbolt"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/wlcontext"
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
)
type Config struct {
MaxHistory int `json:"maxHistory"`
MaxEntrySize int64 `json:"maxEntrySize"`
AutoClearDays int `json:"autoClearDays"`
ClearAtStartup bool `json:"clearAtStartup"`
Disabled bool `json:"disabled"`
DisableHistory bool `json:"disableHistory"`
}
func DefaultConfig() Config {
return Config{
MaxHistory: 100,
MaxEntrySize: 5 * 1024 * 1024,
AutoClearDays: 0,
ClearAtStartup: false,
}
}
func getConfigPath() (string, error) {
configDir, err := os.UserConfigDir()
if err != nil {
return "", err
}
return filepath.Join(configDir, "DankMaterialShell", "clsettings.json"), nil
}
func LoadConfig() Config {
cfg := DefaultConfig()
path, err := getConfigPath()
if err != nil {
return cfg
}
data, err := os.ReadFile(path)
if err != nil {
return cfg
}
if err := json.Unmarshal(data, &cfg); err != nil {
return DefaultConfig()
}
return cfg
}
func SaveConfig(cfg Config) error {
path, err := getConfigPath()
if err != nil {
return err
}
if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
return err
}
data, err := json.MarshalIndent(cfg, "", " ")
if err != nil {
return err
}
return os.WriteFile(path, data, 0644)
}
type SearchParams struct {
Query string `json:"query"`
MimeType string `json:"mimeType"`
IsImage *bool `json:"isImage"`
Limit int `json:"limit"`
Offset int `json:"offset"`
Before *int64 `json:"before"`
After *int64 `json:"after"`
}
type SearchResult struct {
Entries []Entry `json:"entries"`
Total int `json:"total"`
HasMore bool `json:"hasMore"`
}
type Entry struct {
ID uint64 `json:"id"`
Data []byte `json:"data,omitempty"`
MimeType string `json:"mimeType"`
Preview string `json:"preview"`
Size int `json:"size"`
Timestamp time.Time `json:"timestamp"`
IsImage bool `json:"isImage"`
Hash uint64 `json:"hash,omitempty"`
}
type State struct {
Enabled bool `json:"enabled"`
History []Entry `json:"history"`
Current *Entry `json:"current,omitempty"`
}
type Manager struct {
config Config
configMutex sync.RWMutex
configPath string
display wlclient.WaylandDisplay
wlCtx wlcontext.WaylandContext
registry *wlclient.Registry
dataControlMgr any
seat *wlclient.Seat
dataDevice any
currentOffer any
currentSource any
seatName uint32
mimeTypes []string
offerMimeTypes map[any][]string
offerMutex sync.RWMutex
offerRegistry map[uint32]any
sourceMimeTypes []string
sourceMutex sync.RWMutex
persistData map[string][]byte
persistMimeTypes []string
persistMutex sync.RWMutex
isOwner bool
ownerLock sync.Mutex
initialized bool
alive bool
stopChan chan struct{}
db *bolt.DB
dbPath string
state *State
stateMutex sync.RWMutex
subscribers map[string]chan State
subMutex sync.RWMutex
dirty chan struct{}
notifierWg sync.WaitGroup
lastState *State
}
func (m *Manager) GetState() State {
m.stateMutex.RLock()
defer m.stateMutex.RUnlock()
if m.state == nil {
return State{}
}
return *m.state
}
func (m *Manager) Subscribe(id string) chan State {
ch := make(chan State, 64)
m.subMutex.Lock()
m.subscribers[id] = ch
m.subMutex.Unlock()
return ch
}
func (m *Manager) Unsubscribe(id string) {
m.subMutex.Lock()
if ch, ok := m.subscribers[id]; ok {
close(ch)
delete(m.subscribers, id)
}
m.subMutex.Unlock()
}
func (m *Manager) notifySubscribers() {
select {
case m.dirty <- struct{}{}:
default:
}
}
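
A short usage sketch for the config helpers above, written from inside the package (the values are arbitrary and the function is not part of this diff):

// exampleConfigRoundTrip loads the persisted settings, adjusts a couple of
// fields, and writes them back.
func exampleConfigRoundTrip() error {
	cfg := LoadConfig() // falls back to DefaultConfig on read or parse errors
	cfg.MaxHistory = 200
	cfg.AutoClearDays = 7
	return SaveConfig(cfg) // writes clsettings.json under the user config dir
}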

View File

@@ -2,6 +2,8 @@ package cups
import (
"errors"
"net"
"net/url"
"strings"
"time"
@@ -156,9 +158,42 @@ func (m *Manager) PurgeJobs(printerName string) error {
return err
}
func resolveIPFromURI(uri string) string {
parsed, err := url.Parse(uri)
if err != nil {
return ""
}
host := parsed.Hostname()
if host == "" {
return ""
}
if ip := net.ParseIP(host); ip != nil {
return ip.String()
}
addrs, err := net.LookupIP(host)
if err != nil || len(addrs) == 0 {
return ""
}
for _, addr := range addrs {
if v4 := addr.To4(); v4 != nil {
return v4.String()
}
}
return addrs[0].String()
}
func (m *Manager) GetDevices() ([]Device, error) {
if m.pkHelper != nil {
- return m.pkHelper.DevicesGet(10, 0, nil, nil)
+ devices, err := m.pkHelper.DevicesGet(10, 0, nil, nil)
+ if err != nil {
+ return nil, err
+ }
+ for i := range devices {
+ if devices[i].Class == "network" {
+ devices[i].IP = resolveIPFromURI(devices[i].URI)
+ }
+ }
+ return devices, nil
}
deviceAttrs, err := m.client.GetDevices()
@@ -176,6 +211,9 @@ func (m *Manager) GetDevices() ([]Device, error) {
ID: getStringAttr(attrs, "device-id"),
Location: getStringAttr(attrs, "device-location"),
}
if device.Class == "network" {
device.IP = resolveIPFromURI(uri)
}
devices = append(devices, device)
}
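
A quick behavioural check for resolveIPFromURI above, written as a throwaway test rather than as part of this changeset; the literal-IP path is deterministic, while hostname URIs go through net.LookupIP with IPv4 preferred:

package cups

import "testing"

// Not part of this diff: exercises only the literal-IP branch of resolveIPFromURI.
// Hostname URIs (e.g. ipp://printer.local:631/ipp/print) depend on local DNS/mDNS.
func TestResolveIPFromURI_LiteralIP(t *testing.T) {
	if got := resolveIPFromURI("ipp://192.168.1.50:631/ipp/print"); got != "192.168.1.50" {
		t.Fatalf("want 192.168.1.50, got %q", got)
	}
}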

View File

@@ -42,6 +42,7 @@ type Device struct {
MakeModel string `json:"makeModel"`
ID string `json:"id"`
Location string `json:"location"`
+ IP string `json:"ip,omitempty"`
}
type PPD struct {

View File

@@ -10,7 +10,7 @@ import (
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/dwl_ipc"
)
- func NewManager(display *wlclient.Display) (*Manager, error) {
+ func NewManager(display wlclient.WaylandDisplay) (*Manager, error) {
m := &Manager{
display: display,
ctx: display.Context(),

View File

@@ -0,0 +1,366 @@
package dwl
import (
"errors"
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
mocks_wlclient "github.com/AvengeMedia/DankMaterialShell/core/internal/mocks/wlclient"
)
func TestStateChanged_BothNil(t *testing.T) {
assert.True(t, stateChanged(nil, nil))
}
func TestStateChanged_OneNil(t *testing.T) {
s := &State{TagCount: 9}
assert.True(t, stateChanged(s, nil))
assert.True(t, stateChanged(nil, s))
}
func TestStateChanged_TagCountDiffers(t *testing.T) {
a := &State{TagCount: 9, Outputs: make(map[string]*OutputState), Layouts: []string{}}
b := &State{TagCount: 10, Outputs: make(map[string]*OutputState), Layouts: []string{}}
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_LayoutLengthDiffers(t *testing.T) {
a := &State{TagCount: 9, Layouts: []string{"tile"}, Outputs: make(map[string]*OutputState)}
b := &State{TagCount: 9, Layouts: []string{"tile", "monocle"}, Outputs: make(map[string]*OutputState)}
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_ActiveOutputDiffers(t *testing.T) {
a := &State{TagCount: 9, ActiveOutput: "eDP-1", Outputs: make(map[string]*OutputState), Layouts: []string{}}
b := &State{TagCount: 9, ActiveOutput: "HDMI-A-1", Outputs: make(map[string]*OutputState), Layouts: []string{}}
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_OutputCountDiffers(t *testing.T) {
a := &State{
TagCount: 9,
Outputs: map[string]*OutputState{"eDP-1": {}},
Layouts: []string{},
}
b := &State{
TagCount: 9,
Outputs: map[string]*OutputState{},
Layouts: []string{},
}
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_OutputFieldsDiffer(t *testing.T) {
a := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Active: 1, Layout: 0, Title: "Firefox"},
},
}
b := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Active: 0, Layout: 0, Title: "Firefox"},
},
}
assert.True(t, stateChanged(a, b))
b.Outputs["eDP-1"].Active = 1
b.Outputs["eDP-1"].Layout = 1
assert.True(t, stateChanged(a, b))
b.Outputs["eDP-1"].Layout = 0
b.Outputs["eDP-1"].Title = "Code"
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_TagsDiffer(t *testing.T) {
a := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Tags: []TagState{{Tag: 1, State: 1, Clients: 2, Focused: 1}}},
},
}
b := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Tags: []TagState{{Tag: 1, State: 2, Clients: 2, Focused: 1}}},
},
}
assert.True(t, stateChanged(a, b))
b.Outputs["eDP-1"].Tags[0].State = 1
b.Outputs["eDP-1"].Tags[0].Clients = 3
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_Equal(t *testing.T) {
a := &State{
TagCount: 9,
ActiveOutput: "eDP-1",
Layouts: []string{"tile", "monocle"},
Outputs: map[string]*OutputState{
"eDP-1": {
Name: "eDP-1",
Active: 1,
Layout: 0,
LayoutSymbol: "[]=",
Title: "Firefox",
AppID: "firefox",
KbLayout: "us",
Keymode: "",
Tags: []TagState{{Tag: 1, State: 1, Clients: 2, Focused: 1}},
},
},
}
b := &State{
TagCount: 9,
ActiveOutput: "eDP-1",
Layouts: []string{"tile", "monocle"},
Outputs: map[string]*OutputState{
"eDP-1": {
Name: "eDP-1",
Active: 1,
Layout: 0,
LayoutSymbol: "[]=",
Title: "Firefox",
AppID: "firefox",
KbLayout: "us",
Keymode: "",
Tags: []TagState{{Tag: 1, State: 1, Clients: 2, Focused: 1}},
},
},
}
assert.False(t, stateChanged(a, b))
}
func TestManager_ConcurrentGetState(t *testing.T) {
m := &Manager{
state: &State{
TagCount: 9,
Layouts: []string{"tile"},
Outputs: map[string]*OutputState{"eDP-1": {Name: "eDP-1"}},
},
}
var wg sync.WaitGroup
const goroutines = 50
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
s := m.GetState()
_ = s.TagCount
_ = s.Outputs
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.stateMutex.Lock()
m.state = &State{
TagCount: uint32(j % 10),
Layouts: []string{"tile", "monocle"},
Outputs: map[string]*OutputState{"eDP-1": {Active: uint32(j % 2)}},
}
m.stateMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentSubscriberAccess(t *testing.T) {
m := &Manager{
stopChan: make(chan struct{}),
dirty: make(chan struct{}, 1),
}
var wg sync.WaitGroup
const goroutines = 20
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
subID := string(rune('a' + id))
ch := m.Subscribe(subID)
assert.NotNil(t, ch)
time.Sleep(time.Millisecond)
m.Unsubscribe(subID)
}(i)
}
wg.Wait()
}
func TestManager_SyncmapOutputsConcurrentAccess(t *testing.T) {
m := &Manager{}
var wg sync.WaitGroup
const goroutines = 30
const iterations = 50
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
key := uint32(id)
for j := 0; j < iterations; j++ {
state := &outputState{
id: key,
name: "test-output",
active: uint32(j % 2),
tags: []TagState{{Tag: uint32(j), State: 1}},
}
m.outputs.Store(key, state)
if loaded, ok := m.outputs.Load(key); ok {
assert.Equal(t, key, loaded.id)
}
m.outputs.Range(func(k uint32, v *outputState) bool {
_ = v.name
_ = v.active
return true
})
}
m.outputs.Delete(key)
}(i)
}
wg.Wait()
}
func TestManager_NotifySubscribersNonBlocking(t *testing.T) {
m := &Manager{
dirty: make(chan struct{}, 1),
}
for i := 0; i < 10; i++ {
m.notifySubscribers()
}
assert.Len(t, m.dirty, 1)
}
func TestManager_PostQueueFull(t *testing.T) {
m := &Manager{
cmdq: make(chan cmd, 2),
stopChan: make(chan struct{}),
}
m.post(func() {})
m.post(func() {})
m.post(func() {})
m.post(func() {})
assert.Len(t, m.cmdq, 2)
}
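
The dwl Manager's post implementation is not shown in this excerpt; the queue-full test above only requires a non-blocking send that drops work once cmdq is full, along these lines (the cmd field name is an assumption):

func (m *Manager) post(fn func()) {
	select {
	case m.cmdq <- cmd{fn: fn}: // queued for the event loop
	default: // queue full: drop instead of blocking the caller
	}
}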
func TestManager_GetStateNilState(t *testing.T) {
m := &Manager{}
s := m.GetState()
assert.NotNil(t, s.Outputs)
assert.NotNil(t, s.Layouts)
assert.Equal(t, uint32(0), s.TagCount)
}
func TestTagState_Fields(t *testing.T) {
tag := TagState{
Tag: 1,
State: 2,
Clients: 3,
Focused: 1,
}
assert.Equal(t, uint32(1), tag.Tag)
assert.Equal(t, uint32(2), tag.State)
assert.Equal(t, uint32(3), tag.Clients)
assert.Equal(t, uint32(1), tag.Focused)
}
func TestOutputState_Fields(t *testing.T) {
out := OutputState{
Name: "eDP-1",
Active: 1,
Tags: []TagState{{Tag: 1}},
Layout: 0,
LayoutSymbol: "[]=",
Title: "Firefox",
AppID: "firefox",
KbLayout: "us",
Keymode: "",
}
assert.Equal(t, "eDP-1", out.Name)
assert.Equal(t, uint32(1), out.Active)
assert.Len(t, out.Tags, 1)
assert.Equal(t, "[]=", out.LayoutSymbol)
}
func TestStateChanged_NewOutputAppears(t *testing.T) {
a := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Name: "eDP-1"},
},
}
b := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Name: "eDP-1"},
"HDMI-A-1": {Name: "HDMI-A-1"},
},
}
assert.True(t, stateChanged(a, b))
}
func TestStateChanged_TagsLengthDiffers(t *testing.T) {
a := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Tags: []TagState{{Tag: 1}}},
},
}
b := &State{
TagCount: 9,
Layouts: []string{},
Outputs: map[string]*OutputState{
"eDP-1": {Tags: []TagState{{Tag: 1}, {Tag: 2}}},
},
}
assert.True(t, stateChanged(a, b))
}
func TestNewManager_GetRegistryError(t *testing.T) {
mockDisplay := mocks_wlclient.NewMockWaylandDisplay(t)
mockDisplay.EXPECT().Context().Return(nil)
mockDisplay.EXPECT().GetRegistry().Return(nil, errors.New("failed to get registry"))
_, err := NewManager(mockDisplay)
assert.Error(t, err)
assert.Contains(t, err.Error(), "failed to get registry")
}
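
This error path is testable because NewManager now takes the wlclient.WaylandDisplay interface instead of *wlclient.Display (see the signature changes elsewhere in this diff), so a mock can be injected. A minimal shape consistent with the calls mocked here; the real interface may declare more methods:

package wlclient

// Sketch only, inferred from the usages visible in this diff.
type WaylandDisplay interface {
	Context() *Context
	GetRegistry() (*Registry, error)
}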

View File

@@ -38,7 +38,7 @@ type cmd struct {
}
type Manager struct {
- display *wlclient.Display
+ display wlclient.WaylandDisplay
ctx *wlclient.Context
registry *wlclient.Registry
manager any

View File

@@ -38,7 +38,7 @@ func CheckCapability() bool {
return found
}
- func NewManager(display *wlclient.Display) (*Manager, error) {
+ func NewManager(display wlclient.WaylandDisplay) (*Manager, error) {
m := &Manager{
display: display,
ctx: display.Context(),

Some files were not shown because too many files have changed in this diff.