mirror of
https://github.com/AvengeMedia/DankMaterialShell.git
synced 2026-05-02 02:22:06 -04:00
Compare commits
333 Commits
bcf41ed5ca
...
frame
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1217b25de5 | ||
|
|
e913630f90 | ||
|
|
220bb2708b | ||
|
|
e57ab3e1f3 | ||
|
|
952ab9b753 | ||
|
|
28f9aabcd9 | ||
|
|
3d9bd73336 | ||
|
|
3497d5f523 | ||
|
|
8ef1d95e65 | ||
|
|
e9aeb9ac60 | ||
|
|
fb02f7294d | ||
|
|
f15d49d80a | ||
|
|
c471cff456 | ||
|
|
f83bb10e0c | ||
|
|
74ad58b1e1 | ||
|
|
577863b969 | ||
|
|
03d2a3fd39 | ||
|
|
802b23ed60 | ||
|
|
2b9f3a9eef | ||
|
|
62c60900eb | ||
|
|
b381e1e54c | ||
|
|
e7ee26ce74 | ||
|
|
521a3fa6e8 | ||
|
|
5ee93a67fe | ||
|
|
5d0a03c822 | ||
|
|
293c2a0035 | ||
|
|
9a5fa50541 | ||
|
|
d5ceea8a56 | ||
|
|
faa5e7e02d | ||
|
|
516c478f3d | ||
|
|
906c6a2501 | ||
|
|
86d8fe4fa4 | ||
|
|
9b44bc3259 | ||
|
|
59b6d2237b | ||
|
|
7e559cc0bb | ||
|
|
fd1facfce8 | ||
|
|
8f26193cc3 | ||
|
|
43b2e5315d | ||
|
|
5cad89e9cc | ||
|
|
3804d2f00b | ||
|
|
4d649468d5 | ||
|
|
c5f145be36 | ||
|
|
76dff870a7 | ||
|
|
6c8d3fc007 | ||
|
|
e7ffa23016 | ||
|
|
4266c064a9 | ||
|
|
5f631b36cd | ||
|
|
be8326f497 | ||
|
|
07dbba6c53 | ||
|
|
a53b9afb44 | ||
|
|
a0c7ffd6b9 | ||
|
|
7ca1d2325a | ||
|
|
8d0f256f74 | ||
|
|
1a9449da1b | ||
|
|
1caf8942b7 | ||
|
|
9efbcbcd20 | ||
|
|
3d07b8c9c1 | ||
|
|
dae74a40c0 | ||
|
|
959190dcbc | ||
|
|
1e48976ae5 | ||
|
|
0a8c111e12 | ||
|
|
19c786c0be | ||
|
|
7f8b260560 | ||
|
|
368536f698 | ||
|
|
b227221df6 | ||
|
|
8e047f45f5 | ||
|
|
fbe8cbb23f | ||
|
|
28315a165f | ||
|
|
1b32829dac | ||
|
|
1fce29324f | ||
|
|
1fab90178a | ||
|
|
eb04ab7dca | ||
|
|
e9fa2c78ee | ||
|
|
59dae954cd | ||
|
|
5c4ce86da4 | ||
|
|
0cf2c40377 | ||
|
|
679a59ad76 | ||
|
|
db3209afbe | ||
|
|
f0be36062e | ||
|
|
9578d6daf9 | ||
|
|
cc6766135d | ||
|
|
28c9bb0925 | ||
|
|
7826d827dd | ||
|
|
7f392acc54 | ||
|
|
190fd662ad | ||
|
|
e18587c471 | ||
|
|
ddb079b62d | ||
|
|
e7c8d208e2 | ||
|
|
0e2162cf29 | ||
|
|
4cf9b0adc7 | ||
|
|
1661d32641 | ||
|
|
aa59187403 | ||
|
|
bb08e1233a | ||
|
|
5343e97ab2 | ||
|
|
edc544df7a | ||
|
|
a880edd9fb | ||
|
|
7e1d808d70 | ||
|
|
ce93f22669 | ||
|
|
a58037b968 | ||
|
|
ccf0b60935 | ||
|
|
aad7011b1c | ||
|
|
3bde7ef4d3 | ||
|
|
04555dbfa7 | ||
|
|
3b494aa591 | ||
|
|
365387c3cd | ||
|
|
bb74a0ca4d | ||
|
|
9cf2ef84b7 | ||
|
|
46aaf5ff77 | ||
|
|
c544bda5df | ||
|
|
e86227f05f | ||
|
|
53da60e4ca | ||
|
|
727d9c6c22 | ||
|
|
908e1f600e | ||
|
|
270d800df2 | ||
|
|
d445d182ea | ||
|
|
476256c9e7 | ||
|
|
06ea7373f7 | ||
|
|
e78ba77def | ||
|
|
7113afe9e2 | ||
|
|
1a2b6524e6 | ||
|
|
95c4aa9e4c | ||
|
|
9f2518c9e1 | ||
|
|
76c50a654a | ||
|
|
ded2c38551 | ||
|
|
772094eacd | ||
|
|
bddc2f6295 | ||
|
|
25dce2961b | ||
|
|
653cfbe6e0 | ||
|
|
c539311083 | ||
|
|
60118c5d5b | ||
|
|
c6b9b36566 | ||
|
|
fd5b1b7c00 | ||
|
|
ebc77b62c8 | ||
|
|
2ce888581f | ||
|
|
0e901b6404 | ||
|
|
688b9076e7 | ||
|
|
c6ec7579b6 | ||
|
|
9417edac8d | ||
|
|
6185cc79d7 | ||
|
|
4ecdba94c2 | ||
|
|
a11640d840 | ||
|
|
177a4c4095 | ||
|
|
63df19ab78 | ||
|
|
54e0eb5979 | ||
|
|
185284d422 | ||
|
|
ce240405d9 | ||
|
|
58b700ed0d | ||
|
|
d436fa4920 | ||
|
|
d58486193e | ||
|
|
e9404eb9b6 | ||
|
|
0fef4d515e | ||
|
|
86f9cf4376 | ||
|
|
acf63c57e8 | ||
|
|
baa956c3a1 | ||
|
|
bb2081a936 | ||
|
|
c984b0b9ae | ||
|
|
754bf8fa3c | ||
|
|
7840294517 | ||
|
|
caaee88654 | ||
|
|
e872ddc1e7 | ||
|
|
1eca9b4c2c | ||
|
|
fe5bd42e25 | ||
|
|
32d16d0673 | ||
|
|
27c26d35ab | ||
|
|
e04c919d78 | ||
|
|
246b6c44b0 | ||
|
|
847ddf7d38 | ||
|
|
16e8199f9e | ||
|
|
7d1519f546 | ||
|
|
1bf66ee482 | ||
|
|
39a43f4de5 | ||
|
|
971a511edb | ||
|
|
0f8e0bc2b4 | ||
|
|
537c44e354 | ||
|
|
db53a9a719 | ||
|
|
f4a10de790 | ||
|
|
8c9fe84d02 | ||
|
|
f0fcc77bdb | ||
|
|
cf4c4b7d69 | ||
|
|
7bb8499353 | ||
|
|
ee1a2bc7de | ||
|
|
20d383d4ab | ||
|
|
9cb0d8baf2 | ||
|
|
362ded3bc9 | ||
|
|
654f2ec7ad | ||
|
|
3600e034b8 | ||
|
|
d7c501e175 | ||
|
|
b9e9da579f | ||
|
|
7bea6b4a62 | ||
|
|
ab211266a6 | ||
|
|
4da22a4345 | ||
|
|
fbc1ff62c7 | ||
|
|
1fe72e1a66 | ||
|
|
f82d7610e3 | ||
|
|
bd6ad53875 | ||
|
|
5d09acca4c | ||
|
|
b4e7c4a4cd | ||
|
|
a6269084c0 | ||
|
|
8271d8423d | ||
|
|
c76e29c457 | ||
|
|
4750a7553b | ||
|
|
60786921a9 | ||
|
|
751bbcc127 | ||
|
|
58e8dd5456 | ||
|
|
1586c25847 | ||
|
|
cded5a7948 | ||
|
|
6238e065f2 | ||
|
|
72fbbfdd0d | ||
|
|
2796c1cd4d | ||
|
|
54c9886627 | ||
|
|
05713cb389 | ||
|
|
8bb3ee5f18 | ||
|
|
bc0b4825f1 | ||
|
|
ef7f17abf4 | ||
|
|
876cd21f0b | ||
|
|
5c92d49873 | ||
|
|
da47b573be | ||
|
|
2f04be8778 | ||
|
|
69178ddfd8 | ||
|
|
a310f6fff0 | ||
|
|
7474abe286 | ||
|
|
df2ba3a3c6 | ||
|
|
e536456236 | ||
|
|
8d77122da3 | ||
|
|
fb66effa51 | ||
|
|
5052e71c31 | ||
|
|
bfc78d16ca | ||
|
|
c425e3562b | ||
|
|
1f26092aa9 | ||
|
|
2849bb96f4 | ||
|
|
7b749f2a4c | ||
|
|
8803c94ce0 | ||
|
|
f5235c943b | ||
|
|
59fec889b5 | ||
|
|
f42f04a807 | ||
|
|
51f6f37925 | ||
|
|
9651a4ca98 | ||
|
|
2b7fd36322 | ||
|
|
b8014fd4df | ||
|
|
07460f6e1f | ||
|
|
f7bf3b2afb | ||
|
|
056f298cdf | ||
|
|
e83da53162 | ||
|
|
9f38a47a02 | ||
|
|
236a4d4a6d | ||
|
|
0909471510 | ||
|
|
05eaf59c89 | ||
|
|
7749613801 | ||
|
|
e3dbaedbb4 | ||
|
|
9f17ced6de | ||
|
|
de54ef871d | ||
|
|
b0da45d6d0 | ||
|
|
9b2a46fa92 | ||
|
|
12099d2db6 | ||
|
|
84fa75936a | ||
|
|
d78d8121a1 | ||
|
|
a9a3a52872 | ||
|
|
912e3bdfce | ||
|
|
ee1b25d9e8 | ||
|
|
20ef5e2c18 | ||
|
|
6ee419bc52 | ||
|
|
85b00d3c76 | ||
|
|
bc4ad31d48 | ||
|
|
71aad8ee32 | ||
|
|
8bb8231559 | ||
|
|
3cf9caae89 | ||
|
|
f983c67135 | ||
|
|
f2aef5b93f | ||
|
|
46d4288969 | ||
|
|
65516e872f | ||
|
|
171329246c | ||
|
|
b2bee699e0 | ||
|
|
95c66b4d67 | ||
|
|
babc8feb2b | ||
|
|
2f445c546a | ||
|
|
a0283b3e3e | ||
|
|
61bd156fb0 | ||
|
|
8ad0cf8e5f | ||
|
|
ecd6d70da6 | ||
|
|
359617d927 | ||
|
|
38c286329a | ||
|
|
401b4095cc | ||
|
|
06ab1a8ef0 | ||
|
|
726fb8b015 | ||
|
|
b3b5c7a59f | ||
|
|
d18f934978 | ||
|
|
e67f1f79bc | ||
|
|
e931829411 | ||
|
|
db8ebd606c | ||
|
|
072a358a94 | ||
|
|
6ceb1b150c | ||
|
|
a4e03e1877 | ||
|
|
02b3e4277b | ||
|
|
37daf801e6 | ||
|
|
68d9f7eeb2 | ||
|
|
526e2420ca | ||
|
|
a9cc58fc28 | ||
|
|
77889ce1c6 | ||
|
|
549073119e | ||
|
|
5c5af5795f | ||
|
|
68e10934e4 | ||
|
|
c67bb1444a | ||
|
|
07389a152e | ||
|
|
e562e21555 | ||
|
|
86dfe7dd3f | ||
|
|
ac0a8f3449 | ||
|
|
8e4a63db67 | ||
|
|
c02c63806f | ||
|
|
42e5d7f6e9 | ||
|
|
d8cf1af422 | ||
|
|
9723661c80 | ||
|
|
81cba7ad97 | ||
|
|
c23f58de40 | ||
|
|
2cf67ca7da | ||
|
|
392bd850ea | ||
|
|
3b2ad9d1bd | ||
|
|
27b7474180 | ||
|
|
63948d728e | ||
|
|
d219d3b873 | ||
|
|
93ab290bc1 | ||
|
|
7335c5d79a | ||
|
|
242ead722a | ||
|
|
8a6d9696a8 | ||
|
|
896b7ea242 | ||
|
|
0c7f4c7828 | ||
|
|
3d35af2a87 | ||
|
|
fed3c36f84 | ||
|
|
414d81aa40 | ||
|
|
d548803769 | ||
|
|
1180258394 | ||
|
|
48a566a24b | ||
|
|
3bc5d1df81 | ||
|
|
c7222e2e86 |
73
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
73
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -7,32 +7,32 @@ body:
|
|||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
## DankMaterialShell Bug Report
|
## DankMaterialShell Bug Report
|
||||||
Limit your report to one issue per submission unless closely related
|
Limit your report to one issue per submission unless similarly related
|
||||||
- type: checkboxes
|
- type: dropdown
|
||||||
id: compositor
|
id: compositor
|
||||||
attributes:
|
attributes:
|
||||||
label: Compositor
|
label: Compositor
|
||||||
options:
|
options:
|
||||||
- label: Niri
|
- Niri
|
||||||
- label: Hyprland
|
- Hyprland
|
||||||
- label: MangoWC (dwl)
|
- MangoWC (dwl)
|
||||||
- label: Sway
|
- Sway
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: checkboxes
|
- type: dropdown
|
||||||
id: distribution
|
id: distribution
|
||||||
attributes:
|
attributes:
|
||||||
label: Distribution
|
label: Distribution
|
||||||
options:
|
options:
|
||||||
- label: Arch Linux
|
- Arch Linux
|
||||||
- label: CachyOS
|
- CachyOS
|
||||||
- label: Fedora
|
- Fedora
|
||||||
- label: NixOS
|
- NixOS
|
||||||
- label: Debian
|
- Debian
|
||||||
- label: Ubuntu
|
- Ubuntu
|
||||||
- label: Gentoo
|
- Gentoo
|
||||||
- label: OpenSUSE
|
- OpenSUSE
|
||||||
- label: Other (specify below)
|
- Other (specify below)
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: input
|
- type: input
|
||||||
@@ -42,12 +42,45 @@ body:
|
|||||||
placeholder: e.g., PikaOS, Void Linux, etc.
|
placeholder: e.g., PikaOS, Void Linux, etc.
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
|
- type: dropdown
|
||||||
|
id: installation_method
|
||||||
|
attributes:
|
||||||
|
label: Select your Installation Method
|
||||||
|
options:
|
||||||
|
- DankInstaller
|
||||||
|
- Distro Packaging
|
||||||
|
- Source
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: dropdown
|
||||||
|
id: original_installation_method
|
||||||
|
attributes:
|
||||||
|
label: Was this your original Installation method?
|
||||||
|
options:
|
||||||
|
- "Yes"
|
||||||
|
- No (specify below)
|
||||||
|
default: 0
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: input
|
||||||
|
id: original_installation_method_specify
|
||||||
|
attributes:
|
||||||
|
label: If no, specify
|
||||||
|
placeholder: e.g., Distro Packaging, then Source
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: dms_doctor
|
id: dms_doctor
|
||||||
attributes:
|
attributes:
|
||||||
label: dms doctor -v
|
label: dms doctor -vC
|
||||||
description: Output of `dms doctor -v` command
|
description: Output of `dms doctor -vC` command — paste between the details tags below to keep it collapsed in the issue
|
||||||
placeholder: Paste the output of `dms doctor -v` here
|
placeholder: Paste the output of `dms doctor -vC` here
|
||||||
|
value: |
|
||||||
|
<details>
|
||||||
|
<summary>Click to expand</summary>
|
||||||
|
|
||||||
|
|
||||||
|
</details>
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
@@ -69,7 +102,7 @@ body:
|
|||||||
- type: textarea
|
- type: textarea
|
||||||
id: steps_to_reproduce
|
id: steps_to_reproduce
|
||||||
attributes:
|
attributes:
|
||||||
label: Steps to Reproduce & Installation Method
|
label: Steps to Reproduce
|
||||||
description: Please provide detailed steps to reproduce the issue
|
description: Please provide detailed steps to reproduce the issue
|
||||||
placeholder: |
|
placeholder: |
|
||||||
1. ...
|
1. ...
|
||||||
|
|||||||
21
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
21
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -23,18 +23,25 @@ body:
|
|||||||
placeholder: Why is this feature important?
|
placeholder: Why is this feature important?
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
- type: checkboxes
|
- type: dropdown
|
||||||
id: compositor
|
id: compositor
|
||||||
attributes:
|
attributes:
|
||||||
label: Compositor(s)
|
label: Compositor(s)
|
||||||
description: Is this feature specific to one or more compositors?
|
description: Is this feature specific to one or more compositors?
|
||||||
options:
|
options:
|
||||||
- label: All compositors
|
- All compositors
|
||||||
- label: Niri
|
- Niri
|
||||||
- label: Hyprland
|
- Hyprland
|
||||||
- label: MangoWC (dwl)
|
- MangoWC (dwl)
|
||||||
- label: Sway
|
- Sway
|
||||||
- label: Other (specify below)
|
- Other (specify below)
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: input
|
||||||
|
id: compositor_other
|
||||||
|
attributes:
|
||||||
|
label: If Other, please specify
|
||||||
|
placeholder: e.g., Wayfire, Mutter, etc.
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
|
|||||||
79
.github/ISSUE_TEMPLATE/support_request.yml
vendored
79
.github/ISSUE_TEMPLATE/support_request.yml
vendored
@@ -7,32 +7,87 @@ body:
|
|||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
## DankMaterialShell Support Request
|
## DankMaterialShell Support Request
|
||||||
- type: checkboxes
|
- type: dropdown
|
||||||
id: compositor
|
id: compositor
|
||||||
attributes:
|
attributes:
|
||||||
label: Compositor
|
label: Compositor
|
||||||
options:
|
options:
|
||||||
- label: Niri
|
- Niri
|
||||||
- label: Hyprland
|
- Hyprland
|
||||||
- label: MangoWC (dwl)
|
- MangoWC (dwl)
|
||||||
- label: Sway
|
- Sway
|
||||||
- label: Other (specify below)
|
- Other (specify below)
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: input
|
||||||
|
id: compositor_other
|
||||||
|
attributes:
|
||||||
|
label: If Other, please specify
|
||||||
|
placeholder: e.g., Wayfire, Mutter, etc.
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
- type: input
|
- type: dropdown
|
||||||
id: distribution
|
id: distribution
|
||||||
attributes:
|
attributes:
|
||||||
label: Distribution
|
label: Distribution
|
||||||
description: Which Linux distribution are you using? (e.g., Arch, Fedora, Debian, etc.)
|
options:
|
||||||
placeholder: Your Linux distribution
|
- Arch Linux
|
||||||
|
- CachyOS
|
||||||
|
- Fedora
|
||||||
|
- NixOS
|
||||||
|
- Debian
|
||||||
|
- Ubuntu
|
||||||
|
- Gentoo
|
||||||
|
- OpenSUSE
|
||||||
|
- Other (specify below)
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: input
|
||||||
|
id: distribution_other
|
||||||
|
attributes:
|
||||||
|
label: If Other, please specify
|
||||||
|
placeholder: e.g., PikaOS, Void Linux, etc.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: dropdown
|
||||||
|
id: installation_method
|
||||||
|
attributes:
|
||||||
|
label: Select your Installation Method
|
||||||
|
options:
|
||||||
|
- DankInstaller
|
||||||
|
- Distro Packaging
|
||||||
|
- Source
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: dropdown
|
||||||
|
id: original_installation_method_different
|
||||||
|
attributes:
|
||||||
|
label: Was your original Installation method different?
|
||||||
|
options:
|
||||||
|
- "Yes"
|
||||||
|
- No (specify below)
|
||||||
|
default: 0
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: input
|
||||||
|
id: original_installation_method_specify
|
||||||
|
attributes:
|
||||||
|
label: If no, specify
|
||||||
|
placeholder: e.g., Distro Packaging, then Source
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: dms_doctor
|
id: dms_doctor
|
||||||
attributes:
|
attributes:
|
||||||
label: dms doctor -v
|
label: dms doctor -vC
|
||||||
description: Output of `dms doctor -v` command
|
description: Output of `dms doctor -vC` command — paste between the lines below to keep it collapsed in the issue
|
||||||
placeholder: Paste the output of `dms doctor -v` here
|
placeholder: Paste the output of `dms doctor -vC` here
|
||||||
|
value: |
|
||||||
|
<details>
|
||||||
|
<summary>Click to expand</summary>
|
||||||
|
|
||||||
|
|
||||||
|
</details>
|
||||||
validations:
|
validations:
|
||||||
required: false
|
required: false
|
||||||
- type: textarea
|
- type: textarea
|
||||||
|
|||||||
383
.github/workflows/backup/run-obs.yml.bak
vendored
383
.github/workflows/backup/run-obs.yml.bak
vendored
@@ -1,383 +0,0 @@
|
|||||||
name: Update OBS Packages
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
package:
|
|
||||||
description: "Package to update (dms, dms-git, or all)"
|
|
||||||
required: false
|
|
||||||
default: "all"
|
|
||||||
force_upload:
|
|
||||||
description: "Force upload without version check"
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- "false"
|
|
||||||
- "true"
|
|
||||||
rebuild_release:
|
|
||||||
description: "Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)"
|
|
||||||
required: false
|
|
||||||
default: ""
|
|
||||||
push:
|
|
||||||
tags:
|
|
||||||
- "v*"
|
|
||||||
schedule:
|
|
||||||
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-updates:
|
|
||||||
name: Check for updates
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
has_updates: ${{ steps.check.outputs.has_updates }}
|
|
||||||
packages: ${{ steps.check.outputs.packages }}
|
|
||||||
version: ${{ steps.check.outputs.version }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Install OSC
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y osc
|
|
||||||
|
|
||||||
mkdir -p ~/.config/osc
|
|
||||||
cat > ~/.config/osc/oscrc << EOF
|
|
||||||
[general]
|
|
||||||
apiurl = https://api.opensuse.org
|
|
||||||
|
|
||||||
[https://api.opensuse.org]
|
|
||||||
user = ${{ secrets.OBS_USERNAME }}
|
|
||||||
pass = ${{ secrets.OBS_PASSWORD }}
|
|
||||||
EOF
|
|
||||||
chmod 600 ~/.config/osc/oscrc
|
|
||||||
|
|
||||||
- name: Check for updates
|
|
||||||
id: check
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
|
||||||
echo "packages=dms" >> $GITHUB_OUTPUT
|
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
|
||||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "Triggered by tag: $VERSION (always update)"
|
|
||||||
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
|
|
||||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
|
||||||
echo "Checking if dms-git source has changed..."
|
|
||||||
|
|
||||||
# Get current commit hash (8 chars to match spec format)
|
|
||||||
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
|
|
||||||
|
|
||||||
# Check OBS for last uploaded commit
|
|
||||||
OBS_BASE="$HOME/.cache/osc-checkouts"
|
|
||||||
mkdir -p "$OBS_BASE"
|
|
||||||
OBS_PROJECT="home:AvengeMedia:dms-git"
|
|
||||||
|
|
||||||
if [[ -d "$OBS_BASE/$OBS_PROJECT/dms-git" ]]; then
|
|
||||||
cd "$OBS_BASE/$OBS_PROJECT/dms-git"
|
|
||||||
osc up -q 2>/dev/null || true
|
|
||||||
|
|
||||||
# Extract commit hash from spec Version line & format like; 0.6.2+git2264.a679be68
|
|
||||||
if [[ -f "dms-git.spec" ]]; then
|
|
||||||
OBS_COMMIT=$(grep "^Version:" "dms-git.spec" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
|
|
||||||
|
|
||||||
if [[ -n "$OBS_COMMIT" ]]; then
|
|
||||||
if [[ "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
|
|
||||||
echo "has_updates=false" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 Commit $CURRENT_COMMIT already uploaded to OBS, skipping"
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 New commit detected: $CURRENT_COMMIT (OBS has $OBS_COMMIT)"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 Could not extract OBS commit, proceeding with update"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 No spec file in OBS, proceeding with update"
|
|
||||||
fi
|
|
||||||
|
|
||||||
cd "${{ github.workspace }}"
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 First upload to OBS, update needed"
|
|
||||||
fi
|
|
||||||
elif [[ "${{ github.event.inputs.force_upload }}" == "true" ]]; then
|
|
||||||
PKG="${{ github.event.inputs.package }}"
|
|
||||||
if [[ -z "$PKG" || "$PKG" == "all" ]]; then
|
|
||||||
echo "packages=all" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "🚀 Force upload: all packages"
|
|
||||||
else
|
|
||||||
echo "packages=$PKG" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "🚀 Force upload: $PKG"
|
|
||||||
fi
|
|
||||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
|
||||||
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "Manual trigger: ${{ github.event.inputs.package }}"
|
|
||||||
else
|
|
||||||
echo "packages=all" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
update-obs:
|
|
||||||
name: Upload to OBS
|
|
||||||
needs: check-updates
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
if: |
|
|
||||||
github.event.inputs.force_upload == 'true' ||
|
|
||||||
github.event_name == 'workflow_dispatch' ||
|
|
||||||
needs.check-updates.outputs.has_updates == 'true'
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Generate GitHub App Token
|
|
||||||
id: generate_token
|
|
||||||
uses: actions/create-github-app-token@v1
|
|
||||||
with:
|
|
||||||
app-id: ${{ secrets.APP_ID }}
|
|
||||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ steps.generate_token.outputs.token }}
|
|
||||||
|
|
||||||
- name: Check if last commit was automated
|
|
||||||
id: check-loop
|
|
||||||
run: |
|
|
||||||
LAST_COMMIT_MSG=$(git log -1 --pretty=%B | head -1)
|
|
||||||
if [[ "$LAST_COMMIT_MSG" == "ci: Auto-update PPA packages"* ]] || [[ "$LAST_COMMIT_MSG" == "ci: Auto-update OBS packages"* ]]; then
|
|
||||||
echo "⏭️ Last commit was automated ($LAST_COMMIT_MSG), skipping to prevent infinite loop"
|
|
||||||
echo "skip=true" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
echo "✅ Last commit was not automated, proceeding"
|
|
||||||
echo "skip=false" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Determine packages to update
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
id: packages
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
|
||||||
echo "packages=dms" >> $GITHUB_OUTPUT
|
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
|
||||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
|
||||||
echo "Triggered by tag: $VERSION"
|
|
||||||
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
|
|
||||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "Triggered by schedule: updating git package"
|
|
||||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
|
||||||
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "Manual trigger: ${{ github.event.inputs.package }}"
|
|
||||||
else
|
|
||||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Update dms-git spec version
|
|
||||||
if: steps.check-loop.outputs.skip != 'true' && (contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all')
|
|
||||||
run: |
|
|
||||||
# Get commit info for dms-git versioning
|
|
||||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
|
||||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
|
||||||
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
|
|
||||||
|
|
||||||
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
|
|
||||||
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
|
|
||||||
|
|
||||||
# Update version in spec
|
|
||||||
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
|
|
||||||
|
|
||||||
# Add changelog entry
|
|
||||||
DATE_STR=$(date "+%a %b %d %Y")
|
|
||||||
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
|
|
||||||
sed -i "/%changelog/a\\$CHANGELOG_ENTRY" distro/opensuse/dms-git.spec
|
|
||||||
|
|
||||||
- name: Update Debian dms-git changelog version
|
|
||||||
if: steps.check-loop.outputs.skip != 'true' && (contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all')
|
|
||||||
run: |
|
|
||||||
# Get commit info for dms-git versioning
|
|
||||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
|
||||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
|
||||||
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
|
|
||||||
|
|
||||||
# Debian version format: 0.6.2+git2256.9162e314
|
|
||||||
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
|
|
||||||
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
|
|
||||||
|
|
||||||
CHANGELOG_DATE=$(date -R)
|
|
||||||
|
|
||||||
CHANGELOG_FILE="distro/debian/dms-git/debian/changelog"
|
|
||||||
|
|
||||||
# Get current version from changelog
|
|
||||||
CURRENT_VERSION=$(head -1 "$CHANGELOG_FILE" | sed 's/.*(\([^)]*\)).*/\1/')
|
|
||||||
|
|
||||||
echo "Current Debian version: $CURRENT_VERSION"
|
|
||||||
echo "New version: $NEW_VERSION"
|
|
||||||
|
|
||||||
# Only update if version changed
|
|
||||||
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
|
|
||||||
# Create new changelog entry at top
|
|
||||||
TEMP_CHANGELOG=$(mktemp)
|
|
||||||
|
|
||||||
cat > "$TEMP_CHANGELOG" << EOF
|
|
||||||
dms-git ($NEW_VERSION) nightly; urgency=medium
|
|
||||||
|
|
||||||
* Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)
|
|
||||||
|
|
||||||
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
|
|
||||||
|
|
||||||
EOF
|
|
||||||
|
|
||||||
# Prepend to existing changelog
|
|
||||||
cat "$CHANGELOG_FILE" >> "$TEMP_CHANGELOG"
|
|
||||||
mv "$TEMP_CHANGELOG" "$CHANGELOG_FILE"
|
|
||||||
|
|
||||||
echo "✓ Updated Debian changelog: $CURRENT_VERSION → $NEW_VERSION"
|
|
||||||
else
|
|
||||||
echo "✓ Debian changelog already at version $NEW_VERSION"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Update dms stable version
|
|
||||||
if: steps.check-loop.outputs.skip != 'true' && steps.packages.outputs.version != ''
|
|
||||||
run: |
|
|
||||||
VERSION="${{ steps.packages.outputs.version }}"
|
|
||||||
VERSION_NO_V="${VERSION#v}"
|
|
||||||
echo "Updating packaging to version $VERSION_NO_V"
|
|
||||||
|
|
||||||
# Update openSUSE dms spec (stable only)
|
|
||||||
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
|
|
||||||
|
|
||||||
# Update openSUSE spec changelog
|
|
||||||
DATE_STR=$(date "+%a %b %d %Y")
|
|
||||||
CHANGELOG_ENTRY="* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1\\n- Update to stable $VERSION release\\n- Bug fixes and improvements"
|
|
||||||
sed -i "/%changelog/a\\$CHANGELOG_ENTRY\\n" distro/opensuse/dms.spec
|
|
||||||
|
|
||||||
# Update Debian _service files (both tar_scm and download_url formats)
|
|
||||||
for service in distro/debian/*/_service; do
|
|
||||||
if [[ -f "$service" ]]; then
|
|
||||||
# Update tar_scm revision parameter (for dms-git)
|
|
||||||
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
|
|
||||||
|
|
||||||
# Update download_url paths (for dms stable)
|
|
||||||
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
|
|
||||||
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
# Update Debian changelog for dms stable
|
|
||||||
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
|
|
||||||
CHANGELOG_DATE=$(date -R)
|
|
||||||
TEMP_CHANGELOG=$(mktemp)
|
|
||||||
|
|
||||||
cat > "$TEMP_CHANGELOG" << EOF
|
|
||||||
dms ($VERSION_NO_V) stable; urgency=medium
|
|
||||||
|
|
||||||
* Update to $VERSION stable release
|
|
||||||
* Bug fixes and improvements
|
|
||||||
|
|
||||||
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
|
|
||||||
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat "distro/debian/dms/debian/changelog" >> "$TEMP_CHANGELOG"
|
|
||||||
mv "$TEMP_CHANGELOG" "distro/debian/dms/debian/changelog"
|
|
||||||
|
|
||||||
echo "✓ Updated Debian changelog to $VERSION_NO_V"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Install Go
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
go-version: "1.24"
|
|
||||||
|
|
||||||
- name: Install OSC
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y osc
|
|
||||||
|
|
||||||
mkdir -p ~/.config/osc
|
|
||||||
cat > ~/.config/osc/oscrc << EOF
|
|
||||||
[general]
|
|
||||||
apiurl = https://api.opensuse.org
|
|
||||||
|
|
||||||
[https://api.opensuse.org]
|
|
||||||
user = ${{ secrets.OBS_USERNAME }}
|
|
||||||
pass = ${{ secrets.OBS_PASSWORD }}
|
|
||||||
EOF
|
|
||||||
chmod 600 ~/.config/osc/oscrc
|
|
||||||
|
|
||||||
- name: Upload to OBS
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
env:
|
|
||||||
FORCE_UPLOAD: ${{ github.event.inputs.force_upload }}
|
|
||||||
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
|
|
||||||
run: |
|
|
||||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
|
||||||
MESSAGE="Automated update from GitHub Actions"
|
|
||||||
|
|
||||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
|
||||||
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$PACKAGES" == "all" ]]; then
|
|
||||||
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
|
|
||||||
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
|
|
||||||
else
|
|
||||||
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Get changed packages
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
id: changed-packages
|
|
||||||
run: |
|
|
||||||
# Check if there are any changes to commit
|
|
||||||
if git diff --exit-code distro/debian/ distro/opensuse/ >/dev/null 2>&1; then
|
|
||||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 No changelog or spec changes to commit"
|
|
||||||
else
|
|
||||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
|
||||||
# Get list of changed packages for commit message
|
|
||||||
CHANGED_DEB=$(git diff --name-only distro/debian/ 2>/dev/null | grep 'debian/changelog' | xargs dirname 2>/dev/null | xargs dirname 2>/dev/null | xargs basename 2>/dev/null | tr '\n' ', ' | sed 's/, $//' || echo "")
|
|
||||||
CHANGED_SUSE=$(git diff --name-only distro/opensuse/ 2>/dev/null | grep '\.spec$' | sed 's|distro/opensuse/||' | sed 's/\.spec$//' | tr '\n' ', ' | sed 's/, $//' || echo "")
|
|
||||||
|
|
||||||
PKGS=$(echo "$CHANGED_DEB,$CHANGED_SUSE" | tr ',' '\n' | grep -v '^$' | sort -u | tr '\n' ',' | sed 's/,$//')
|
|
||||||
echo "packages=$PKGS" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 Changed packages: $PKGS"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Commit packaging changes
|
|
||||||
if: steps.check-loop.outputs.skip != 'true' && steps.changed-packages.outputs.has_changes == 'true'
|
|
||||||
run: |
|
|
||||||
git config user.name "dms-ci[bot]"
|
|
||||||
git config user.email "dms-ci[bot]@users.noreply.github.com"
|
|
||||||
git add distro/debian/*/debian/changelog distro/opensuse/*.spec
|
|
||||||
git commit -m "ci: Auto-update OBS packages [${{ steps.changed-packages.outputs.packages }}]" -m "🤖 Automated by GitHub Actions"
|
|
||||||
git pull --rebase origin master
|
|
||||||
git push
|
|
||||||
|
|
||||||
- name: Summary
|
|
||||||
run: |
|
|
||||||
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
|
|
||||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
|
||||||
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
|
|
||||||
fi
|
|
||||||
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
|
|
||||||
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
|
|
||||||
fi
|
|
||||||
echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY
|
|
||||||
298
.github/workflows/backup/run-ppa.yml.bak
vendored
298
.github/workflows/backup/run-ppa.yml.bak
vendored
@@ -1,298 +0,0 @@
|
|||||||
name: Update PPA Packages
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
package:
|
|
||||||
description: "Package to upload (dms, dms-git, dms-greeter, or all)"
|
|
||||||
required: false
|
|
||||||
default: "dms-git"
|
|
||||||
force_upload:
|
|
||||||
description: "Force upload without version check"
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- "false"
|
|
||||||
- "true"
|
|
||||||
rebuild_release:
|
|
||||||
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
|
|
||||||
required: false
|
|
||||||
default: ""
|
|
||||||
schedule:
|
|
||||||
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-updates:
|
|
||||||
name: Check for updates
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
has_updates: ${{ steps.check.outputs.has_updates }}
|
|
||||||
packages: ${{ steps.check.outputs.packages }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Check for updates
|
|
||||||
id: check
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event_name }}" == "schedule" ]]; then
|
|
||||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
|
||||||
echo "Checking if dms-git source has changed..."
|
|
||||||
|
|
||||||
# Get current commit hash (8 chars to match changelog format)
|
|
||||||
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
|
|
||||||
|
|
||||||
# Extract commit hash from changelog
|
|
||||||
# Format: dms-git (0.6.2+git2264.c5c5ce84) questing; urgency=medium
|
|
||||||
CHANGELOG_FILE="distro/ubuntu/dms-git/debian/changelog"
|
|
||||||
|
|
||||||
if [[ -f "$CHANGELOG_FILE" ]]; then
|
|
||||||
CHANGELOG_COMMIT=$(head -1 "$CHANGELOG_FILE" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
|
|
||||||
|
|
||||||
if [[ -n "$CHANGELOG_COMMIT" ]]; then
|
|
||||||
if [[ "$CURRENT_COMMIT" == "$CHANGELOG_COMMIT" ]]; then
|
|
||||||
echo "has_updates=false" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 Commit $CURRENT_COMMIT already in changelog, skipping upload"
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 New commit detected: $CURRENT_COMMIT (changelog has $CHANGELOG_COMMIT)"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 Could not extract commit from changelog, proceeding with upload"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 No changelog file found, proceeding with upload"
|
|
||||||
fi
|
|
||||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
|
||||||
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
echo "Manual trigger: ${{ github.event.inputs.package }}"
|
|
||||||
else
|
|
||||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
upload-ppa:
|
|
||||||
name: Upload to PPA
|
|
||||||
needs: check-updates
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
if: |
|
|
||||||
github.event.inputs.force_upload == 'true' ||
|
|
||||||
github.event_name == 'workflow_dispatch' ||
|
|
||||||
needs.check-updates.outputs.has_updates == 'true'
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Generate GitHub App Token
|
|
||||||
id: generate_token
|
|
||||||
uses: actions/create-github-app-token@v1
|
|
||||||
with:
|
|
||||||
app-id: ${{ secrets.APP_ID }}
|
|
||||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ steps.generate_token.outputs.token }}
|
|
||||||
|
|
||||||
- name: Check if last commit was automated
|
|
||||||
id: check-loop
|
|
||||||
run: |
|
|
||||||
LAST_COMMIT_MSG=$(git log -1 --pretty=%B | head -1)
|
|
||||||
if [[ "$LAST_COMMIT_MSG" == "ci: Auto-update PPA packages"* ]] || [[ "$LAST_COMMIT_MSG" == "ci: Auto-update OBS packages"* ]]; then
|
|
||||||
echo "⏭️ Last commit was automated ($LAST_COMMIT_MSG), skipping to prevent infinite loop"
|
|
||||||
echo "skip=true" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
echo "✅ Last commit was not automated, proceeding"
|
|
||||||
echo "skip=false" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Set up Go
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
go-version: "1.24"
|
|
||||||
cache: false
|
|
||||||
|
|
||||||
- name: Install build dependencies
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y \
|
|
||||||
debhelper \
|
|
||||||
devscripts \
|
|
||||||
dput \
|
|
||||||
lftp \
|
|
||||||
build-essential \
|
|
||||||
fakeroot \
|
|
||||||
dpkg-dev
|
|
||||||
|
|
||||||
- name: Configure GPG
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
env:
|
|
||||||
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
|
|
||||||
run: |
|
|
||||||
echo "$GPG_KEY" | gpg --import
|
|
||||||
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
|
|
||||||
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Determine packages to upload
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
id: packages
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event.inputs.force_upload }}" == "true" ]]; then
|
|
||||||
PKG="${{ github.event.inputs.package }}"
|
|
||||||
if [[ -z "$PKG" || "$PKG" == "all" ]]; then
|
|
||||||
echo "packages=all" >> $GITHUB_OUTPUT
|
|
||||||
echo "🚀 Force upload: all packages"
|
|
||||||
else
|
|
||||||
echo "packages=$PKG" >> $GITHUB_OUTPUT
|
|
||||||
echo "🚀 Force upload: $PKG"
|
|
||||||
fi
|
|
||||||
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
|
|
||||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "Triggered by schedule: uploading git package"
|
|
||||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
|
||||||
# Manual package selection should respect change detection
|
|
||||||
SELECTED_PKG="${{ github.event.inputs.package }}"
|
|
||||||
UPDATED_PKG="${{ needs.check-updates.outputs.packages }}"
|
|
||||||
|
|
||||||
# Check if manually selected package is in the updated list
|
|
||||||
if [[ "$UPDATED_PKG" == *"$SELECTED_PKG"* ]] || [[ "$SELECTED_PKG" == "all" ]]; then
|
|
||||||
echo "packages=$SELECTED_PKG" >> $GITHUB_OUTPUT
|
|
||||||
echo "📦 Manual selection (has updates): $SELECTED_PKG"
|
|
||||||
else
|
|
||||||
echo "packages=" >> $GITHUB_OUTPUT
|
|
||||||
echo "⚠️ Manual selection '$SELECTED_PKG' has no updates - skipping (use force_upload to override)"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Upload to PPA
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
run: |
|
|
||||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
|
||||||
REBUILD_RELEASE="${{ github.event.inputs.rebuild_release }}"
|
|
||||||
|
|
||||||
if [[ -z "$PACKAGES" ]]; then
|
|
||||||
echo "No packages selected for upload. Skipping."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Build command arguments
|
|
||||||
BUILD_ARGS=()
|
|
||||||
if [[ -n "$REBUILD_RELEASE" ]]; then
|
|
||||||
BUILD_ARGS+=("$REBUILD_RELEASE")
|
|
||||||
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$PACKAGES" == "all" ]]; then
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
echo "Uploading dms to PPA..."
|
|
||||||
if [ -n "$REBUILD_RELEASE" ]; then
|
|
||||||
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
|
|
||||||
fi
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
bash distro/scripts/ppa-upload.sh dms dms questing "${BUILD_ARGS[@]}"
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
echo "Uploading dms-git to PPA..."
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
bash distro/scripts/ppa-upload.sh dms-git dms-git questing "${BUILD_ARGS[@]}"
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
echo "Uploading dms-greeter to PPA..."
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
bash distro/scripts/ppa-upload.sh dms-greeter danklinux questing "${BUILD_ARGS[@]}"
|
|
||||||
else
|
|
||||||
# Map package to PPA name
|
|
||||||
case "$PACKAGES" in
|
|
||||||
dms)
|
|
||||||
PPA_NAME="dms"
|
|
||||||
;;
|
|
||||||
dms-git)
|
|
||||||
PPA_NAME="dms-git"
|
|
||||||
;;
|
|
||||||
dms-greeter)
|
|
||||||
PPA_NAME="danklinux"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
PPA_NAME="$PACKAGES"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
echo "Uploading $PACKAGES to PPA..."
|
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
||||||
bash distro/scripts/ppa-upload.sh "$PACKAGES" "$PPA_NAME" questing "${BUILD_ARGS[@]}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Get changed packages
|
|
||||||
if: steps.check-loop.outputs.skip != 'true'
|
|
||||||
id: changed-packages
|
|
||||||
run: |
|
|
||||||
# Check if there are any changelog changes to commit
|
|
||||||
if git diff --exit-code distro/ubuntu/ >/dev/null 2>&1; then
|
|
||||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 No changelog changes to commit"
|
|
||||||
else
|
|
||||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
|
||||||
# Get list of changed packages for commit message (deduplicate)
|
|
||||||
CHANGED=$(git diff --name-only distro/ubuntu/ | grep 'debian/changelog' | sed 's|/debian/changelog||' | xargs -I{} basename {} | sort -u | tr '\n' ',' | sed 's/,$//')
|
|
||||||
echo "packages=$CHANGED" >> $GITHUB_OUTPUT
|
|
||||||
echo "📋 Changed packages: $CHANGED"
|
|
||||||
echo "📋 Debug - Changed files:"
|
|
||||||
git diff --name-only distro/ubuntu/ | grep 'debian/changelog' || echo "No changelog files found"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Commit changelog changes
|
|
||||||
if: steps.check-loop.outputs.skip != 'true' && steps.changed-packages.outputs.has_changes == 'true'
|
|
||||||
run: |
|
|
||||||
git config user.name "dms-ci[bot]"
|
|
||||||
git config user.email "dms-ci[bot]@users.noreply.github.com"
|
|
||||||
git add distro/ubuntu/*/debian/changelog
|
|
||||||
git commit -m "ci: Auto-update PPA packages [${{ steps.changed-packages.outputs.packages }}]" -m "🤖 Automated by GitHub Actions"
|
|
||||||
git pull --rebase origin master
|
|
||||||
git push
|
|
||||||
|
|
||||||
- name: Summary
|
|
||||||
run: |
|
|
||||||
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
|
|
||||||
|
|
||||||
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
|
|
||||||
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
|
|
||||||
fi
|
|
||||||
|
|
||||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
|
||||||
if [[ "$PACKAGES" == "all" ]]; then
|
|
||||||
echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "- **PPA danklinux**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
|
|
||||||
elif [[ "$PACKAGES" == "dms" ]]; then
|
|
||||||
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
|
|
||||||
elif [[ "$PACKAGES" == "dms-git" ]]; then
|
|
||||||
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
|
|
||||||
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
|
|
||||||
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
|
||||||
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
|
|
||||||
fi
|
|
||||||
echo "" >> $GITHUB_STEP_SUMMARY
|
|
||||||
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY
|
|
||||||
2
.github/workflows/dms-stable.yml
vendored
2
.github/workflows/dms-stable.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
|||||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
token: ${{ steps.app_token.outputs.token }}
|
token: ${{ steps.app_token.outputs.token }}
|
||||||
|
|||||||
4
.github/workflows/go-ci.yml
vendored
4
.github/workflows/go-ci.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Install flatpak
|
- name: Install flatpak
|
||||||
run: sudo apt update && sudo apt install -y flatpak
|
run: sudo apt update && sudo apt install -y flatpak
|
||||||
@@ -38,7 +38,7 @@ jobs:
|
|||||||
run: sudo flatpak install -y org.freedesktop.Platform/x86_64/24.08 app.zen_browser.zen
|
run: sudo flatpak install -y org.freedesktop.Platform/x86_64/24.08 app.zen_browser.zen
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: ./core/go.mod
|
go-version-file: ./core/go.mod
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/nix-pr-check.yml
vendored
2
.github/workflows/nix-pr-check.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
|||||||
4
.github/workflows/prek.yml
vendored
4
.github/workflows/prek.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Install flatpak
|
- name: Install flatpak
|
||||||
run: sudo apt update && sudo apt install -y flatpak
|
run: sudo apt update && sudo apt install -y flatpak
|
||||||
@@ -21,7 +21,7 @@ jobs:
|
|||||||
run: sudo flatpak install -y org.freedesktop.Platform/x86_64/24.08 app.zen_browser.zen
|
run: sudo flatpak install -y org.freedesktop.Platform/x86_64/24.08 app.zen_browser.zen
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: core/go.mod
|
go-version-file: core/go.mod
|
||||||
|
|
||||||
|
|||||||
16
.github/workflows/release.yml
vendored
16
.github/workflows/release.yml
vendored
@@ -32,13 +32,13 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.tag }}
|
ref: ${{ inputs.tag }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: ./core/go.mod
|
go-version-file: ./core/go.mod
|
||||||
|
|
||||||
@@ -106,7 +106,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload artifacts (${{ matrix.arch }})
|
- name: Upload artifacts (${{ matrix.arch }})
|
||||||
if: matrix.arch == 'arm64'
|
if: matrix.arch == 'arm64'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v5
|
||||||
with:
|
with:
|
||||||
name: core-assets-${{ matrix.arch }}
|
name: core-assets-${{ matrix.arch }}
|
||||||
path: |
|
path: |
|
||||||
@@ -120,7 +120,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Upload artifacts with completions
|
- name: Upload artifacts with completions
|
||||||
if: matrix.arch == 'amd64'
|
if: matrix.arch == 'amd64'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v5
|
||||||
with:
|
with:
|
||||||
name: core-assets-${{ matrix.arch }}
|
name: core-assets-${{ matrix.arch }}
|
||||||
path: |
|
path: |
|
||||||
@@ -147,7 +147,7 @@ jobs:
|
|||||||
# private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
# private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||||
|
|
||||||
# - name: Checkout
|
# - name: Checkout
|
||||||
# uses: actions/checkout@v4
|
# uses: actions/checkout@v6
|
||||||
# with:
|
# with:
|
||||||
# token: ${{ steps.app_token.outputs.token }}
|
# token: ${{ steps.app_token.outputs.token }}
|
||||||
# fetch-depth: 0
|
# fetch-depth: 0
|
||||||
@@ -181,7 +181,7 @@ jobs:
|
|||||||
TAG: ${{ inputs.tag }}
|
TAG: ${{ inputs.tag }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.tag }}
|
ref: ${{ inputs.tag }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -192,12 +192,12 @@ jobs:
|
|||||||
git checkout ${TAG}
|
git checkout ${TAG}
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: ./core/go.mod
|
go-version-file: ./core/go.mod
|
||||||
|
|
||||||
- name: Download core artifacts
|
- name: Download core artifacts
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v5
|
||||||
with:
|
with:
|
||||||
pattern: core-assets-*
|
pattern: core-assets-*
|
||||||
merge-multiple: true
|
merge-multiple: true
|
||||||
|
|||||||
4
.github/workflows/run-copr.yml
vendored
4
.github/workflows/run-copr.yml
vendored
@@ -46,7 +46,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Determine version
|
- name: Determine version
|
||||||
id: version
|
id: version
|
||||||
@@ -134,7 +134,7 @@ jobs:
|
|||||||
rpm -qpi "$SRPM"
|
rpm -qpi "$SRPM"
|
||||||
|
|
||||||
- name: Upload SRPM artifact
|
- name: Upload SRPM artifact
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v5
|
||||||
with:
|
with:
|
||||||
name: ${{ matrix.package }}-stable-srpm-${{ steps.version.outputs.version }}
|
name: ${{ matrix.package }}-stable-srpm-${{ steps.version.outputs.version }}
|
||||||
path: ${{ steps.build.outputs.srpm_path }}
|
path: ${{ steps.build.outputs.srpm_path }}
|
||||||
|
|||||||
265
.github/workflows/run-obs.yml
vendored
265
.github/workflows/run-obs.yml
vendored
@@ -9,6 +9,7 @@ on:
|
|||||||
type: choice
|
type: choice
|
||||||
options:
|
options:
|
||||||
- dms
|
- dms
|
||||||
|
- dms-greeter
|
||||||
- dms-git
|
- dms-git
|
||||||
- all
|
- all
|
||||||
default: "dms"
|
default: "dms"
|
||||||
@@ -31,7 +32,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
@@ -72,12 +73,27 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Helper function to check dms-greeter stable tag
|
||||||
|
check_dms_greeter_stable() {
|
||||||
|
LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "\([^"]*\)".*/\1/' || echo "")
|
||||||
|
local OBS_SPEC=$(curl -s -u "$OBS_USERNAME:$OBS_PASSWORD" "https://api.opensuse.org/source/home:AvengeMedia:danklinux/dms-greeter/dms-greeter.spec" 2>/dev/null || echo "")
|
||||||
|
local OBS_VERSION=$(echo "$OBS_SPEC" | grep "^Version:" | awk '{print $2}' | xargs | sed 's/^v//')
|
||||||
|
|
||||||
|
if [[ -n "$LATEST_TAG" && "$LATEST_TAG" == "v$OBS_VERSION" ]]; then
|
||||||
|
echo "📋 dms-greeter: Tag $LATEST_TAG already exists, skipping"
|
||||||
|
return 1 # No update needed
|
||||||
|
else
|
||||||
|
echo "📋 dms-greeter: New tag ${LATEST_TAG:-unknown} (OBS has ${OBS_VERSION:-none})"
|
||||||
|
return 0 # Update needed
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
# Main logic
|
# Main logic
|
||||||
REBUILD="${{ github.event.inputs.rebuild_release }}"
|
REBUILD="${{ github.event.inputs.rebuild_release }}"
|
||||||
|
|
||||||
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]] && [[ -z "${{ github.event.inputs.package }}" ]]; then
|
||||||
# Tag selected or pushed - always update stable package
|
# Run from tag with no package specified - update both stable packages
|
||||||
echo "packages=dms" >> $GITHUB_OUTPUT
|
echo "packages=dms dms-greeter" >> $GITHUB_OUTPUT
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
VERSION="${GITHUB_REF#refs/tags/}"
|
||||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||||
@@ -103,15 +119,18 @@ jobs:
|
|||||||
echo "🔄 Manual rebuild requested: $PKG (db$REBUILD)"
|
echo "🔄 Manual rebuild requested: $PKG (db$REBUILD)"
|
||||||
|
|
||||||
elif [[ "$PKG" == "all" ]]; then
|
elif [[ "$PKG" == "all" ]]; then
|
||||||
# Check each package and build list of those needing updates
|
# Check each stable package and build list of those needing updates
|
||||||
PACKAGES_TO_UPDATE=()
|
PACKAGES_TO_UPDATE=()
|
||||||
check_dms_git && PACKAGES_TO_UPDATE+=("dms-git")
|
|
||||||
if check_dms_stable; then
|
if check_dms_stable; then
|
||||||
PACKAGES_TO_UPDATE+=("dms")
|
PACKAGES_TO_UPDATE+=("dms")
|
||||||
if [[ -n "$LATEST_TAG" ]]; then
|
if [[ -n "$LATEST_TAG" ]]; then
|
||||||
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
if check_dms_greeter_stable; then
|
||||||
|
PACKAGES_TO_UPDATE+=("dms-greeter")
|
||||||
|
[[ -n "$LATEST_TAG" ]] && echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
if [[ ${#PACKAGES_TO_UPDATE[@]} -gt 0 ]]; then
|
if [[ ${#PACKAGES_TO_UPDATE[@]} -gt 0 ]]; then
|
||||||
echo "packages=${PACKAGES_TO_UPDATE[*]}" >> $GITHUB_OUTPUT
|
echo "packages=${PACKAGES_TO_UPDATE[*]}" >> $GITHUB_OUTPUT
|
||||||
@@ -120,7 +139,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "packages=" >> $GITHUB_OUTPUT
|
echo "packages=" >> $GITHUB_OUTPUT
|
||||||
echo "has_updates=false" >> $GITHUB_OUTPUT
|
echo "has_updates=false" >> $GITHUB_OUTPUT
|
||||||
echo "✓ All packages up to date"
|
echo "✓ Both packages up to date"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
elif [[ "$PKG" == "dms-git" ]]; then
|
elif [[ "$PKG" == "dms-git" ]]; then
|
||||||
@@ -144,6 +163,18 @@ jobs:
|
|||||||
echo "has_updates=false" >> $GITHUB_OUTPUT
|
echo "has_updates=false" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
elif [[ "$PKG" == "dms-greeter" ]]; then
|
||||||
|
if check_dms_greeter_stable; then
|
||||||
|
echo "packages=$PKG" >> $GITHUB_OUTPUT
|
||||||
|
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||||
|
if [[ -n "$LATEST_TAG" ]]; then
|
||||||
|
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "packages=" >> $GITHUB_OUTPUT
|
||||||
|
echo "has_updates=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
else
|
else
|
||||||
# Unknown package - proceed anyway
|
# Unknown package - proceed anyway
|
||||||
echo "packages=$PKG" >> $GITHUB_OUTPUT
|
echo "packages=$PKG" >> $GITHUB_OUTPUT
|
||||||
@@ -164,22 +195,18 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Wait before OBS upload
|
||||||
|
run: sleep 3
|
||||||
|
|
||||||
- name: Determine packages to update
|
- name: Determine packages to update
|
||||||
id: packages
|
id: packages
|
||||||
run: |
|
run: |
|
||||||
# Check if GITHUB_REF points to a tag (works for both push events and workflow_dispatch with tag selected)
|
# Use check-updates outputs when available
|
||||||
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
if [[ -n "${{ needs.check-updates.outputs.version }}" ]]; then
|
||||||
# Tag selected or pushed - use the tag from GITHUB_REF
|
|
||||||
echo "packages=dms" >> $GITHUB_OUTPUT
|
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
|
||||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
|
||||||
echo "Using tag from GITHUB_REF: $VERSION"
|
|
||||||
# Check if check-updates already determined a version (from auto-detection)
|
|
||||||
elif [[ -n "${{ needs.check-updates.outputs.version }}" ]]; then
|
|
||||||
# Use version from check-updates job
|
# Use version from check-updates job
|
||||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
||||||
echo "version=${{ needs.check-updates.outputs.version }}" >> $GITHUB_OUTPUT
|
echo "version=${{ needs.check-updates.outputs.version }}" >> $GITHUB_OUTPUT
|
||||||
@@ -191,40 +218,16 @@ jobs:
|
|||||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
||||||
# Manual workflow dispatch
|
# Manual workflow dispatch
|
||||||
|
|
||||||
# Determine version for dms stable
|
# Determine version for dms stable and dms-greeter using the API
|
||||||
if [[ "${{ github.event.inputs.package }}" == "dms" ]]; then
|
# GITHUB_REF is unreliable when "Use workflow from" a tag; API works from any ref
|
||||||
# Use github.ref if tag selected, otherwise auto-detect latest
|
if [[ "${{ github.event.inputs.package }}" == "dms" ]] || [[ "${{ github.event.inputs.package }}" == "dms-greeter" ]] || [[ "${{ github.event.inputs.package }}" == "all" ]]; then
|
||||||
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "\([^"]*\)".*/\1/' || echo "")
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
if [[ -n "$LATEST_TAG" ]]; then
|
||||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||||
echo "Using tag from GITHUB_REF: $VERSION"
|
echo "Using latest release from API: $LATEST_TAG"
|
||||||
else
|
else
|
||||||
# Auto-detect latest release for dms
|
echo "ERROR: Could not fetch latest release from API"
|
||||||
LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "\([^"]*\)".*/\1/' || echo "")
|
exit 1
|
||||||
if [[ -n "$LATEST_TAG" ]]; then
|
|
||||||
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
|
||||||
echo "Auto-detected latest release: $LATEST_TAG"
|
|
||||||
else
|
|
||||||
echo "ERROR: Could not auto-detect latest release"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
elif [[ "${{ github.event.inputs.package }}" == "all" ]]; then
|
|
||||||
# Use github.ref if tag selected, otherwise auto-detect latest
|
|
||||||
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
|
||||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
|
||||||
echo "Using tag from GITHUB_REF: $VERSION"
|
|
||||||
else
|
|
||||||
# Auto-detect latest release for "all"
|
|
||||||
LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "\([^"]*\)".*/\1/' || echo "")
|
|
||||||
if [[ -n "$LATEST_TAG" ]]; then
|
|
||||||
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
|
|
||||||
echo "Auto-detected latest release: $LATEST_TAG"
|
|
||||||
else
|
|
||||||
echo "ERROR: Could not auto-detect latest release"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -244,7 +247,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Update dms-git spec version
|
- name: Update dms-git spec version
|
||||||
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
|
if: contains(steps.packages.outputs.packages, 'dms-git')
|
||||||
run: |
|
run: |
|
||||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
||||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
COMMIT_COUNT=$(git rev-list --count HEAD)
|
||||||
@@ -265,7 +268,7 @@ jobs:
|
|||||||
} > distro/opensuse/dms-git.spec
|
} > distro/opensuse/dms-git.spec
|
||||||
|
|
||||||
- name: Update Debian dms-git changelog version
|
- name: Update Debian dms-git changelog version
|
||||||
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
|
if: contains(steps.packages.outputs.packages, 'dms-git')
|
||||||
run: |
|
run: |
|
||||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
||||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
COMMIT_COUNT=$(git rev-list --count HEAD)
|
||||||
@@ -283,57 +286,68 @@ jobs:
|
|||||||
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
|
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
|
||||||
} > "distro/debian/dms-git/debian/changelog"
|
} > "distro/debian/dms-git/debian/changelog"
|
||||||
|
|
||||||
- name: Update dms stable version
|
- name: Update stable version (dms + dms-greeter)
|
||||||
if: steps.packages.outputs.version != ''
|
if: steps.packages.outputs.version != ''
|
||||||
run: |
|
run: |
|
||||||
VERSION="${{ steps.packages.outputs.version }}"
|
VERSION="${{ steps.packages.outputs.version }}"
|
||||||
VERSION_NO_V="${VERSION#v}"
|
VERSION_NO_V="${VERSION#v}"
|
||||||
|
PACKAGES="${{ steps.packages.outputs.packages }}"
|
||||||
echo "==> Updating packaging files to version: $VERSION_NO_V"
|
echo "==> Updating packaging files to version: $VERSION_NO_V"
|
||||||
|
|
||||||
# Update spec file
|
# Update dms spec and changelog when dms is in the upload list
|
||||||
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
|
if [[ "$PACKAGES" == *"dms"* ]]; then
|
||||||
|
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
|
||||||
|
UPDATED_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1)
|
||||||
|
echo "✓ dms spec now shows Version: $UPDATED_VERSION"
|
||||||
|
|
||||||
# Verify the update
|
DATE_STR=$(date "+%a %b %d %Y")
|
||||||
UPDATED_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1)
|
LOCAL_SPEC_HEAD=$(sed -n '1,/%changelog/{ /%changelog/d; p }' distro/opensuse/dms.spec)
|
||||||
echo "✓ Spec file now shows Version: $UPDATED_VERSION"
|
{
|
||||||
|
echo "$LOCAL_SPEC_HEAD"
|
||||||
|
echo "%changelog"
|
||||||
|
echo "* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1"
|
||||||
|
echo "- Update to stable $VERSION release"
|
||||||
|
} > distro/opensuse/dms.spec
|
||||||
|
|
||||||
# Single changelog entry (full history on OBS website)
|
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
|
||||||
DATE_STR=$(date "+%a %b %d %Y")
|
CHANGELOG_DATE=$(date -R)
|
||||||
LOCAL_SPEC_HEAD=$(sed -n '1,/%changelog/{ /%changelog/d; p }' distro/opensuse/dms.spec)
|
{
|
||||||
{
|
echo "dms (${VERSION_NO_V}db1) stable; urgency=medium"
|
||||||
echo "$LOCAL_SPEC_HEAD"
|
echo ""
|
||||||
echo "%changelog"
|
echo " * Update to $VERSION stable release"
|
||||||
echo "* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1"
|
echo ""
|
||||||
echo "- Update to stable $VERSION release"
|
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
|
||||||
} > distro/opensuse/dms.spec
|
} > "distro/debian/dms/debian/changelog"
|
||||||
|
echo "✓ Updated dms changelog to ${VERSION_NO_V}db1"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
# Update Debian _service files (both tar_scm and download_url formats)
|
# Update dms-greeter changelog when dms-greeter is in the upload list
|
||||||
|
if [[ "$PACKAGES" == *"dms-greeter"* ]] && [[ -f "distro/debian/dms-greeter/debian/changelog" ]]; then
|
||||||
|
CHANGELOG_DATE=$(date -R)
|
||||||
|
{
|
||||||
|
echo "dms-greeter (${VERSION_NO_V}db1) unstable; urgency=medium"
|
||||||
|
echo ""
|
||||||
|
echo " * Update to $VERSION stable release"
|
||||||
|
echo ""
|
||||||
|
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
|
||||||
|
} > "distro/debian/dms-greeter/debian/changelog"
|
||||||
|
echo "✓ Updated dms-greeter changelog to ${VERSION_NO_V}db1"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Update Debian _service files for packages in upload list (download_url paths)
|
||||||
for service in distro/debian/*/_service; do
|
for service in distro/debian/*/_service; do
|
||||||
if [[ -f "$service" ]]; then
|
if [[ -f "$service" ]]; then
|
||||||
# Update tar_scm revision parameter (for dms-git)
|
# Update tar_scm revision parameter (for dms-git)
|
||||||
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
|
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
|
||||||
|
# Update download_url paths (for dms, dms-greeter stable)
|
||||||
# Update download_url paths (for dms stable)
|
|
||||||
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
|
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
|
||||||
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
|
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
# Update Debian changelog for dms stable (single entry, history on OBS website)
|
|
||||||
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
|
|
||||||
CHANGELOG_DATE=$(date -R)
|
|
||||||
{
|
|
||||||
echo "dms (${VERSION_NO_V}db1) stable; urgency=medium"
|
|
||||||
echo ""
|
|
||||||
echo " * Update to $VERSION stable release"
|
|
||||||
echo ""
|
|
||||||
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
|
|
||||||
} > "distro/debian/dms/debian/changelog"
|
|
||||||
echo "✓ Updated Debian changelog to ${VERSION_NO_V}db1"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Install Go
|
- name: Install Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: ./core/go.mod
|
go-version-file: ./core/go.mod
|
||||||
|
|
||||||
@@ -354,6 +368,7 @@ jobs:
|
|||||||
chmod 600 ~/.config/osc/oscrc
|
chmod 600 ~/.config/osc/oscrc
|
||||||
|
|
||||||
- name: Upload to OBS
|
- name: Upload to OBS
|
||||||
|
id: upload
|
||||||
env:
|
env:
|
||||||
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
|
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
|
||||||
TAG_VERSION: ${{ steps.packages.outputs.version }}
|
TAG_VERSION: ${{ steps.packages.outputs.version }}
|
||||||
@@ -362,6 +377,8 @@ jobs:
|
|||||||
|
|
||||||
if [[ -z "$PACKAGES" ]]; then
|
if [[ -z "$PACKAGES" ]]; then
|
||||||
echo "✓ No packages need uploading. All up to date!"
|
echo "✓ No packages need uploading. All up to date!"
|
||||||
|
echo "uploaded_packages=" >> $GITHUB_OUTPUT
|
||||||
|
echo "skipped_packages=" >> $GITHUB_OUTPUT
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -371,7 +388,10 @@ jobs:
|
|||||||
echo "==> Version being uploaded: ${{ steps.packages.outputs.version }}"
|
echo "==> Version being uploaded: ${{ steps.packages.outputs.version }}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# PACKAGES can be space-separated list (e.g., "dms-git dms" from "all" check)
|
UPLOADED_PACKAGES=()
|
||||||
|
SKIPPED_PACKAGES=()
|
||||||
|
|
||||||
|
# PACKAGES can be space-separated list (e.g., "dms dms-greeter" from "all" check)
|
||||||
# Loop through each package and upload
|
# Loop through each package and upload
|
||||||
for PKG in $PACKAGES; do
|
for PKG in $PACKAGES; do
|
||||||
echo ""
|
echo ""
|
||||||
@@ -382,13 +402,37 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||||
|
|
||||||
|
LOG_FILE=$(mktemp)
|
||||||
|
set +e
|
||||||
if [[ "$PKG" == "dms-git" ]]; then
|
if [[ "$PKG" == "dms-git" ]]; then
|
||||||
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
|
bash distro/scripts/obs-upload.sh dms-git "Automated git update" >"$LOG_FILE" 2>&1
|
||||||
else
|
else
|
||||||
bash distro/scripts/obs-upload.sh "$PKG" "$MESSAGE"
|
bash distro/scripts/obs-upload.sh "$PKG" "$MESSAGE" >"$LOG_FILE" 2>&1
|
||||||
fi
|
fi
|
||||||
|
STATUS=$?
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cat "$LOG_FILE"
|
||||||
|
|
||||||
|
if [[ $STATUS -ne 0 ]]; then
|
||||||
|
rm -f "$LOG_FILE"
|
||||||
|
echo "❌ Upload failed for $PKG"
|
||||||
|
exit $STATUS
|
||||||
|
fi
|
||||||
|
|
||||||
|
if grep -Eq "Exiting gracefully \(no changes needed\)|No changes needed for this package\. Exiting gracefully\." "$LOG_FILE"; then
|
||||||
|
echo "ℹ️ $PKG is already up to date. Skipped."
|
||||||
|
SKIPPED_PACKAGES+=("$PKG")
|
||||||
|
else
|
||||||
|
UPLOADED_PACKAGES+=("$PKG")
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -f "$LOG_FILE"
|
||||||
done
|
done
|
||||||
|
|
||||||
|
echo "uploaded_packages=${UPLOADED_PACKAGES[*]}" >> $GITHUB_OUTPUT
|
||||||
|
echo "skipped_packages=${SKIPPED_PACKAGES[*]}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Summary
|
- name: Summary
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
@@ -402,20 +446,59 @@ jobs:
|
|||||||
echo "" >> $GITHUB_STEP_SUMMARY
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
echo "All packages are current. Run completed successfully." >> $GITHUB_STEP_SUMMARY
|
echo "All packages are current. Run completed successfully." >> $GITHUB_STEP_SUMMARY
|
||||||
else
|
else
|
||||||
echo "**Packages Uploaded:**" >> $GITHUB_STEP_SUMMARY
|
UPLOADED_PACKAGES="${{ steps.upload.outputs.uploaded_packages }}"
|
||||||
|
SKIPPED_PACKAGES="${{ steps.upload.outputs.skipped_packages }}"
|
||||||
|
TOTAL_COUNT=$(wc -w <<<"$PACKAGES" | tr -d ' ')
|
||||||
|
UPLOADED_COUNT=0
|
||||||
|
SKIPPED_COUNT=0
|
||||||
|
if [[ -n "$UPLOADED_PACKAGES" ]]; then
|
||||||
|
UPLOADED_COUNT=$(wc -w <<<"$UPLOADED_PACKAGES" | tr -d ' ')
|
||||||
|
fi
|
||||||
|
if [[ -n "$SKIPPED_PACKAGES" ]]; then
|
||||||
|
SKIPPED_COUNT=$(wc -w <<<"$SKIPPED_PACKAGES" | tr -d ' ')
|
||||||
|
fi
|
||||||
|
in_list() {
|
||||||
|
local item="$1"
|
||||||
|
local list="$2"
|
||||||
|
[[ " $list " == *" $item "* ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
if [[ "${{ job.status }}" == "success" ]]; then
|
||||||
|
echo "**Status:** ✅ Completed successfully" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "**Status:** ❌ Completed with errors" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Processed:** $TOTAL_COUNT package(s)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Uploaded:** $UPLOADED_COUNT package(s)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Skipped (up to date):** $SKIPPED_COUNT package(s)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Packages:**" >> $GITHUB_STEP_SUMMARY
|
||||||
echo "" >> $GITHUB_STEP_SUMMARY
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
for PKG in $PACKAGES; do
|
for PKG in $PACKAGES; do
|
||||||
|
STATUS_ICON="✅"
|
||||||
|
STATUS_TEXT="uploaded"
|
||||||
|
if in_list "$PKG" "$SKIPPED_PACKAGES"; then
|
||||||
|
STATUS_ICON="ℹ️"
|
||||||
|
STATUS_TEXT="up to date (skipped)"
|
||||||
|
elif ! in_list "$PKG" "$UPLOADED_PACKAGES"; then
|
||||||
|
STATUS_ICON="❌"
|
||||||
|
STATUS_TEXT="failed"
|
||||||
|
fi
|
||||||
|
|
||||||
case "$PKG" in
|
case "$PKG" in
|
||||||
dms)
|
dms)
|
||||||
echo "- ✅ **dms** → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms/dms)" >> $GITHUB_STEP_SUMMARY
|
echo "- $STATUS_ICON **dms** ($STATUS_TEXT) → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms/dms)" >> $GITHUB_STEP_SUMMARY
|
||||||
;;
|
;;
|
||||||
dms-git)
|
dms-git)
|
||||||
echo "- ✅ **dms-git** → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms-git/dms-git)" >> $GITHUB_STEP_SUMMARY
|
echo "- $STATUS_ICON **dms-git** ($STATUS_TEXT) → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms-git/dms-git)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
;;
|
||||||
|
dms-greeter)
|
||||||
|
echo "- $STATUS_ICON **dms-greeter** ($STATUS_TEXT) → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:danklinux/dms-greeter)" >> $GITHUB_STEP_SUMMARY
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
done
|
done
|
||||||
|
|
||||||
echo "" >> $GITHUB_STEP_SUMMARY
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
if [[ -n "${{ github.event.inputs.rebuild_release }}" ]]; then
|
if [[ -n "${{ github.event.inputs.rebuild_release }}" ]]; then
|
||||||
|
|||||||
28
.github/workflows/run-ppa.yml
vendored
28
.github/workflows/run-ppa.yml
vendored
@@ -4,9 +4,15 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
package:
|
package:
|
||||||
description: "Package to upload (dms, dms-git, dms-greeter, or all)"
|
description: "Package to upload"
|
||||||
required: false
|
required: true
|
||||||
default: "dms-git"
|
type: choice
|
||||||
|
options:
|
||||||
|
- dms
|
||||||
|
- dms-greeter
|
||||||
|
- dms-git
|
||||||
|
- all
|
||||||
|
default: "dms"
|
||||||
rebuild_release:
|
rebuild_release:
|
||||||
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
|
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
|
||||||
required: false
|
required: false
|
||||||
@@ -25,7 +31,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
@@ -139,7 +145,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
# Fallback
|
# Fallback
|
||||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
echo "packages=dms" >> $GITHUB_OUTPUT
|
||||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -151,12 +157,12 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: ./core/go.mod
|
go-version-file: ./core/go.mod
|
||||||
cache: false
|
cache: false
|
||||||
@@ -209,7 +215,7 @@ jobs:
|
|||||||
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
|
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# PACKAGES can be space-separated list (e.g., "dms-git dms" from "all" check)
|
# PACKAGES can be space-separated list (e.g., "dms-git dms dms-greeter" from "all" check)
|
||||||
# Loop through each package and upload
|
# Loop through each package and upload
|
||||||
for PKG in $PACKAGES; do
|
for PKG in $PACKAGES; do
|
||||||
# Map package to PPA name
|
# Map package to PPA name
|
||||||
@@ -236,7 +242,11 @@ jobs:
|
|||||||
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
|
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
|
||||||
fi
|
fi
|
||||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||||
bash distro/scripts/ppa-upload.sh "$PKG" "$PPA_NAME" questing ${REBUILD_RELEASE:+"$REBUILD_RELEASE"}
|
# ppa-upload.sh uploads to questing + resolute when series is omitted
|
||||||
|
if ! bash distro/scripts/ppa-upload.sh "$PKG" "$PPA_NAME" ${REBUILD_RELEASE:+"$REBUILD_RELEASE"}; then
|
||||||
|
echo "::error::Upload failed for $PKG"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
- name: Summary
|
- name: Summary
|
||||||
|
|||||||
8
.github/workflows/update-vendor-hash.yml
vendored
8
.github/workflows/update-vendor-hash.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
|||||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
token: ${{ steps.app_token.outputs.token }}
|
token: ${{ steps.app_token.outputs.token }}
|
||||||
@@ -40,7 +40,7 @@ jobs:
|
|||||||
echo "Build succeeded, no hash update needed"
|
echo "Build succeeded, no hash update needed"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
new_hash=$(echo "$output" | grep -oP "got:\s+\K\S+" | head -n1)
|
new_hash=$(echo "$output" | grep -oP "got:\s+\K\S+" | head -n1 || true)
|
||||||
[ -n "$new_hash" ] || { echo "Could not extract new vendorHash"; echo "$output"; exit 1; }
|
[ -n "$new_hash" ] || { echo "Could not extract new vendorHash"; echo "$output"; exit 1; }
|
||||||
current_hash=$(grep -oP 'vendorHash = "\K[^"]+' flake.nix)
|
current_hash=$(grep -oP 'vendorHash = "\K[^"]+' flake.nix)
|
||||||
[ "$current_hash" = "$new_hash" ] && { echo "vendorHash already up to date"; exit 0; }
|
[ "$current_hash" = "$new_hash" ] && { echo "vendorHash already up to date"; exit 0; }
|
||||||
@@ -59,8 +59,8 @@ jobs:
|
|||||||
git config user.email "dms-ci[bot]@users.noreply.github.com"
|
git config user.email "dms-ci[bot]@users.noreply.github.com"
|
||||||
git add flake.nix
|
git add flake.nix
|
||||||
git commit -m "nix: update vendorHash for go.mod changes" || exit 0
|
git commit -m "nix: update vendorHash for go.mod changes" || exit 0
|
||||||
git pull --rebase origin master
|
git pull --rebase origin ${{ github.ref_name }}
|
||||||
git push https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git HEAD:master
|
git push https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }}.git HEAD:${{ github.ref_name }}
|
||||||
else
|
else
|
||||||
echo "No changes to flake.nix"
|
echo "No changes to flake.nix"
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -5,11 +5,13 @@ repos:
|
|||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
- id: check-yaml
|
- id: check-yaml
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
- repo: https://github.com/shellcheck-py/shellcheck-py
|
- repo: local
|
||||||
rev: v0.10.0.1
|
|
||||||
hooks:
|
hooks:
|
||||||
- id: shellcheck
|
- id: shellcheck
|
||||||
args: [-e, SC2164, -e, SC2001, -e, SC2012, -e, SC2317]
|
name: shellcheck
|
||||||
|
entry: shellcheck -e SC2164 -e SC2001 -e SC2012 -e SC2317
|
||||||
|
language: system
|
||||||
|
types: [shell]
|
||||||
- repo: local
|
- repo: local
|
||||||
hooks:
|
hooks:
|
||||||
- id: go-mod-tidy
|
- id: go-mod-tidy
|
||||||
|
|||||||
@@ -1,5 +1,13 @@
|
|||||||
This file is more of a quick reference so I know what to account for before next releases.
|
This file is more of a quick reference so I know what to account for before next releases.
|
||||||
|
|
||||||
|
# 1.5.0
|
||||||
|
- Overhauled shadows
|
||||||
|
- App ID changed to com.danklinux.dms - breaking for window rules
|
||||||
|
- Greeter stuff
|
||||||
|
- Terminal mux
|
||||||
|
- Locale overrides
|
||||||
|
- new neovim theming
|
||||||
|
|
||||||
# 1.4.0
|
# 1.4.0
|
||||||
|
|
||||||
- Overhauled system monitor, graphs, styling
|
- Overhauled system monitor, graphs, styling
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ nix develop
|
|||||||
|
|
||||||
This will provide:
|
This will provide:
|
||||||
|
|
||||||
- Go 1.24 toolchain (go, gopls, delve, go-tools) and GNU Make
|
- Go 1.25+ toolchain (go, gopls, delve, go-tools) and GNU Make
|
||||||
- Quickshell and required QML packages
|
- Quickshell and required QML packages
|
||||||
- Properly configured QML2_IMPORT_PATH
|
- Properly configured QML2_IMPORT_PATH
|
||||||
|
|
||||||
@@ -86,7 +86,9 @@ touch .qmlls.ini
|
|||||||
|
|
||||||
4. Restart dms to generate the `.qmlls.ini` file
|
4. Restart dms to generate the `.qmlls.ini` file
|
||||||
|
|
||||||
5. Make your changes, test, and open a pull request.
|
5. Run `make lint-qml` from the repo root to lint QML entrypoints (requires the `.qmlls.ini` generated above). The script needs the **Qt 6** `qmllint`; it checks `qmllint6`, Fedora's `qmllint-qt6`, `/usr/lib/qt6/bin/qmllint`, then `qmllint` in `PATH`. If your Qt 6 binary lives elsewhere, set `QMLLINT=/path/to/qmllint`.
|
||||||
|
|
||||||
|
6. Make your changes, test, and open a pull request.
|
||||||
|
|
||||||
### I18n/Localization
|
### I18n/Localization
|
||||||
|
|
||||||
|
|||||||
8
Makefile
8
Makefile
@@ -18,7 +18,7 @@ SHELL_INSTALL_DIR=$(DATA_DIR)/quickshell/dms
|
|||||||
ASSETS_DIR=assets
|
ASSETS_DIR=assets
|
||||||
APPLICATIONS_DIR=$(DATA_DIR)/applications
|
APPLICATIONS_DIR=$(DATA_DIR)/applications
|
||||||
|
|
||||||
.PHONY: all build clean install install-bin install-shell install-completions install-systemd install-icon install-desktop uninstall uninstall-bin uninstall-shell uninstall-completions uninstall-systemd uninstall-icon uninstall-desktop help
|
.PHONY: all build clean lint-qml install install-bin install-shell install-completions install-systemd install-icon install-desktop uninstall uninstall-bin uninstall-shell uninstall-completions uninstall-systemd uninstall-icon uninstall-desktop help
|
||||||
|
|
||||||
all: build
|
all: build
|
||||||
|
|
||||||
@@ -32,6 +32,9 @@ clean:
|
|||||||
@$(MAKE) -C $(CORE_DIR) clean
|
@$(MAKE) -C $(CORE_DIR) clean
|
||||||
@echo "Clean complete"
|
@echo "Clean complete"
|
||||||
|
|
||||||
|
lint-qml:
|
||||||
|
@./quickshell/scripts/qmllint-entrypoints.sh
|
||||||
|
|
||||||
# Installation targets
|
# Installation targets
|
||||||
install-bin:
|
install-bin:
|
||||||
@echo "Installing $(BINARY_NAME) to $(INSTALL_DIR)..."
|
@echo "Installing $(BINARY_NAME) to $(INSTALL_DIR)..."
|
||||||
@@ -76,7 +79,7 @@ install-desktop:
|
|||||||
@update-desktop-database -q $(APPLICATIONS_DIR) 2>/dev/null || true
|
@update-desktop-database -q $(APPLICATIONS_DIR) 2>/dev/null || true
|
||||||
@echo "Desktop entry installed"
|
@echo "Desktop entry installed"
|
||||||
|
|
||||||
install: build install-bin install-shell install-completions install-systemd install-icon install-desktop
|
install: install-bin install-shell install-completions install-systemd install-icon install-desktop
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Installation complete!"
|
@echo "Installation complete!"
|
||||||
@echo ""
|
@echo ""
|
||||||
@@ -130,6 +133,7 @@ help:
|
|||||||
@echo " all (default) - Build the DMS binary"
|
@echo " all (default) - Build the DMS binary"
|
||||||
@echo " build - Same as 'all'"
|
@echo " build - Same as 'all'"
|
||||||
@echo " clean - Clean build artifacts"
|
@echo " clean - Clean build artifacts"
|
||||||
|
@echo " lint-qml - Run qmllint on shell entrypoints using the Quickshell tooling VFS"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Install:"
|
@echo "Install:"
|
||||||
@echo " install - Build and install everything (requires sudo)"
|
@echo " install - Build and install everything (requires sudo)"
|
||||||
|
|||||||
@@ -28,6 +28,12 @@ packages:
|
|||||||
outpkg: mocks_brightness
|
outpkg: mocks_brightness
|
||||||
interfaces:
|
interfaces:
|
||||||
DBusConn:
|
DBusConn:
|
||||||
|
github.com/AvengeMedia/DankMaterialShell/core/internal/geolocation:
|
||||||
|
config:
|
||||||
|
dir: "internal/mocks/geolocation"
|
||||||
|
outpkg: mocks_geolocation
|
||||||
|
interfaces:
|
||||||
|
Client:
|
||||||
github.com/AvengeMedia/DankMaterialShell/core/internal/server/network:
|
github.com/AvengeMedia/DankMaterialShell/core/internal/server/network:
|
||||||
config:
|
config:
|
||||||
dir: "internal/mocks/network"
|
dir: "internal/mocks/network"
|
||||||
|
|||||||
@@ -1,13 +1,26 @@
|
|||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/golangci/golangci-lint
|
|
||||||
rev: v2.9.0
|
|
||||||
hooks:
|
|
||||||
- id: golangci-lint-fmt
|
|
||||||
require_serial: true
|
|
||||||
- id: golangci-lint-full
|
|
||||||
- id: golangci-lint-config-verify
|
|
||||||
- repo: local
|
- repo: local
|
||||||
hooks:
|
hooks:
|
||||||
|
- id: golangci-lint-fmt
|
||||||
|
name: golangci-lint-fmt
|
||||||
|
entry: go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.11.3 fmt
|
||||||
|
language: system
|
||||||
|
require_serial: true
|
||||||
|
types: [go]
|
||||||
|
pass_filenames: false
|
||||||
|
- id: golangci-lint-full
|
||||||
|
name: golangci-lint-full
|
||||||
|
entry: go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.11.3 run --fix
|
||||||
|
language: system
|
||||||
|
require_serial: true
|
||||||
|
types: [go]
|
||||||
|
pass_filenames: false
|
||||||
|
- id: golangci-lint-config-verify
|
||||||
|
name: golangci-lint-config-verify
|
||||||
|
entry: go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.11.3 config verify
|
||||||
|
language: system
|
||||||
|
files: \.golangci\.(?:yml|yaml|toml|json)
|
||||||
|
pass_filenames: false
|
||||||
- id: go-test
|
- id: go-test
|
||||||
name: go test
|
name: go test
|
||||||
entry: go test ./...
|
entry: go test ./...
|
||||||
|
|||||||
@@ -63,19 +63,19 @@ endif
|
|||||||
|
|
||||||
build-all: build dankinstall
|
build-all: build dankinstall
|
||||||
|
|
||||||
install: build
|
install:
|
||||||
@echo "Installing $(BINARY_NAME) to $(INSTALL_DIR)..."
|
@echo "Installing $(BINARY_NAME) to $(INSTALL_DIR)..."
|
||||||
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME) $(INSTALL_DIR)/$(BINARY_NAME)
|
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME) $(INSTALL_DIR)/$(BINARY_NAME)
|
||||||
@echo "Installation complete"
|
@echo "Installation complete"
|
||||||
|
|
||||||
install-all: build-all
|
install-all:
|
||||||
@echo "Installing $(BINARY_NAME) to $(INSTALL_DIR)..."
|
@echo "Installing $(BINARY_NAME) to $(INSTALL_DIR)..."
|
||||||
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME) $(INSTALL_DIR)/$(BINARY_NAME)
|
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME) $(INSTALL_DIR)/$(BINARY_NAME)
|
||||||
@echo "Installing $(BINARY_NAME_INSTALL) to $(INSTALL_DIR)..."
|
@echo "Installing $(BINARY_NAME_INSTALL) to $(INSTALL_DIR)..."
|
||||||
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME_INSTALL) $(INSTALL_DIR)/$(BINARY_NAME_INSTALL)
|
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME_INSTALL) $(INSTALL_DIR)/$(BINARY_NAME_INSTALL)
|
||||||
@echo "Installation complete"
|
@echo "Installation complete"
|
||||||
|
|
||||||
install-dankinstall: dankinstall
|
install-dankinstall:
|
||||||
@echo "Installing $(BINARY_NAME_INSTALL) to $(INSTALL_DIR)..."
|
@echo "Installing $(BINARY_NAME_INSTALL) to $(INSTALL_DIR)..."
|
||||||
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME_INSTALL) $(INSTALL_DIR)/$(BINARY_NAME_INSTALL)
|
@install -D -m 755 $(BUILD_DIR)/$(BINARY_NAME_INSTALL) $(INSTALL_DIR)/$(BINARY_NAME_INSTALL)
|
||||||
@echo "Installation complete"
|
@echo "Installation complete"
|
||||||
|
|||||||
@@ -96,7 +96,7 @@ The on-screen preview displays the selected format. JSON output includes hex, RG
|
|||||||
|
|
||||||
## Building
|
## Building
|
||||||
|
|
||||||
Requires Go 1.24+
|
Requires Go 1.25+
|
||||||
|
|
||||||
**Development build:**
|
**Development build:**
|
||||||
|
|
||||||
|
|||||||
10
core/cmd/dms/assets/cli-policy.default.json
Normal file
10
core/cmd/dms/assets/cli-policy.default.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"policy_version": 1,
|
||||||
|
"blocked_commands": [
|
||||||
|
"greeter install",
|
||||||
|
"greeter enable",
|
||||||
|
"greeter uninstall",
|
||||||
|
"setup"
|
||||||
|
],
|
||||||
|
"message": "This command is disabled on immutable/image-based systems. Use your distro-native workflow for system-level changes."
|
||||||
|
}
|
||||||
76
core/cmd/dms/commands_auth.go
Normal file
76
core/cmd/dms/commands_auth.go
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
|
sharedpam "github.com/AvengeMedia/DankMaterialShell/core/internal/pam"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var authCmd = &cobra.Command{
|
||||||
|
Use: "auth",
|
||||||
|
Short: "Manage DMS authentication sync",
|
||||||
|
Long: "Manage shared PAM/authentication setup for DMS greeter and lock screen",
|
||||||
|
}
|
||||||
|
|
||||||
|
var authSyncCmd = &cobra.Command{
|
||||||
|
Use: "sync",
|
||||||
|
Short: "Sync DMS authentication configuration",
|
||||||
|
Long: "Apply shared PAM/authentication changes for the lock screen and greeter based on current DMS settings",
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
yes, _ := cmd.Flags().GetBool("yes")
|
||||||
|
term, _ := cmd.Flags().GetBool("terminal")
|
||||||
|
if term {
|
||||||
|
if err := syncAuthInTerminal(yes); err != nil {
|
||||||
|
log.Fatalf("Error launching auth sync in terminal: %v", err)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := syncAuth(yes); err != nil {
|
||||||
|
log.Fatalf("Error syncing authentication: %v", err)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
authSyncCmd.Flags().BoolP("yes", "y", false, "Non-interactive mode: skip prompts")
|
||||||
|
authSyncCmd.Flags().BoolP("terminal", "t", false, "Run auth sync in a new terminal (for entering sudo password)")
|
||||||
|
}
|
||||||
|
|
||||||
|
func syncAuth(nonInteractive bool) error {
|
||||||
|
if !nonInteractive {
|
||||||
|
fmt.Println("=== DMS Authentication Sync ===")
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
logFunc := func(msg string) {
|
||||||
|
fmt.Println(msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := sharedpam.SyncAuthConfig(logFunc, "", sharedpam.SyncAuthOptions{}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !nonInteractive {
|
||||||
|
fmt.Println("\n=== Authentication Sync Complete ===")
|
||||||
|
fmt.Println("\nAuthentication changes have been applied.")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func syncAuthInTerminal(nonInteractive bool) error {
|
||||||
|
syncFlags := make([]string, 0, 1)
|
||||||
|
if nonInteractive {
|
||||||
|
syncFlags = append(syncFlags, "--yes")
|
||||||
|
}
|
||||||
|
|
||||||
|
shellSyncCmd := "dms auth sync"
|
||||||
|
if len(syncFlags) > 0 {
|
||||||
|
shellSyncCmd += " " + strings.Join(syncFlags, " ")
|
||||||
|
}
|
||||||
|
shellCmd := shellSyncCmd + `; echo; echo "Authentication sync finished. Closing in 3 seconds..."; sleep 3`
|
||||||
|
return runCommandInTerminal(shellCmd)
|
||||||
|
}
|
||||||
40
core/cmd/dms/commands_blur.go
Normal file
40
core/cmd/dms/commands_blur.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/blur"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var blurCmd = &cobra.Command{
|
||||||
|
Use: "blur",
|
||||||
|
Short: "Background blur utilities",
|
||||||
|
}
|
||||||
|
|
||||||
|
var blurCheckCmd = &cobra.Command{
|
||||||
|
Use: "check",
|
||||||
|
Short: "Check if the compositor supports background blur (ext-background-effect-v1)",
|
||||||
|
Args: cobra.NoArgs,
|
||||||
|
Run: runBlurCheck,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
blurCmd.AddCommand(blurCheckCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runBlurCheck(cmd *cobra.Command, args []string) {
|
||||||
|
supported, err := blur.ProbeSupport()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch supported {
|
||||||
|
case true:
|
||||||
|
fmt.Println("supported")
|
||||||
|
default:
|
||||||
|
fmt.Println("unsupported")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -222,16 +222,19 @@ func init() {
|
|||||||
|
|
||||||
func runClipCopy(cmd *cobra.Command, args []string) {
|
func runClipCopy(cmd *cobra.Command, args []string) {
|
||||||
var data []byte
|
var data []byte
|
||||||
|
copyFromStdin := false
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case len(args) > 0:
|
case len(args) > 0:
|
||||||
data = []byte(args[0])
|
data = []byte(args[0])
|
||||||
default:
|
case clipCopyDownload || clipCopyType == "__multi__":
|
||||||
var err error
|
var err error
|
||||||
data, err = io.ReadAll(os.Stdin)
|
data, err = io.ReadAll(os.Stdin)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("read stdin: %v", err)
|
log.Fatalf("read stdin: %v", err)
|
||||||
}
|
}
|
||||||
|
default:
|
||||||
|
copyFromStdin = true
|
||||||
}
|
}
|
||||||
|
|
||||||
if clipCopyDownload {
|
if clipCopyDownload {
|
||||||
@@ -257,6 +260,13 @@ func runClipCopy(cmd *cobra.Command, args []string) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if copyFromStdin {
|
||||||
|
if err := clipboard.CopyReader(os.Stdin, clipCopyType, clipCopyForeground, clipCopyPasteOnce); err != nil {
|
||||||
|
log.Fatalf("copy: %v", err)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if err := clipboard.CopyOpts(data, clipCopyType, clipCopyForeground, clipCopyPasteOnce); err != nil {
|
if err := clipboard.CopyOpts(data, clipCopyType, clipCopyForeground, clipCopyPasteOnce); err != nil {
|
||||||
log.Fatalf("copy: %v", err)
|
log.Fatalf("copy: %v", err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -37,6 +37,9 @@ Output format flags (mutually exclusive, default: --hex):
|
|||||||
--cmyk - CMYK values (C% M% Y% K%)
|
--cmyk - CMYK values (C% M% Y% K%)
|
||||||
--json - JSON with all formats
|
--json - JSON with all formats
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
--raw - Removes ANSI escape codes and background colors. Use this when piping to other commands
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
dms color pick # Pick color, output as hex
|
dms color pick # Pick color, output as hex
|
||||||
dms color pick --rgb # Output as RGB
|
dms color pick --rgb # Output as RGB
|
||||||
@@ -53,6 +56,7 @@ func init() {
|
|||||||
colorPickCmd.Flags().Bool("hsv", false, "Output as HSV (H S% V%)")
|
colorPickCmd.Flags().Bool("hsv", false, "Output as HSV (H S% V%)")
|
||||||
colorPickCmd.Flags().Bool("cmyk", false, "Output as CMYK (C% M% Y% K%)")
|
colorPickCmd.Flags().Bool("cmyk", false, "Output as CMYK (C% M% Y% K%)")
|
||||||
colorPickCmd.Flags().Bool("json", false, "Output all formats as JSON")
|
colorPickCmd.Flags().Bool("json", false, "Output all formats as JSON")
|
||||||
|
colorPickCmd.Flags().Bool("raw", false, "Removes ANSI escape codes and background colors. Use this when piping to other commands")
|
||||||
colorPickCmd.Flags().StringVarP(&colorOutputFmt, "output-format", "o", "", "Custom output format template")
|
colorPickCmd.Flags().StringVarP(&colorOutputFmt, "output-format", "o", "", "Custom output format template")
|
||||||
colorPickCmd.Flags().BoolVarP(&colorAutocopy, "autocopy", "a", false, "Copy result to clipboard")
|
colorPickCmd.Flags().BoolVarP(&colorAutocopy, "autocopy", "a", false, "Copy result to clipboard")
|
||||||
colorPickCmd.Flags().BoolVarP(&colorLowercase, "lowercase", "l", false, "Output hex in lowercase")
|
colorPickCmd.Flags().BoolVarP(&colorLowercase, "lowercase", "l", false, "Output hex in lowercase")
|
||||||
@@ -113,7 +117,15 @@ func runColorPick(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
if jsonOutput {
|
if jsonOutput {
|
||||||
fmt.Println(output)
|
fmt.Println(output)
|
||||||
} else if color.IsDark() {
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if raw, _ := cmd.Flags().GetBool("raw"); raw {
|
||||||
|
fmt.Printf("%s\n", output)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if color.IsDark() {
|
||||||
fmt.Printf("\033[48;2;%d;%d;%dm\033[97m %s \033[0m\n", color.R, color.G, color.B, output)
|
fmt.Printf("\033[48;2;%d;%d;%dm\033[97m %s \033[0m\n", color.R, color.G, color.B, output)
|
||||||
} else {
|
} else {
|
||||||
fmt.Printf("\033[48;2;%d;%d;%dm\033[30m %s \033[0m\n", color.R, color.G, color.B, output)
|
fmt.Printf("\033[48;2;%d;%d;%dm\033[30m %s \033[0m\n", color.R, color.G, color.B, output)
|
||||||
|
|||||||
@@ -64,9 +64,8 @@ var killCmd = &cobra.Command{
|
|||||||
}
|
}
|
||||||
|
|
||||||
var ipcCmd = &cobra.Command{
|
var ipcCmd = &cobra.Command{
|
||||||
Use: "ipc [target] [function] [args...]",
|
Use: "ipc [target] [function] [args...]",
|
||||||
Short: "Send IPC commands to running DMS shell",
|
Short: "Send IPC commands to running DMS shell",
|
||||||
PreRunE: findConfig,
|
|
||||||
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||||
_ = findConfig(cmd, args)
|
_ = findConfig(cmd, args)
|
||||||
return getShellIPCCompletions(args, toComplete), cobra.ShellCompDirectiveNoFileComp
|
return getShellIPCCompletions(args, toComplete), cobra.ShellCompDirectiveNoFileComp
|
||||||
@@ -525,5 +524,7 @@ func getCommonCommands() []*cobra.Command {
|
|||||||
doctorCmd,
|
doctorCmd,
|
||||||
configCmd,
|
configCmd,
|
||||||
dlCmd,
|
dlCmd,
|
||||||
|
randrCmd,
|
||||||
|
blurCmd,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -649,58 +649,104 @@ func checkI2CAvailability() checkResult {
|
|||||||
return checkResult{catOptionalFeatures, "I2C/DDC", statusOK, fmt.Sprintf("%d monitor(s) detected", len(devices)), "External monitor brightness control", doctorDocsURL + "#optional-features"}
|
return checkResult{catOptionalFeatures, "I2C/DDC", statusOK, fmt.Sprintf("%d monitor(s) detected", len(devices)), "External monitor brightness control", doctorDocsURL + "#optional-features"}
|
||||||
}
|
}
|
||||||
|
|
||||||
func checkKImageFormats() checkResult {
|
func checkImageFormatPlugins() []checkResult {
|
||||||
url := doctorDocsURL + "#optional-features"
|
url := doctorDocsURL + "#optional-features"
|
||||||
desc := "Extra image format support (AVIF, HEIF, JXL)"
|
|
||||||
|
|
||||||
pluginDir := findQtPluginDir()
|
pluginDirs := findQtPluginDirs()
|
||||||
if pluginDir == "" {
|
if len(pluginDirs) == 0 {
|
||||||
return checkResult{catOptionalFeatures, "kimageformats", statusInfo, "Cannot detect (qtpaths not found)", desc, url}
|
return []checkResult{
|
||||||
}
|
{catOptionalFeatures, "qt6-imageformats", statusInfo, "Cannot detect (plugin dir not found)", "WebP, TIFF, JP2 support", url},
|
||||||
|
{catOptionalFeatures, "kimageformats", statusInfo, "Cannot detect (plugin dir not found)", "AVIF, HEIF, JXL support", url},
|
||||||
imageFormatsDir := filepath.Join(pluginDir, "imageformats")
|
|
||||||
keyPlugins := []struct{ file, format string }{
|
|
||||||
{"kimg_avif.so", "AVIF"},
|
|
||||||
{"kimg_heif.so", "HEIF"},
|
|
||||||
{"kimg_jxl.so", "JXL"},
|
|
||||||
{"kimg_exr.so", "EXR"},
|
|
||||||
}
|
|
||||||
|
|
||||||
var found []string
|
|
||||||
for _, p := range keyPlugins {
|
|
||||||
if _, err := os.Stat(filepath.Join(imageFormatsDir, p.file)); err == nil {
|
|
||||||
found = append(found, p.format)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(found) == 0 {
|
type pluginCheck struct {
|
||||||
return checkResult{catOptionalFeatures, "kimageformats", statusWarn, "Not installed", desc, url}
|
name string
|
||||||
|
desc string
|
||||||
|
plugins []struct{ file, format string }
|
||||||
}
|
}
|
||||||
|
|
||||||
details := ""
|
checks := []pluginCheck{
|
||||||
if doctorVerbose {
|
{
|
||||||
details = fmt.Sprintf("Formats: %s (%s)", strings.Join(found, ", "), imageFormatsDir)
|
name: "qt6-imageformats",
|
||||||
|
desc: "WebP, TIFF, GIF, JP2 support",
|
||||||
|
plugins: []struct{ file, format string }{
|
||||||
|
{"libqwebp.so", "WebP"},
|
||||||
|
{"libqtiff.so", "TIFF"},
|
||||||
|
{"libqgif.so", "GIF"},
|
||||||
|
{"libqjp2.so", "JP2"},
|
||||||
|
{"libqicns.so", "ICNS"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "kimageformats",
|
||||||
|
desc: "AVIF, HEIF, JXL support",
|
||||||
|
plugins: []struct{ file, format string }{
|
||||||
|
{"kimg_avif.so", "AVIF"},
|
||||||
|
{"kimg_heif.so", "HEIF"},
|
||||||
|
{"kimg_jxl.so", "JXL"},
|
||||||
|
{"kimg_exr.so", "EXR"},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
return checkResult{catOptionalFeatures, "kimageformats", statusOK, fmt.Sprintf("Installed (%d formats)", len(found)), details, url}
|
var results []checkResult
|
||||||
|
for _, c := range checks {
|
||||||
|
var found []string
|
||||||
|
var foundDirs []string
|
||||||
|
for _, pluginDir := range pluginDirs {
|
||||||
|
imageFormatsDir := filepath.Join(pluginDir, "imageformats")
|
||||||
|
for _, p := range c.plugins {
|
||||||
|
if _, err := os.Stat(filepath.Join(imageFormatsDir, p.file)); err == nil {
|
||||||
|
if !slices.Contains(found, p.format) {
|
||||||
|
found = append(found, p.format)
|
||||||
|
}
|
||||||
|
if !slices.Contains(foundDirs, imageFormatsDir) {
|
||||||
|
foundDirs = append(foundDirs, imageFormatsDir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var result checkResult
|
||||||
|
switch {
|
||||||
|
case len(found) == 0:
|
||||||
|
result = checkResult{catOptionalFeatures, c.name, statusWarn, "Not installed", c.desc, url}
|
||||||
|
default:
|
||||||
|
details := ""
|
||||||
|
if doctorVerbose {
|
||||||
|
details = fmt.Sprintf("Formats: %s (%s)", strings.Join(found, ", "), strings.Join(foundDirs, ":"))
|
||||||
|
}
|
||||||
|
result = checkResult{catOptionalFeatures, c.name, statusOK, fmt.Sprintf("Installed (%d formats)", len(found)), details, url}
|
||||||
|
}
|
||||||
|
results = append(results, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
}
|
}
|
||||||
|
|
||||||
func findQtPluginDir() string {
|
func findQtPluginDirs() []string {
|
||||||
// Check QT_PLUGIN_PATH env var first (used by NixOS and custom setups)
|
var dirs []string
|
||||||
|
|
||||||
|
addDir := func(dir string) {
|
||||||
|
if dir != "" {
|
||||||
|
if _, err := os.Stat(filepath.Join(dir, "imageformats")); err == nil {
|
||||||
|
dirs = append(dirs, dir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check all paths in QT_PLUGIN_PATH env var (used by NixOS and custom setups)
|
||||||
if envPath := os.Getenv("QT_PLUGIN_PATH"); envPath != "" {
|
if envPath := os.Getenv("QT_PLUGIN_PATH"); envPath != "" {
|
||||||
for dir := range strings.SplitSeq(envPath, ":") {
|
for dir := range strings.SplitSeq(envPath, ":") {
|
||||||
if _, err := os.Stat(filepath.Join(dir, "imageformats")); err == nil {
|
addDir(dir)
|
||||||
return dir
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Try qtpaths
|
// Try qtpaths
|
||||||
for _, cmd := range []string{"qtpaths6", "qtpaths"} {
|
for _, cmd := range []string{"qtpaths6", "qtpaths"} {
|
||||||
if output, err := exec.Command(cmd, "-query", "QT_INSTALL_PLUGINS").Output(); err == nil {
|
if output, err := exec.Command(cmd, "-query", "QT_INSTALL_PLUGINS").Output(); err == nil {
|
||||||
if dir := strings.TrimSpace(string(output)); dir != "" {
|
addDir(strings.TrimSpace(string(output)))
|
||||||
return dir
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -711,12 +757,10 @@ func findQtPluginDir() string {
|
|||||||
"/usr/lib/x86_64-linux-gnu/qt6/plugins",
|
"/usr/lib/x86_64-linux-gnu/qt6/plugins",
|
||||||
"/usr/lib/aarch64-linux-gnu/qt6/plugins",
|
"/usr/lib/aarch64-linux-gnu/qt6/plugins",
|
||||||
} {
|
} {
|
||||||
if _, err := os.Stat(filepath.Join(dir, "imageformats")); err == nil {
|
addDir(dir)
|
||||||
return dir
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return ""
|
return dirs
|
||||||
}
|
}
|
||||||
|
|
||||||
func detectNetworkBackend(stackResult *network.DetectResult) string {
|
func detectNetworkBackend(stackResult *network.DetectResult) string {
|
||||||
@@ -773,7 +817,7 @@ func checkOptionalDependencies() []checkResult {
|
|||||||
results = append(results, checkResult{catOptionalFeatures, "cups-pk-helper", cupsPkStatus, cupsPkMsg, "Printer management", optionalFeaturesURL})
|
results = append(results, checkResult{catOptionalFeatures, "cups-pk-helper", cupsPkStatus, cupsPkMsg, "Printer management", optionalFeaturesURL})
|
||||||
|
|
||||||
results = append(results, checkI2CAvailability())
|
results = append(results, checkI2CAvailability())
|
||||||
results = append(results, checkKImageFormats())
|
results = append(results, checkImageFormatPlugins()...)
|
||||||
|
|
||||||
terminals := []string{"ghostty", "kitty", "alacritty", "foot", "wezterm"}
|
terminals := []string{"ghostty", "kitty", "alacritty", "foot", "wezterm"}
|
||||||
if idx := slices.IndexFunc(terminals, utils.CommandExists); idx >= 0 {
|
if idx := slices.IndexFunc(terminals, utils.CommandExists); idx >= 0 {
|
||||||
@@ -1035,14 +1079,14 @@ func formatResultsPlain(results []checkResult) string {
|
|||||||
if currentCategory != -1 {
|
if currentCategory != -1 {
|
||||||
sb.WriteString("\n")
|
sb.WriteString("\n")
|
||||||
}
|
}
|
||||||
sb.WriteString(fmt.Sprintf("**%s**\n", r.category.String()))
|
fmt.Fprintf(&sb, "**%s**\n", r.category.String())
|
||||||
currentCategory = r.category
|
currentCategory = r.category
|
||||||
}
|
}
|
||||||
|
|
||||||
sb.WriteString(fmt.Sprintf("- [%s] %s: %s\n", r.status, r.name, r.message))
|
fmt.Fprintf(&sb, "- [%s] %s: %s\n", r.status, r.name, r.message)
|
||||||
|
|
||||||
if doctorVerbose && r.details != "" {
|
if doctorVerbose && r.details != "" {
|
||||||
sb.WriteString(fmt.Sprintf(" - %s\n", r.details))
|
fmt.Fprintf(&sb, " - %s\n", r.details)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1052,8 +1096,8 @@ func formatResultsPlain(results []checkResult) string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
sb.WriteString("\n---\n")
|
sb.WriteString("\n---\n")
|
||||||
sb.WriteString(fmt.Sprintf("**Summary:** %d error(s), %d warning(s), %d ok\n",
|
fmt.Fprintf(&sb, "**Summary:** %d error(s), %d warning(s), %d ok\n",
|
||||||
ds.ErrorCount(), ds.WarningCount(), ds.OKCount()))
|
ds.ErrorCount(), ds.WarningCount(), ds.OKCount())
|
||||||
|
|
||||||
return sb.String()
|
return sb.String()
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
87
core/cmd/dms/commands_greeter_test.go
Normal file
87
core/cmd/dms/commands_greeter_test.go
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
sharedpam "github.com/AvengeMedia/DankMaterialShell/core/internal/pam"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSyncGreeterConfigsAndAuthDelegatesSharedAuth(t *testing.T) {
|
||||||
|
origGreeterConfigSyncFn := greeterConfigSyncFn
|
||||||
|
origSharedAuthSyncFn := sharedAuthSyncFn
|
||||||
|
t.Cleanup(func() {
|
||||||
|
greeterConfigSyncFn = origGreeterConfigSyncFn
|
||||||
|
sharedAuthSyncFn = origSharedAuthSyncFn
|
||||||
|
})
|
||||||
|
|
||||||
|
var calls []string
|
||||||
|
greeterConfigSyncFn = func(dmsPath, compositor string, logFunc func(string), sudoPassword string) error {
|
||||||
|
if dmsPath != "/tmp/dms" {
|
||||||
|
t.Fatalf("unexpected dmsPath %q", dmsPath)
|
||||||
|
}
|
||||||
|
if compositor != "niri" {
|
||||||
|
t.Fatalf("unexpected compositor %q", compositor)
|
||||||
|
}
|
||||||
|
if sudoPassword != "" {
|
||||||
|
t.Fatalf("expected empty sudoPassword, got %q", sudoPassword)
|
||||||
|
}
|
||||||
|
calls = append(calls, "configs")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var gotOptions sharedpam.SyncAuthOptions
|
||||||
|
sharedAuthSyncFn = func(logFunc func(string), sudoPassword string, options sharedpam.SyncAuthOptions) error {
|
||||||
|
if sudoPassword != "" {
|
||||||
|
t.Fatalf("expected empty sudoPassword, got %q", sudoPassword)
|
||||||
|
}
|
||||||
|
gotOptions = options
|
||||||
|
calls = append(calls, "auth")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := syncGreeterConfigsAndAuth("/tmp/dms", "niri", func(string) {}, sharedpam.SyncAuthOptions{
|
||||||
|
ForceGreeterAuth: true,
|
||||||
|
}, func() {
|
||||||
|
calls = append(calls, "before-auth")
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("syncGreeterConfigsAndAuth returned error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
wantCalls := []string{"configs", "before-auth", "auth"}
|
||||||
|
if !reflect.DeepEqual(calls, wantCalls) {
|
||||||
|
t.Fatalf("call order = %v, want %v", calls, wantCalls)
|
||||||
|
}
|
||||||
|
if !gotOptions.ForceGreeterAuth {
|
||||||
|
t.Fatalf("expected ForceGreeterAuth to be true, got %+v", gotOptions)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSyncGreeterConfigsAndAuthStopsOnConfigError(t *testing.T) {
|
||||||
|
origGreeterConfigSyncFn := greeterConfigSyncFn
|
||||||
|
origSharedAuthSyncFn := sharedAuthSyncFn
|
||||||
|
t.Cleanup(func() {
|
||||||
|
greeterConfigSyncFn = origGreeterConfigSyncFn
|
||||||
|
sharedAuthSyncFn = origSharedAuthSyncFn
|
||||||
|
})
|
||||||
|
|
||||||
|
greeterConfigSyncFn = func(string, string, func(string), string) error {
|
||||||
|
return errors.New("config sync failed")
|
||||||
|
}
|
||||||
|
|
||||||
|
authCalled := false
|
||||||
|
sharedAuthSyncFn = func(func(string), string, sharedpam.SyncAuthOptions) error {
|
||||||
|
authCalled = true
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := syncGreeterConfigsAndAuth("/tmp/dms", "niri", func(string) {}, sharedpam.SyncAuthOptions{}, nil)
|
||||||
|
if err == nil || err.Error() != "config sync failed" {
|
||||||
|
t.Fatalf("expected config sync error, got %v", err)
|
||||||
|
}
|
||||||
|
if authCalled {
|
||||||
|
t.Fatal("expected auth sync not to run after config sync failure")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,7 +3,9 @@ package main
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
@@ -55,10 +57,11 @@ func init() {
|
|||||||
cmd.Flags().Bool("sync-mode-with-portal", false, "Sync color scheme with GNOME portal")
|
cmd.Flags().Bool("sync-mode-with-portal", false, "Sync color scheme with GNOME portal")
|
||||||
cmd.Flags().Bool("terminals-always-dark", false, "Force terminal themes to dark variant")
|
cmd.Flags().Bool("terminals-always-dark", false, "Force terminal themes to dark variant")
|
||||||
cmd.Flags().String("skip-templates", "", "Comma-separated list of templates to skip")
|
cmd.Flags().String("skip-templates", "", "Comma-separated list of templates to skip")
|
||||||
|
cmd.Flags().Float64("contrast", 0, "Contrast value from -1 to 1 (0 = standard)")
|
||||||
}
|
}
|
||||||
|
|
||||||
matugenQueueCmd.Flags().Bool("wait", true, "Wait for completion")
|
matugenQueueCmd.Flags().Bool("wait", true, "Wait for completion")
|
||||||
matugenQueueCmd.Flags().Duration("timeout", 30*time.Second, "Timeout for waiting")
|
matugenQueueCmd.Flags().Duration("timeout", 90*time.Second, "Timeout for waiting")
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
|
func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
|
||||||
@@ -75,6 +78,7 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
|
|||||||
syncModeWithPortal, _ := cmd.Flags().GetBool("sync-mode-with-portal")
|
syncModeWithPortal, _ := cmd.Flags().GetBool("sync-mode-with-portal")
|
||||||
terminalsAlwaysDark, _ := cmd.Flags().GetBool("terminals-always-dark")
|
terminalsAlwaysDark, _ := cmd.Flags().GetBool("terminals-always-dark")
|
||||||
skipTemplates, _ := cmd.Flags().GetString("skip-templates")
|
skipTemplates, _ := cmd.Flags().GetString("skip-templates")
|
||||||
|
contrast, _ := cmd.Flags().GetFloat64("contrast")
|
||||||
|
|
||||||
return matugen.Options{
|
return matugen.Options{
|
||||||
StateDir: stateDir,
|
StateDir: stateDir,
|
||||||
@@ -85,6 +89,7 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
|
|||||||
Mode: matugen.ColorMode(mode),
|
Mode: matugen.ColorMode(mode),
|
||||||
IconTheme: iconTheme,
|
IconTheme: iconTheme,
|
||||||
MatugenType: matugenType,
|
MatugenType: matugenType,
|
||||||
|
Contrast: contrast,
|
||||||
RunUserTemplates: runUserTemplates,
|
RunUserTemplates: runUserTemplates,
|
||||||
StockColors: stockColors,
|
StockColors: stockColors,
|
||||||
SyncModeWithPortal: syncModeWithPortal,
|
SyncModeWithPortal: syncModeWithPortal,
|
||||||
@@ -95,7 +100,11 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
|
|||||||
|
|
||||||
func runMatugenGenerate(cmd *cobra.Command, args []string) {
|
func runMatugenGenerate(cmd *cobra.Command, args []string) {
|
||||||
opts := buildMatugenOptions(cmd)
|
opts := buildMatugenOptions(cmd)
|
||||||
if err := matugen.Run(opts); err != nil {
|
err := matugen.Run(opts)
|
||||||
|
switch {
|
||||||
|
case errors.Is(err, matugen.ErrNoChanges):
|
||||||
|
os.Exit(2)
|
||||||
|
case err != nil:
|
||||||
log.Fatalf("Theme generation failed: %v", err)
|
log.Fatalf("Theme generation failed: %v", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -122,6 +131,7 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
|
|||||||
"syncModeWithPortal": opts.SyncModeWithPortal,
|
"syncModeWithPortal": opts.SyncModeWithPortal,
|
||||||
"terminalsAlwaysDark": opts.TerminalsAlwaysDark,
|
"terminalsAlwaysDark": opts.TerminalsAlwaysDark,
|
||||||
"skipTemplates": opts.SkipTemplates,
|
"skipTemplates": opts.SkipTemplates,
|
||||||
|
"contrast": opts.Contrast,
|
||||||
"wait": wait,
|
"wait": wait,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -129,7 +139,11 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
|
|||||||
if !wait {
|
if !wait {
|
||||||
if err := sendServerRequestFireAndForget(request); err != nil {
|
if err := sendServerRequestFireAndForget(request); err != nil {
|
||||||
log.Info("Server unavailable, running synchronously")
|
log.Info("Server unavailable, running synchronously")
|
||||||
if err := matugen.Run(opts); err != nil {
|
err := matugen.Run(opts)
|
||||||
|
switch {
|
||||||
|
case errors.Is(err, matugen.ErrNoChanges):
|
||||||
|
os.Exit(2)
|
||||||
|
case err != nil:
|
||||||
log.Fatalf("Theme generation failed: %v", err)
|
log.Fatalf("Theme generation failed: %v", err)
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
@@ -146,11 +160,15 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
|
|||||||
resp, ok := tryServerRequest(request)
|
resp, ok := tryServerRequest(request)
|
||||||
if !ok {
|
if !ok {
|
||||||
log.Info("Server unavailable, running synchronously")
|
log.Info("Server unavailable, running synchronously")
|
||||||
if err := matugen.Run(opts); err != nil {
|
err := matugen.Run(opts)
|
||||||
|
switch {
|
||||||
|
case errors.Is(err, matugen.ErrNoChanges):
|
||||||
|
resultCh <- matugen.ErrNoChanges
|
||||||
|
case err != nil:
|
||||||
resultCh <- err
|
resultCh <- err
|
||||||
return
|
default:
|
||||||
|
resultCh <- nil
|
||||||
}
|
}
|
||||||
resultCh <- nil
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if resp.Error != "" {
|
if resp.Error != "" {
|
||||||
@@ -162,7 +180,10 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
select {
|
select {
|
||||||
case err := <-resultCh:
|
case err := <-resultCh:
|
||||||
if err != nil {
|
switch {
|
||||||
|
case errors.Is(err, matugen.ErrNoChanges):
|
||||||
|
os.Exit(2)
|
||||||
|
case err != nil:
|
||||||
log.Fatalf("Theme generation failed: %v", err)
|
log.Fatalf("Theme generation failed: %v", err)
|
||||||
}
|
}
|
||||||
fmt.Println("Theme generation completed")
|
fmt.Println("Theme generation completed")
|
||||||
|
|||||||
58
core/cmd/dms/commands_randr.go
Normal file
58
core/cmd/dms/commands_randr.go
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var randrCmd = &cobra.Command{
|
||||||
|
Use: "randr",
|
||||||
|
Short: "Query output display information",
|
||||||
|
Long: "Query Wayland compositor for output names, scales, resolutions and refresh rates via zwlr-output-management",
|
||||||
|
Run: runRandr,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
randrCmd.Flags().Bool("json", false, "Output in JSON format")
|
||||||
|
}
|
||||||
|
|
||||||
|
type randrJSON struct {
|
||||||
|
Outputs []randrOutput `json:"outputs"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func runRandr(cmd *cobra.Command, args []string) {
|
||||||
|
outputs, err := queryRandr()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("%v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
jsonFlag, _ := cmd.Flags().GetBool("json")
|
||||||
|
|
||||||
|
if jsonFlag {
|
||||||
|
data, err := json.Marshal(randrJSON{Outputs: outputs})
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to marshal JSON: %v", err)
|
||||||
|
}
|
||||||
|
fmt.Println(string(data))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, out := range outputs {
|
||||||
|
if i > 0 {
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
status := "enabled"
|
||||||
|
if !out.Enabled {
|
||||||
|
status = "disabled"
|
||||||
|
}
|
||||||
|
fmt.Printf("%s (%s)\n", out.Name, status)
|
||||||
|
fmt.Printf(" Scale: %.4g\n", out.Scale)
|
||||||
|
fmt.Printf(" Resolution: %dx%d\n", out.Width, out.Height)
|
||||||
|
if out.Refresh > 0 {
|
||||||
|
fmt.Printf(" Refresh: %.2f Hz\n", float64(out.Refresh)/1000.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -13,16 +13,18 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
ssOutputName string
|
ssOutputName string
|
||||||
ssIncludeCursor bool
|
ssCursor string
|
||||||
ssFormat string
|
ssFormat string
|
||||||
ssQuality int
|
ssQuality int
|
||||||
ssOutputDir string
|
ssOutputDir string
|
||||||
ssFilename string
|
ssFilename string
|
||||||
ssNoClipboard bool
|
ssNoClipboard bool
|
||||||
ssNoFile bool
|
ssNoFile bool
|
||||||
ssNoNotify bool
|
ssNoNotify bool
|
||||||
ssStdout bool
|
ssNoConfirm bool
|
||||||
|
ssReset bool
|
||||||
|
ssStdout bool
|
||||||
)
|
)
|
||||||
|
|
||||||
var screenshotCmd = &cobra.Command{
|
var screenshotCmd = &cobra.Command{
|
||||||
@@ -50,9 +52,11 @@ Examples:
|
|||||||
dms screenshot output -o DP-1 # Specific output
|
dms screenshot output -o DP-1 # Specific output
|
||||||
dms screenshot window # Focused window (Hyprland)
|
dms screenshot window # Focused window (Hyprland)
|
||||||
dms screenshot last # Last region (pre-selected)
|
dms screenshot last # Last region (pre-selected)
|
||||||
|
dms screenshot --reset # Reset last region pre-selection
|
||||||
dms screenshot --no-clipboard # Save file only
|
dms screenshot --no-clipboard # Save file only
|
||||||
dms screenshot --no-file # Clipboard only
|
dms screenshot --no-file # Clipboard only
|
||||||
dms screenshot --cursor # Include cursor
|
dms screenshot --no-confirm # Region capture on mouse release
|
||||||
|
dms screenshot --cursor=on # Include cursor
|
||||||
dms screenshot -f jpg -q 85 # JPEG with quality 85`,
|
dms screenshot -f jpg -q 85 # JPEG with quality 85`,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -111,7 +115,7 @@ var notifyActionCmd = &cobra.Command{
|
|||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
screenshotCmd.PersistentFlags().StringVarP(&ssOutputName, "output", "o", "", "Output name for 'output' mode")
|
screenshotCmd.PersistentFlags().StringVarP(&ssOutputName, "output", "o", "", "Output name for 'output' mode")
|
||||||
screenshotCmd.PersistentFlags().BoolVar(&ssIncludeCursor, "cursor", false, "Include cursor in screenshot")
|
screenshotCmd.PersistentFlags().StringVar(&ssCursor, "cursor", "off", "Include cursor in screenshot (on/off)")
|
||||||
screenshotCmd.PersistentFlags().StringVarP(&ssFormat, "format", "f", "png", "Output format (png, jpg, ppm)")
|
screenshotCmd.PersistentFlags().StringVarP(&ssFormat, "format", "f", "png", "Output format (png, jpg, ppm)")
|
||||||
screenshotCmd.PersistentFlags().IntVarP(&ssQuality, "quality", "q", 90, "JPEG quality (1-100)")
|
screenshotCmd.PersistentFlags().IntVarP(&ssQuality, "quality", "q", 90, "JPEG quality (1-100)")
|
||||||
screenshotCmd.PersistentFlags().StringVarP(&ssOutputDir, "dir", "d", "", "Output directory")
|
screenshotCmd.PersistentFlags().StringVarP(&ssOutputDir, "dir", "d", "", "Output directory")
|
||||||
@@ -119,6 +123,8 @@ func init() {
|
|||||||
screenshotCmd.PersistentFlags().BoolVar(&ssNoClipboard, "no-clipboard", false, "Don't copy to clipboard")
|
screenshotCmd.PersistentFlags().BoolVar(&ssNoClipboard, "no-clipboard", false, "Don't copy to clipboard")
|
||||||
screenshotCmd.PersistentFlags().BoolVar(&ssNoFile, "no-file", false, "Don't save to file")
|
screenshotCmd.PersistentFlags().BoolVar(&ssNoFile, "no-file", false, "Don't save to file")
|
||||||
screenshotCmd.PersistentFlags().BoolVar(&ssNoNotify, "no-notify", false, "Don't show notification")
|
screenshotCmd.PersistentFlags().BoolVar(&ssNoNotify, "no-notify", false, "Don't show notification")
|
||||||
|
screenshotCmd.PersistentFlags().BoolVar(&ssNoConfirm, "no-confirm", false, "Region mode: capture on mouse release without Enter/Space confirmation")
|
||||||
|
screenshotCmd.PersistentFlags().BoolVar(&ssReset, "reset", false, "Reset saved last-region preselection before capturing")
|
||||||
screenshotCmd.PersistentFlags().BoolVar(&ssStdout, "stdout", false, "Output image to stdout (for piping to swappy, etc.)")
|
screenshotCmd.PersistentFlags().BoolVar(&ssStdout, "stdout", false, "Output image to stdout (for piping to swappy, etc.)")
|
||||||
|
|
||||||
screenshotCmd.AddCommand(ssRegionCmd)
|
screenshotCmd.AddCommand(ssRegionCmd)
|
||||||
@@ -136,10 +142,14 @@ func getScreenshotConfig(mode screenshot.Mode) screenshot.Config {
|
|||||||
config := screenshot.DefaultConfig()
|
config := screenshot.DefaultConfig()
|
||||||
config.Mode = mode
|
config.Mode = mode
|
||||||
config.OutputName = ssOutputName
|
config.OutputName = ssOutputName
|
||||||
config.IncludeCursor = ssIncludeCursor
|
if strings.EqualFold(ssCursor, "on") {
|
||||||
|
config.Cursor = screenshot.CursorOn
|
||||||
|
}
|
||||||
config.Clipboard = !ssNoClipboard
|
config.Clipboard = !ssNoClipboard
|
||||||
config.SaveFile = !ssNoFile
|
config.SaveFile = !ssNoFile
|
||||||
config.Notify = !ssNoNotify
|
config.Notify = !ssNoNotify
|
||||||
|
config.NoConfirm = ssNoConfirm
|
||||||
|
config.Reset = ssReset
|
||||||
config.Stdout = ssStdout
|
config.Stdout = ssStdout
|
||||||
|
|
||||||
if ssOutputDir != "" {
|
if ssOutputDir != "" {
|
||||||
|
|||||||
@@ -16,9 +16,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var setupCmd = &cobra.Command{
|
var setupCmd = &cobra.Command{
|
||||||
Use: "setup",
|
Use: "setup",
|
||||||
Short: "Deploy DMS configurations",
|
Short: "Deploy DMS configurations",
|
||||||
Long: "Deploy compositor and terminal configurations with interactive prompts",
|
Long: "Deploy compositor and terminal configurations with interactive prompts",
|
||||||
|
PersistentPreRunE: requireMutableSystemCommand,
|
||||||
Run: func(cmd *cobra.Command, args []string) {
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
if err := runSetup(); err != nil {
|
if err := runSetup(); err != nil {
|
||||||
log.Fatalf("Error during setup: %v", err)
|
log.Fatalf("Error during setup: %v", err)
|
||||||
|
|||||||
271
core/cmd/dms/immutable_policy.go
Normal file
271
core/cmd/dms/immutable_policy.go
Normal file
@@ -0,0 +1,271 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
_ "embed"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
cliPolicyPackagedPath = "/usr/share/dms/cli-policy.json"
|
||||||
|
cliPolicyAdminPath = "/etc/dms/cli-policy.json"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
immutablePolicyOnce sync.Once
|
||||||
|
immutablePolicy immutableCommandPolicy
|
||||||
|
immutablePolicyErr error
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:embed assets/cli-policy.default.json
|
||||||
|
var defaultCLIPolicyJSON []byte
|
||||||
|
|
||||||
|
type immutableCommandPolicy struct {
|
||||||
|
ImmutableSystem bool
|
||||||
|
ImmutableReason string
|
||||||
|
BlockedCommands []string
|
||||||
|
Message string
|
||||||
|
}
|
||||||
|
|
||||||
|
type cliPolicyFile struct {
|
||||||
|
PolicyVersion int `json:"policy_version"`
|
||||||
|
ImmutableSystem *bool `json:"immutable_system"`
|
||||||
|
BlockedCommands *[]string `json:"blocked_commands"`
|
||||||
|
Message *string `json:"message"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizeCommandSpec(raw string) string {
|
||||||
|
normalized := strings.ToLower(strings.TrimSpace(raw))
|
||||||
|
normalized = strings.TrimPrefix(normalized, "dms ")
|
||||||
|
return strings.Join(strings.Fields(normalized), " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizeBlockedCommands(raw []string) []string {
|
||||||
|
normalized := make([]string, 0, len(raw))
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, cmd := range raw {
|
||||||
|
spec := normalizeCommandSpec(cmd)
|
||||||
|
if spec == "" || seen[spec] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[spec] = true
|
||||||
|
normalized = append(normalized, spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
func commandBlockedByPolicy(commandPath string, blocked []string) bool {
|
||||||
|
normalizedPath := normalizeCommandSpec(commandPath)
|
||||||
|
if normalizedPath == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range blocked {
|
||||||
|
spec := normalizeCommandSpec(entry)
|
||||||
|
if spec == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if normalizedPath == spec || strings.HasPrefix(normalizedPath, spec+" ") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadPolicyFile(path string) (*cliPolicyFile, error) {
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("failed to read %s: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var policy cliPolicyFile
|
||||||
|
if err := json.Unmarshal(data, &policy); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to parse %s: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &policy, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mergePolicyFile(base *immutableCommandPolicy, path string) error {
|
||||||
|
policyFile, err := loadPolicyFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if policyFile == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if policyFile.ImmutableSystem != nil {
|
||||||
|
base.ImmutableSystem = *policyFile.ImmutableSystem
|
||||||
|
}
|
||||||
|
if policyFile.BlockedCommands != nil {
|
||||||
|
base.BlockedCommands = normalizeBlockedCommands(*policyFile.BlockedCommands)
|
||||||
|
}
|
||||||
|
if policyFile.Message != nil {
|
||||||
|
msg := strings.TrimSpace(*policyFile.Message)
|
||||||
|
if msg != "" {
|
||||||
|
base.Message = msg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func readOSReleaseMap(path string) map[string]string {
|
||||||
|
values := make(map[string]string)
|
||||||
|
|
||||||
|
file, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(file)
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := strings.TrimSpace(scanner.Text())
|
||||||
|
if line == "" || strings.HasPrefix(line, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
parts := strings.SplitN(line, "=", 2)
|
||||||
|
if len(parts) != 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
key := strings.ToUpper(strings.TrimSpace(parts[0]))
|
||||||
|
value := strings.Trim(strings.TrimSpace(parts[1]), "\"")
|
||||||
|
values[key] = strings.ToLower(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
func hasAnyToken(text string, tokens ...string) bool {
|
||||||
|
if text == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, token := range tokens {
|
||||||
|
if strings.Contains(text, token) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func detectImmutableSystem() (bool, string) {
|
||||||
|
if _, err := os.Stat("/run/ostree-booted"); err == nil {
|
||||||
|
return true, "/run/ostree-booted is present"
|
||||||
|
}
|
||||||
|
|
||||||
|
osRelease := readOSReleaseMap("/etc/os-release")
|
||||||
|
if len(osRelease) == 0 {
|
||||||
|
return false, ""
|
||||||
|
}
|
||||||
|
|
||||||
|
id := osRelease["ID"]
|
||||||
|
idLike := osRelease["ID_LIKE"]
|
||||||
|
variantID := osRelease["VARIANT_ID"]
|
||||||
|
name := osRelease["NAME"]
|
||||||
|
prettyName := osRelease["PRETTY_NAME"]
|
||||||
|
|
||||||
|
immutableIDs := map[string]bool{
|
||||||
|
"bluefin": true,
|
||||||
|
"bazzite": true,
|
||||||
|
"silverblue": true,
|
||||||
|
"kinoite": true,
|
||||||
|
"sericea": true,
|
||||||
|
"onyx": true,
|
||||||
|
"aurora": true,
|
||||||
|
"fedora-iot": true,
|
||||||
|
"fedora-coreos": true,
|
||||||
|
}
|
||||||
|
if immutableIDs[id] {
|
||||||
|
return true, "os-release ID=" + id
|
||||||
|
}
|
||||||
|
|
||||||
|
markers := []string{"silverblue", "kinoite", "sericea", "onyx", "bazzite", "bluefin", "aurora", "ostree", "atomic"}
|
||||||
|
if hasAnyToken(variantID, markers...) {
|
||||||
|
return true, "os-release VARIANT_ID=" + variantID
|
||||||
|
}
|
||||||
|
if hasAnyToken(idLike, "ostree", "rpm-ostree") {
|
||||||
|
return true, "os-release ID_LIKE=" + idLike
|
||||||
|
}
|
||||||
|
if hasAnyToken(name, markers...) || hasAnyToken(prettyName, markers...) {
|
||||||
|
return true, "os-release identifies an atomic/ostree variant"
|
||||||
|
}
|
||||||
|
|
||||||
|
return false, ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func getImmutablePolicy() (*immutableCommandPolicy, error) {
|
||||||
|
immutablePolicyOnce.Do(func() {
|
||||||
|
detectedImmutable, reason := detectImmutableSystem()
|
||||||
|
immutablePolicy = immutableCommandPolicy{
|
||||||
|
ImmutableSystem: detectedImmutable,
|
||||||
|
ImmutableReason: reason,
|
||||||
|
BlockedCommands: []string{"greeter install", "greeter enable", "setup"},
|
||||||
|
Message: "This command is disabled on immutable/image-based systems. Use your distro-native workflow for system-level changes.",
|
||||||
|
}
|
||||||
|
|
||||||
|
var defaultPolicy cliPolicyFile
|
||||||
|
if err := json.Unmarshal(defaultCLIPolicyJSON, &defaultPolicy); err != nil {
|
||||||
|
immutablePolicyErr = fmt.Errorf("failed to parse embedded default CLI policy: %w", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if defaultPolicy.BlockedCommands != nil {
|
||||||
|
immutablePolicy.BlockedCommands = normalizeBlockedCommands(*defaultPolicy.BlockedCommands)
|
||||||
|
}
|
||||||
|
if defaultPolicy.Message != nil {
|
||||||
|
msg := strings.TrimSpace(*defaultPolicy.Message)
|
||||||
|
if msg != "" {
|
||||||
|
immutablePolicy.Message = msg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := mergePolicyFile(&immutablePolicy, cliPolicyPackagedPath); err != nil {
|
||||||
|
immutablePolicyErr = err
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := mergePolicyFile(&immutablePolicy, cliPolicyAdminPath); err != nil {
|
||||||
|
immutablePolicyErr = err
|
||||||
|
return
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
if immutablePolicyErr != nil {
|
||||||
|
return nil, immutablePolicyErr
|
||||||
|
}
|
||||||
|
return &immutablePolicy, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func requireMutableSystemCommand(cmd *cobra.Command, _ []string) error {
|
||||||
|
policy, err := getImmutablePolicy()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if !policy.ImmutableSystem {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
commandPath := normalizeCommandSpec(cmd.CommandPath())
|
||||||
|
if !commandBlockedByPolicy(commandPath, policy.BlockedCommands) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
reason := ""
|
||||||
|
if policy.ImmutableReason != "" {
|
||||||
|
reason = "Detected immutable system: " + policy.ImmutableReason + "\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Errorf("%s%s\nCommand: dms %s\nPolicy files:\n %s\n %s", reason, policy.Message, commandPath, cliPolicyPackagedPath, cliPolicyAdminPath)
|
||||||
|
}
|
||||||
@@ -16,21 +16,14 @@ func init() {
|
|||||||
runCmd.Flags().Bool("session", false, "Session managed (like as a systemd unit)")
|
runCmd.Flags().Bool("session", false, "Session managed (like as a systemd unit)")
|
||||||
runCmd.Flags().MarkHidden("daemon-child")
|
runCmd.Flags().MarkHidden("daemon-child")
|
||||||
|
|
||||||
// Add subcommands to greeter
|
greeterCmd.AddCommand(greeterInstallCmd, greeterSyncCmd, greeterEnableCmd, greeterStatusCmd, greeterUninstallCmd)
|
||||||
greeterCmd.AddCommand(greeterInstallCmd, greeterSyncCmd, greeterEnableCmd, greeterStatusCmd)
|
authCmd.AddCommand(authSyncCmd)
|
||||||
|
|
||||||
// Add subcommands to setup
|
|
||||||
setupCmd.AddCommand(setupBindsCmd, setupLayoutCmd, setupColorsCmd, setupAlttabCmd, setupOutputsCmd, setupCursorCmd, setupWindowrulesCmd)
|
setupCmd.AddCommand(setupBindsCmd, setupLayoutCmd, setupColorsCmd, setupAlttabCmd, setupOutputsCmd, setupCursorCmd, setupWindowrulesCmd)
|
||||||
|
|
||||||
// Add subcommands to update
|
|
||||||
updateCmd.AddCommand(updateCheckCmd)
|
updateCmd.AddCommand(updateCheckCmd)
|
||||||
|
|
||||||
// Add subcommands to plugins
|
|
||||||
pluginsCmd.AddCommand(pluginsBrowseCmd, pluginsListCmd, pluginsInstallCmd, pluginsUninstallCmd, pluginsUpdateCmd)
|
pluginsCmd.AddCommand(pluginsBrowseCmd, pluginsListCmd, pluginsInstallCmd, pluginsUninstallCmd, pluginsUpdateCmd)
|
||||||
|
|
||||||
// Add common commands to root
|
|
||||||
rootCmd.AddCommand(getCommonCommands()...)
|
rootCmd.AddCommand(getCommonCommands()...)
|
||||||
|
|
||||||
|
rootCmd.AddCommand(authCmd)
|
||||||
rootCmd.AddCommand(updateCmd)
|
rootCmd.AddCommand(updateCmd)
|
||||||
|
|
||||||
rootCmd.SetHelpTemplate(getHelpTemplate())
|
rootCmd.SetHelpTemplate(getHelpTemplate())
|
||||||
|
|||||||
@@ -11,29 +11,22 @@ import (
|
|||||||
var Version = "dev"
|
var Version = "dev"
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
// Add flags
|
|
||||||
runCmd.Flags().BoolP("daemon", "d", false, "Run in daemon mode")
|
runCmd.Flags().BoolP("daemon", "d", false, "Run in daemon mode")
|
||||||
runCmd.Flags().Bool("daemon-child", false, "Internal flag for daemon child process")
|
runCmd.Flags().Bool("daemon-child", false, "Internal flag for daemon child process")
|
||||||
runCmd.Flags().Bool("session", false, "Session managed (like as a systemd unit)")
|
runCmd.Flags().Bool("session", false, "Session managed (like as a systemd unit)")
|
||||||
runCmd.Flags().MarkHidden("daemon-child")
|
runCmd.Flags().MarkHidden("daemon-child")
|
||||||
|
|
||||||
// Add subcommands to greeter
|
greeterCmd.AddCommand(greeterInstallCmd, greeterSyncCmd, greeterEnableCmd, greeterStatusCmd, greeterUninstallCmd)
|
||||||
greeterCmd.AddCommand(greeterSyncCmd, greeterEnableCmd, greeterStatusCmd)
|
authCmd.AddCommand(authSyncCmd)
|
||||||
|
|
||||||
// Add subcommands to setup
|
|
||||||
setupCmd.AddCommand(setupBindsCmd, setupLayoutCmd, setupColorsCmd, setupAlttabCmd, setupOutputsCmd, setupCursorCmd, setupWindowrulesCmd)
|
setupCmd.AddCommand(setupBindsCmd, setupLayoutCmd, setupColorsCmd, setupAlttabCmd, setupOutputsCmd, setupCursorCmd, setupWindowrulesCmd)
|
||||||
|
|
||||||
// Add subcommands to plugins
|
|
||||||
pluginsCmd.AddCommand(pluginsBrowseCmd, pluginsListCmd, pluginsInstallCmd, pluginsUninstallCmd, pluginsUpdateCmd)
|
pluginsCmd.AddCommand(pluginsBrowseCmd, pluginsListCmd, pluginsInstallCmd, pluginsUninstallCmd, pluginsUpdateCmd)
|
||||||
|
|
||||||
// Add common commands to root
|
|
||||||
rootCmd.AddCommand(getCommonCommands()...)
|
rootCmd.AddCommand(getCommonCommands()...)
|
||||||
|
rootCmd.AddCommand(authCmd)
|
||||||
|
|
||||||
rootCmd.SetHelpTemplate(getHelpTemplate())
|
rootCmd.SetHelpTemplate(getHelpTemplate())
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
// Block root
|
|
||||||
if os.Geteuid() == 0 {
|
if os.Geteuid() == 0 {
|
||||||
log.Fatal("This program should not be run as root. Exiting.")
|
log.Fatal("This program should not be run as root. Exiting.")
|
||||||
}
|
}
|
||||||
|
|||||||
172
core/cmd/dms/randr_client.go
Normal file
172
core/cmd/dms/randr_client.go
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/wlr_output_management"
|
||||||
|
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
|
||||||
|
)
|
||||||
|
|
||||||
|
type randrOutput struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Scale float64 `json:"scale"`
|
||||||
|
Width int32 `json:"width"`
|
||||||
|
Height int32 `json:"height"`
|
||||||
|
Refresh int32 `json:"refresh"`
|
||||||
|
Enabled bool `json:"enabled"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type randrHead struct {
|
||||||
|
name string
|
||||||
|
enabled bool
|
||||||
|
scale float64
|
||||||
|
currentModeID uint32
|
||||||
|
modeIDs []uint32
|
||||||
|
}
|
||||||
|
|
||||||
|
type randrMode struct {
|
||||||
|
width int32
|
||||||
|
height int32
|
||||||
|
refresh int32
|
||||||
|
}
|
||||||
|
|
||||||
|
type randrClient struct {
|
||||||
|
display *wlclient.Display
|
||||||
|
ctx *wlclient.Context
|
||||||
|
manager *wlr_output_management.ZwlrOutputManagerV1
|
||||||
|
heads map[uint32]*randrHead
|
||||||
|
modes map[uint32]*randrMode
|
||||||
|
done bool
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func queryRandr() ([]randrOutput, error) {
|
||||||
|
display, err := wlclient.Connect("")
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to connect to Wayland: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
c := &randrClient{
|
||||||
|
display: display,
|
||||||
|
ctx: display.Context(),
|
||||||
|
heads: make(map[uint32]*randrHead),
|
||||||
|
modes: make(map[uint32]*randrMode),
|
||||||
|
}
|
||||||
|
defer c.ctx.Close()
|
||||||
|
|
||||||
|
registry, err := display.GetRegistry()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get registry: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
|
||||||
|
if e.Interface == wlr_output_management.ZwlrOutputManagerV1InterfaceName {
|
||||||
|
mgr := wlr_output_management.NewZwlrOutputManagerV1(c.ctx)
|
||||||
|
version := min(e.Version, 4)
|
||||||
|
|
||||||
|
mgr.SetHeadHandler(func(e wlr_output_management.ZwlrOutputManagerV1HeadEvent) {
|
||||||
|
c.handleHead(e)
|
||||||
|
})
|
||||||
|
|
||||||
|
mgr.SetDoneHandler(func(e wlr_output_management.ZwlrOutputManagerV1DoneEvent) {
|
||||||
|
c.done = true
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := registry.Bind(e.Name, e.Interface, version, mgr); err == nil {
|
||||||
|
c.manager = mgr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// First roundtrip: discover globals and bind manager
|
||||||
|
syncCallback, err := display.Sync()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to sync display: %w", err)
|
||||||
|
}
|
||||||
|
syncCallback.SetDoneHandler(func(e wlclient.CallbackDoneEvent) {
|
||||||
|
if c.manager == nil {
|
||||||
|
c.err = fmt.Errorf("zwlr_output_manager_v1 protocol not supported by compositor")
|
||||||
|
c.done = true
|
||||||
|
}
|
||||||
|
// Otherwise wait for manager's DoneHandler
|
||||||
|
})
|
||||||
|
|
||||||
|
for !c.done {
|
||||||
|
if err := c.ctx.Dispatch(); err != nil {
|
||||||
|
return nil, fmt.Errorf("dispatch error: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.err != nil {
|
||||||
|
return nil, c.err
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.buildOutputs(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *randrClient) handleHead(e wlr_output_management.ZwlrOutputManagerV1HeadEvent) {
|
||||||
|
handle := e.Head
|
||||||
|
headID := handle.ID()
|
||||||
|
|
||||||
|
head := &randrHead{
|
||||||
|
modeIDs: make([]uint32, 0),
|
||||||
|
}
|
||||||
|
c.heads[headID] = head
|
||||||
|
|
||||||
|
handle.SetNameHandler(func(e wlr_output_management.ZwlrOutputHeadV1NameEvent) {
|
||||||
|
head.name = e.Name
|
||||||
|
})
|
||||||
|
|
||||||
|
handle.SetEnabledHandler(func(e wlr_output_management.ZwlrOutputHeadV1EnabledEvent) {
|
||||||
|
head.enabled = e.Enabled != 0
|
||||||
|
})
|
||||||
|
|
||||||
|
handle.SetScaleHandler(func(e wlr_output_management.ZwlrOutputHeadV1ScaleEvent) {
|
||||||
|
head.scale = e.Scale
|
||||||
|
})
|
||||||
|
|
||||||
|
handle.SetCurrentModeHandler(func(e wlr_output_management.ZwlrOutputHeadV1CurrentModeEvent) {
|
||||||
|
head.currentModeID = e.Mode.ID()
|
||||||
|
})
|
||||||
|
|
||||||
|
handle.SetModeHandler(func(e wlr_output_management.ZwlrOutputHeadV1ModeEvent) {
|
||||||
|
modeHandle := e.Mode
|
||||||
|
modeID := modeHandle.ID()
|
||||||
|
|
||||||
|
head.modeIDs = append(head.modeIDs, modeID)
|
||||||
|
|
||||||
|
mode := &randrMode{}
|
||||||
|
c.modes[modeID] = mode
|
||||||
|
|
||||||
|
modeHandle.SetSizeHandler(func(e wlr_output_management.ZwlrOutputModeV1SizeEvent) {
|
||||||
|
mode.width = e.Width
|
||||||
|
mode.height = e.Height
|
||||||
|
})
|
||||||
|
|
||||||
|
modeHandle.SetRefreshHandler(func(e wlr_output_management.ZwlrOutputModeV1RefreshEvent) {
|
||||||
|
mode.refresh = e.Refresh
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *randrClient) buildOutputs() []randrOutput {
|
||||||
|
outputs := make([]randrOutput, 0, len(c.heads))
|
||||||
|
|
||||||
|
for _, head := range c.heads {
|
||||||
|
out := randrOutput{
|
||||||
|
Name: head.name,
|
||||||
|
Scale: head.scale,
|
||||||
|
Enabled: head.enabled,
|
||||||
|
}
|
||||||
|
|
||||||
|
if mode, ok := c.modes[head.currentModeID]; ok {
|
||||||
|
out.Width = mode.width
|
||||||
|
out.Height = mode.height
|
||||||
|
out.Refresh = mode.refresh
|
||||||
|
}
|
||||||
|
|
||||||
|
outputs = append(outputs, out)
|
||||||
|
}
|
||||||
|
|
||||||
|
return outputs
|
||||||
|
}
|
||||||
@@ -192,6 +192,9 @@ func runShellInteractive(session bool) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ! TODO - remove when QS 0.3 is up and we can use the pragma
|
||||||
|
cmd.Env = append(cmd.Env, "QS_APP_ID=com.danklinux.dms")
|
||||||
|
|
||||||
if isSessionManaged && hasSystemdRun() {
|
if isSessionManaged && hasSystemdRun() {
|
||||||
cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
|
cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
|
||||||
}
|
}
|
||||||
@@ -432,6 +435,9 @@ func runShellDaemon(session bool) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ! TODO - remove when QS 0.3 is up and we can use the pragma
|
||||||
|
cmd.Env = append(cmd.Env, "QS_APP_ID=com.danklinux.dms")
|
||||||
|
|
||||||
if isSessionManaged && hasSystemdRun() {
|
if isSessionManaged && hasSystemdRun() {
|
||||||
cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
|
cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
|
||||||
}
|
}
|
||||||
@@ -616,6 +622,43 @@ func getShellIPCCompletions(args []string, _ string) []string {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getFirstDMSPID() (int, bool) {
|
||||||
|
dir := getRuntimeDir()
|
||||||
|
entries, err := os.ReadDir(dir)
|
||||||
|
if err != nil {
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
if !strings.HasPrefix(entry.Name(), "danklinux-") || !strings.HasSuffix(entry.Name(), ".pid") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := os.ReadFile(filepath.Join(dir, entry.Name()))
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
pid, err := strconv.Atoi(strings.TrimSpace(string(data)))
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
proc, err := os.FindProcess(pid)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if proc.Signal(syscall.Signal(0)) != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return pid, true
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
func runShellIPCCommand(args []string) {
|
func runShellIPCCommand(args []string) {
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
printIPCHelp()
|
printIPCHelp()
|
||||||
@@ -627,10 +670,21 @@ func runShellIPCCommand(args []string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
cmdArgs := []string{"ipc"}
|
cmdArgs := []string{"ipc"}
|
||||||
if qsHasAnyDisplay() {
|
|
||||||
cmdArgs = append(cmdArgs, "--any-display")
|
switch pid, ok := getFirstDMSPID(); {
|
||||||
|
case ok:
|
||||||
|
cmdArgs = append(cmdArgs, "--pid", strconv.Itoa(pid))
|
||||||
|
default:
|
||||||
|
if err := findConfig(nil, nil); err != nil {
|
||||||
|
log.Fatalf("Error finding config: %v", err)
|
||||||
|
}
|
||||||
|
// ! TODO - remove check when QS 0.3 is released
|
||||||
|
if qsHasAnyDisplay() {
|
||||||
|
cmdArgs = append(cmdArgs, "--any-display")
|
||||||
|
}
|
||||||
|
cmdArgs = append(cmdArgs, "-p", configPath)
|
||||||
}
|
}
|
||||||
cmdArgs = append(cmdArgs, "-p", configPath)
|
|
||||||
cmdArgs = append(cmdArgs, args...)
|
cmdArgs = append(cmdArgs, args...)
|
||||||
cmd := exec.Command("qs", cmdArgs...)
|
cmd := exec.Command("qs", cmdArgs...)
|
||||||
cmd.Stdin = os.Stdin
|
cmd.Stdin = os.Stdin
|
||||||
|
|||||||
@@ -7,14 +7,6 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
func findCommandPath(cmd string) (string, error) {
|
|
||||||
path, err := exec.LookPath(cmd)
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("command '%s' not found in PATH", cmd)
|
|
||||||
}
|
|
||||||
return path, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func isArchPackageInstalled(packageName string) bool {
|
func isArchPackageInstalled(packageName string) bool {
|
||||||
cmd := exec.Command("pacman", "-Q", packageName)
|
cmd := exec.Command("pacman", "-Q", packageName)
|
||||||
err := cmd.Run()
|
err := cmd.Run()
|
||||||
|
|||||||
10
core/go.mod
10
core/go.mod
@@ -1,6 +1,8 @@
|
|||||||
module github.com/AvengeMedia/DankMaterialShell/core
|
module github.com/AvengeMedia/DankMaterialShell/core
|
||||||
|
|
||||||
go 1.25.0
|
go 1.26.0
|
||||||
|
|
||||||
|
toolchain go1.26.1
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/Wifx/gonetworkmanager/v2 v2.2.0
|
github.com/Wifx/gonetworkmanager/v2 v2.2.0
|
||||||
@@ -16,6 +18,8 @@ require (
|
|||||||
github.com/sblinch/kdl-go v0.0.0-20260121213736-8b7053306ca6
|
github.com/sblinch/kdl-go v0.0.0-20260121213736-8b7053306ca6
|
||||||
github.com/spf13/cobra v1.10.2
|
github.com/spf13/cobra v1.10.2
|
||||||
github.com/stretchr/testify v1.11.1
|
github.com/stretchr/testify v1.11.1
|
||||||
|
github.com/yeqown/go-qrcode/v2 v2.2.5
|
||||||
|
github.com/yeqown/go-qrcode/writer/standard v1.3.0
|
||||||
github.com/yuin/goldmark v1.7.16
|
github.com/yuin/goldmark v1.7.16
|
||||||
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
|
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
|
||||||
go.etcd.io/bbolt v1.4.3
|
go.etcd.io/bbolt v1.4.3
|
||||||
@@ -32,15 +36,19 @@ require (
|
|||||||
github.com/cyphar/filepath-securejoin v0.6.1 // indirect
|
github.com/cyphar/filepath-securejoin v0.6.1 // indirect
|
||||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||||
github.com/emirpasic/gods v1.18.1 // indirect
|
github.com/emirpasic/gods v1.18.1 // indirect
|
||||||
|
github.com/fogleman/gg v1.3.0 // indirect
|
||||||
github.com/go-git/gcfg/v2 v2.0.2 // indirect
|
github.com/go-git/gcfg/v2 v2.0.2 // indirect
|
||||||
github.com/go-git/go-billy/v6 v6.0.0-20260209124918-37866f83c2d3 // indirect
|
github.com/go-git/go-billy/v6 v6.0.0-20260209124918-37866f83c2d3 // indirect
|
||||||
github.com/go-logfmt/logfmt v0.6.1 // indirect
|
github.com/go-logfmt/logfmt v0.6.1 // indirect
|
||||||
|
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
|
||||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||||
github.com/kevinburke/ssh_config v1.6.0 // indirect
|
github.com/kevinburke/ssh_config v1.6.0 // indirect
|
||||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||||
github.com/pjbgf/sha1cd v0.5.0 // indirect
|
github.com/pjbgf/sha1cd v0.5.0 // indirect
|
||||||
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
github.com/sergi/go-diff v1.4.0 // indirect
|
github.com/sergi/go-diff v1.4.0 // indirect
|
||||||
github.com/stretchr/objx v0.5.3 // indirect
|
github.com/stretchr/objx v0.5.3 // indirect
|
||||||
|
github.com/yeqown/reedsolomon v1.0.0 // indirect
|
||||||
golang.org/x/crypto v0.48.0 // indirect
|
golang.org/x/crypto v0.48.0 // indirect
|
||||||
golang.org/x/net v0.50.0 // indirect
|
golang.org/x/net v0.50.0 // indirect
|
||||||
)
|
)
|
||||||
|
|||||||
12
core/go.sum
12
core/go.sum
@@ -58,6 +58,8 @@ github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc
|
|||||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
||||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
||||||
|
github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
|
||||||
|
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
|
||||||
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
|
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
|
||||||
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
||||||
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
|
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
|
||||||
@@ -75,6 +77,8 @@ github.com/go-logfmt/logfmt v0.6.1/go.mod h1:EV2pOAQoZaT1ZXZbqDl5hrymndi4SY9ED9/
|
|||||||
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||||
github.com/godbus/dbus/v5 v5.2.2 h1:TUR3TgtSVDmjiXOgAAyaZbYmIeP3DPkld3jgKGV8mXQ=
|
github.com/godbus/dbus/v5 v5.2.2 h1:TUR3TgtSVDmjiXOgAAyaZbYmIeP3DPkld3jgKGV8mXQ=
|
||||||
github.com/godbus/dbus/v5 v5.2.2/go.mod h1:3AAv2+hPq5rdnr5txxxRwiGjPXamgoIHgz9FPBfOp3c=
|
github.com/godbus/dbus/v5 v5.2.2/go.mod h1:3AAv2+hPq5rdnr5txxxRwiGjPXamgoIHgz9FPBfOp3c=
|
||||||
|
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
|
||||||
|
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
|
||||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
|
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
|
||||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
|
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
|
||||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
@@ -115,6 +119,8 @@ github.com/pilebones/go-udev v0.9.1 h1:uN72M1C1fgzhsVmBGEM8w9RD1JY4iVsPZpr+Z6rb3
|
|||||||
github.com/pilebones/go-udev v0.9.1/go.mod h1:Bgcl07crebF3JSeS4+nuaRvhWFdCeFoBhXXeAp93XNo=
|
github.com/pilebones/go-udev v0.9.1/go.mod h1:Bgcl07crebF3JSeS4+nuaRvhWFdCeFoBhXXeAp93XNo=
|
||||||
github.com/pjbgf/sha1cd v0.5.0 h1:a+UkboSi1znleCDUNT3M5YxjOnN1fz2FhN48FlwCxs0=
|
github.com/pjbgf/sha1cd v0.5.0 h1:a+UkboSi1znleCDUNT3M5YxjOnN1fz2FhN48FlwCxs0=
|
||||||
github.com/pjbgf/sha1cd v0.5.0/go.mod h1:lhpGlyHLpQZoxMv8HcgXvZEhcGs0PG/vsZnEJ7H0iCM=
|
github.com/pjbgf/sha1cd v0.5.0/go.mod h1:lhpGlyHLpQZoxMv8HcgXvZEhcGs0PG/vsZnEJ7H0iCM=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
@@ -142,6 +148,12 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
|
|||||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||||
|
github.com/yeqown/go-qrcode/v2 v2.2.5 h1:HCOe2bSjkhZyYoyyNaXNzh4DJZll6inVJQQw+8228Zk=
|
||||||
|
github.com/yeqown/go-qrcode/v2 v2.2.5/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw=
|
||||||
|
github.com/yeqown/go-qrcode/writer/standard v1.3.0 h1:chdyhEfRtUPgQtuPeaWVGQ/TQx4rE1PqeoW3U+53t34=
|
||||||
|
github.com/yeqown/go-qrcode/writer/standard v1.3.0/go.mod h1:O4MbzsotGCvy8upYPCR91j81dr5XLT7heuljcNXW+oQ=
|
||||||
|
github.com/yeqown/reedsolomon v1.0.0 h1:x1h/Ej/uJnNu8jaX7GLHBWmZKCAWjEJTetkqaabr4B0=
|
||||||
|
github.com/yeqown/reedsolomon v1.0.0/go.mod h1:P76zpcn2TCuL0ul1Fso373qHRc69LKwAw/Iy6g1WiiM=
|
||||||
github.com/yuin/goldmark v1.4.15/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.15/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE=
|
github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE=
|
||||||
github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||||
|
|||||||
35
core/internal/blur/probe.go
Normal file
35
core/internal/blur/probe.go
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
package blur
|
||||||
|
|
||||||
|
import (
|
||||||
|
wlhelpers "github.com/AvengeMedia/DankMaterialShell/core/internal/wayland/client"
|
||||||
|
client "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
|
||||||
|
)
|
||||||
|
|
||||||
|
const extBackgroundEffectInterface = "ext_background_effect_manager_v1"
|
||||||
|
|
||||||
|
func ProbeSupport() (bool, error) {
|
||||||
|
display, err := client.Connect("")
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
defer display.Context().Close()
|
||||||
|
|
||||||
|
registry, err := display.GetRegistry()
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
found := false
|
||||||
|
registry.SetGlobalHandler(func(e client.RegistryGlobalEvent) {
|
||||||
|
switch e.Interface {
|
||||||
|
case extBackgroundEffectInterface:
|
||||||
|
found = true
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := wlhelpers.Roundtrip(display, display.Context()); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return found, nil
|
||||||
|
}
|
||||||
@@ -1,10 +1,12 @@
|
|||||||
package clipboard
|
package clipboard
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
"syscall"
|
"syscall"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/ext_data_control"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/ext_data_control"
|
||||||
@@ -12,17 +14,37 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func Copy(data []byte, mimeType string) error {
|
func Copy(data []byte, mimeType string) error {
|
||||||
return CopyOpts(data, mimeType, false, false)
|
return CopyReader(bytes.NewReader(data), mimeType, false, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
func CopyOpts(data []byte, mimeType string, foreground, pasteOnce bool) error {
|
func CopyOpts(data []byte, mimeType string, foreground, pasteOnce bool) error {
|
||||||
|
if foreground {
|
||||||
|
return copyServeWithWriter(func(writer io.Writer) error {
|
||||||
|
total := 0
|
||||||
|
for total < len(data) {
|
||||||
|
n, err := writer.Write(data[total:])
|
||||||
|
total += n
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if total != len(data) {
|
||||||
|
return io.ErrShortWrite
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}, mimeType, pasteOnce)
|
||||||
|
}
|
||||||
|
return CopyReader(bytes.NewReader(data), mimeType, foreground, pasteOnce)
|
||||||
|
}
|
||||||
|
|
||||||
|
func CopyReader(data io.Reader, mimeType string, foreground, pasteOnce bool) error {
|
||||||
if !foreground {
|
if !foreground {
|
||||||
return copyFork(data, mimeType, pasteOnce)
|
return copyFork(data, mimeType, pasteOnce)
|
||||||
}
|
}
|
||||||
return copyServe(data, mimeType, pasteOnce)
|
return copyServeReader(data, mimeType, pasteOnce)
|
||||||
}
|
}
|
||||||
|
|
||||||
func copyFork(data []byte, mimeType string, pasteOnce bool) error {
|
func copyFork(data io.Reader, mimeType string, pasteOnce bool) error {
|
||||||
args := []string{os.Args[0], "cl", "copy", "--foreground"}
|
args := []string{os.Args[0], "cl", "copy", "--foreground"}
|
||||||
if pasteOnce {
|
if pasteOnce {
|
||||||
args = append(args, "--paste-once")
|
args = append(args, "--paste-once")
|
||||||
@@ -30,30 +52,102 @@ func copyFork(data []byte, mimeType string, pasteOnce bool) error {
|
|||||||
args = append(args, "--type", mimeType)
|
args = append(args, "--type", mimeType)
|
||||||
|
|
||||||
cmd := exec.Command(args[0], args[1:]...)
|
cmd := exec.Command(args[0], args[1:]...)
|
||||||
cmd.Stdin = nil
|
|
||||||
cmd.Stdout = nil
|
|
||||||
cmd.Stderr = nil
|
cmd.Stderr = nil
|
||||||
cmd.SysProcAttr = &syscall.SysProcAttr{Setsid: true}
|
cmd.SysProcAttr = &syscall.SysProcAttr{Setsid: true}
|
||||||
|
cmd.Env = append(os.Environ(), "DMS_CLIP_FORKED=1")
|
||||||
|
|
||||||
stdin, err := cmd.StdinPipe()
|
stdout, err := cmd.StdoutPipe()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("stdin pipe: %w", err)
|
return fmt.Errorf("stdout pipe: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := cmd.Start(); err != nil {
|
switch src := data.(type) {
|
||||||
return fmt.Errorf("start: %w", err)
|
case *os.File:
|
||||||
|
cmd.Stdin = src
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return fmt.Errorf("start: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
stdin, err := cmd.StdinPipe()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("stdin pipe: %w", err)
|
||||||
|
}
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return fmt.Errorf("start: %w", err)
|
||||||
|
}
|
||||||
|
if _, err := io.Copy(stdin, data); err != nil {
|
||||||
|
stdin.Close()
|
||||||
|
return fmt.Errorf("write stdin: %w", err)
|
||||||
|
}
|
||||||
|
if err := stdin.Close(); err != nil {
|
||||||
|
return fmt.Errorf("close stdin: %w", err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err := stdin.Write(data); err != nil {
|
var buf [1]byte
|
||||||
stdin.Close()
|
if _, err := stdout.Read(buf[:]); err != nil {
|
||||||
return fmt.Errorf("write stdin: %w", err)
|
return fmt.Errorf("waiting for clipboard ready: %w", err)
|
||||||
}
|
}
|
||||||
stdin.Close()
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func copyServe(data []byte, mimeType string, pasteOnce bool) error {
|
func signalReady() {
|
||||||
|
if os.Getenv("DMS_CLIP_FORKED") == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
os.Stdout.Write([]byte{1})
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyServeReader(data io.Reader, mimeType string, pasteOnce bool) error {
|
||||||
|
cachedData, err := createClipboardCacheFile()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("create clipboard cache file: %w", err)
|
||||||
|
}
|
||||||
|
defer os.Remove(cachedData.Name())
|
||||||
|
|
||||||
|
if _, err := io.Copy(cachedData, data); err != nil {
|
||||||
|
return fmt.Errorf("cache clipboard data: %w", err)
|
||||||
|
}
|
||||||
|
if err := cachedData.Close(); err != nil {
|
||||||
|
return fmt.Errorf("close temp cache file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return copyServeWithWriter(func(writer io.Writer) error {
|
||||||
|
cachedFile, err := os.Open(cachedData.Name())
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("open temp cache file: %w", err)
|
||||||
|
}
|
||||||
|
defer cachedFile.Close()
|
||||||
|
|
||||||
|
if _, err := io.Copy(writer, cachedFile); err != nil {
|
||||||
|
return fmt.Errorf("write clipboard data: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}, mimeType, pasteOnce)
|
||||||
|
}
|
||||||
|
|
||||||
|
func createClipboardCacheFile() (*os.File, error) {
|
||||||
|
preferredDirs := []string{}
|
||||||
|
|
||||||
|
if cacheDir, err := os.UserCacheDir(); err == nil {
|
||||||
|
preferredDirs = append(preferredDirs, filepath.Join(cacheDir, "dms", "clipboard"))
|
||||||
|
}
|
||||||
|
preferredDirs = append(preferredDirs, "/var/tmp/dms/clipboard")
|
||||||
|
|
||||||
|
for _, dir := range preferredDirs {
|
||||||
|
if err := os.MkdirAll(dir, 0o700); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
cachedData, err := os.CreateTemp(dir, "dms-clipboard-*")
|
||||||
|
if err == nil {
|
||||||
|
return cachedData, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return os.CreateTemp("", "dms-clipboard-*")
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyServeWithWriter(writeTo func(io.Writer) error, mimeType string, pasteOnce bool) error {
|
||||||
display, err := wlclient.Connect("")
|
display, err := wlclient.Connect("")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("wayland connect: %w", err)
|
return fmt.Errorf("wayland connect: %w", err)
|
||||||
@@ -139,12 +233,18 @@ func copyServe(data []byte, mimeType string, pasteOnce bool) error {
|
|||||||
|
|
||||||
cancelled := make(chan struct{})
|
cancelled := make(chan struct{})
|
||||||
pasted := make(chan struct{}, 1)
|
pasted := make(chan struct{}, 1)
|
||||||
|
sendErr := make(chan error, 1)
|
||||||
|
|
||||||
source.SetSendHandler(func(e ext_data_control.ExtDataControlSourceV1SendEvent) {
|
source.SetSendHandler(func(e ext_data_control.ExtDataControlSourceV1SendEvent) {
|
||||||
defer syscall.Close(e.Fd)
|
defer syscall.Close(e.Fd)
|
||||||
file := os.NewFile(uintptr(e.Fd), "pipe")
|
file := os.NewFile(uintptr(e.Fd), "pipe")
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
file.Write(data)
|
if err := writeTo(file); err != nil {
|
||||||
|
select {
|
||||||
|
case sendErr <- err:
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
}
|
||||||
select {
|
select {
|
||||||
case pasted <- struct{}{}:
|
case pasted <- struct{}{}:
|
||||||
default:
|
default:
|
||||||
@@ -160,11 +260,14 @@ func copyServe(data []byte, mimeType string, pasteOnce bool) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
display.Roundtrip()
|
display.Roundtrip()
|
||||||
|
signalReady()
|
||||||
|
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
case <-cancelled:
|
case <-cancelled:
|
||||||
return nil
|
return nil
|
||||||
|
case err := <-sendErr:
|
||||||
|
return err
|
||||||
case <-pasted:
|
case <-pasted:
|
||||||
if pasteOnce {
|
if pasteOnce {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -100,7 +100,7 @@ windowrule = float on, match:class ^(blueman-manager)$
|
|||||||
windowrule = float on, match:class ^(org\.gnome\.Nautilus)$
|
windowrule = float on, match:class ^(org\.gnome\.Nautilus)$
|
||||||
windowrule = float on, match:class ^(xdg-desktop-portal)$
|
windowrule = float on, match:class ^(xdg-desktop-portal)$
|
||||||
|
|
||||||
windowrule = noinitialfocus on, match:class ^(steam)$, match:title ^(notificationtoasts)
|
windowrule = no_initial_focus on, match:class ^(steam)$, match:title ^(notificationtoasts)
|
||||||
windowrule = pin on, match:class ^(steam)$, match:title ^(notificationtoasts)
|
windowrule = pin on, match:class ^(steam)$, match:title ^(notificationtoasts)
|
||||||
|
|
||||||
windowrule = float on, match:class ^(firefox)$, match:title ^(Picture-in-Picture)$
|
windowrule = float on, match:class ^(firefox)$, match:title ^(Picture-in-Picture)$
|
||||||
@@ -111,6 +111,7 @@ windowrule = float on, match:class ^(zoom)$
|
|||||||
# windowrule = float on, match:class ^(org.quickshell)$
|
# windowrule = float on, match:class ^(org.quickshell)$
|
||||||
|
|
||||||
layerrule = no_anim on, match:namespace ^(quickshell)$
|
layerrule = no_anim on, match:namespace ^(quickshell)$
|
||||||
|
layerrule = no_anim on, match:namespace ^dms:.*
|
||||||
|
|
||||||
source = ./dms/colors.conf
|
source = ./dms/colors.conf
|
||||||
source = ./dms/outputs.conf
|
source = ./dms/outputs.conf
|
||||||
|
|||||||
@@ -252,6 +252,7 @@ window-rule {
|
|||||||
// Open dms windows as floating by default
|
// Open dms windows as floating by default
|
||||||
window-rule {
|
window-rule {
|
||||||
match app-id=r#"org.quickshell$"#
|
match app-id=r#"org.quickshell$"#
|
||||||
|
match app-id=r#"com.danklinux.dms$"#
|
||||||
open-floating true
|
open-floating true
|
||||||
}
|
}
|
||||||
debug {
|
debug {
|
||||||
|
|||||||
@@ -26,6 +26,9 @@ func init() {
|
|||||||
Register("cachyos", "#08A283", FamilyArch, func(config DistroConfig, logChan chan<- string) Distribution {
|
Register("cachyos", "#08A283", FamilyArch, func(config DistroConfig, logChan chan<- string) Distribution {
|
||||||
return NewArchDistribution(config, logChan)
|
return NewArchDistribution(config, logChan)
|
||||||
})
|
})
|
||||||
|
Register("catos", "#1793D1", FamilyArch, func(config DistroConfig, logChan chan<- string) Distribution {
|
||||||
|
return NewArchDistribution(config, logChan)
|
||||||
|
})
|
||||||
Register("endeavouros", "#7F3FBF", FamilyArch, func(config DistroConfig, logChan chan<- string) Distribution {
|
Register("endeavouros", "#7F3FBF", FamilyArch, func(config DistroConfig, logChan chan<- string) Distribution {
|
||||||
return NewArchDistribution(config, logChan)
|
return NewArchDistribution(config, logChan)
|
||||||
})
|
})
|
||||||
@@ -94,6 +97,7 @@ func (a *ArchDistribution) DetectDependenciesWithTerminal(ctx context.Context, w
|
|||||||
dependencies = append(dependencies, a.detectGit())
|
dependencies = append(dependencies, a.detectGit())
|
||||||
dependencies = append(dependencies, a.detectWindowManager(wm))
|
dependencies = append(dependencies, a.detectWindowManager(wm))
|
||||||
dependencies = append(dependencies, a.detectQuickshell())
|
dependencies = append(dependencies, a.detectQuickshell())
|
||||||
|
dependencies = append(dependencies, a.detectDMSGreeter())
|
||||||
dependencies = append(dependencies, a.detectXDGPortal())
|
dependencies = append(dependencies, a.detectXDGPortal())
|
||||||
dependencies = append(dependencies, a.detectAccountsService())
|
dependencies = append(dependencies, a.detectAccountsService())
|
||||||
|
|
||||||
@@ -121,12 +125,52 @@ func (a *ArchDistribution) detectAccountsService() deps.Dependency {
|
|||||||
return a.detectPackage("accountsservice", "D-Bus interface for user account query and manipulation", a.packageInstalled("accountsservice"))
|
return a.detectPackage("accountsservice", "D-Bus interface for user account query and manipulation", a.packageInstalled("accountsservice"))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a *ArchDistribution) detectDMSGreeter() deps.Dependency {
|
||||||
|
return a.detectOptionalPackage("dms-greeter", "DankMaterialShell greetd greeter", a.packageInstalled("greetd-dms-greeter-git"))
|
||||||
|
}
|
||||||
|
|
||||||
func (a *ArchDistribution) packageInstalled(pkg string) bool {
|
func (a *ArchDistribution) packageInstalled(pkg string) bool {
|
||||||
cmd := exec.Command("pacman", "-Q", pkg)
|
cmd := exec.Command("pacman", "-Q", pkg)
|
||||||
err := cmd.Run()
|
err := cmd.Run()
|
||||||
return err == nil
|
return err == nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// parseSRCINFODeps reads a .SRCINFO file and returns runtime dep and makedep package
|
||||||
|
func parseSRCINFODeps(srcinfoPath string) (deps []string, makedeps []string, err error) {
|
||||||
|
data, err := os.ReadFile(srcinfoPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
for _, line := range strings.Split(string(data), "\n") {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
var pkg string
|
||||||
|
var target *[]string
|
||||||
|
switch {
|
||||||
|
case strings.HasPrefix(line, "makedepends = "):
|
||||||
|
pkg = strings.TrimPrefix(line, "makedepends = ")
|
||||||
|
target = &makedeps
|
||||||
|
case strings.HasPrefix(line, "depends = "):
|
||||||
|
pkg = strings.TrimPrefix(line, "depends = ")
|
||||||
|
target = &deps
|
||||||
|
default:
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Strip version constraint (>=, <=, >, <, =) and colon-descriptions
|
||||||
|
if idx := strings.IndexAny(pkg, "><:="); idx >= 0 {
|
||||||
|
pkg = pkg[:idx]
|
||||||
|
}
|
||||||
|
pkg = strings.TrimSpace(pkg)
|
||||||
|
if pkg != "" {
|
||||||
|
*target = append(*target, pkg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return deps, makedeps, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *ArchDistribution) isInSystemRepo(pkg string) bool {
|
||||||
|
return exec.Command("pacman", "-Si", pkg).Run() == nil
|
||||||
|
}
|
||||||
|
|
||||||
func (a *ArchDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
func (a *ArchDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
||||||
return a.GetPackageMappingWithVariants(wm, make(map[string]deps.PackageVariant))
|
return a.GetPackageMappingWithVariants(wm, make(map[string]deps.PackageVariant))
|
||||||
}
|
}
|
||||||
@@ -136,6 +180,7 @@ func (a *ArchDistribution) GetPackageMappingWithVariants(wm deps.WindowManager,
|
|||||||
"dms (DankMaterialShell)": a.getDMSMapping(variants["dms (DankMaterialShell)"]),
|
"dms (DankMaterialShell)": a.getDMSMapping(variants["dms (DankMaterialShell)"]),
|
||||||
"git": {Name: "git", Repository: RepoTypeSystem},
|
"git": {Name: "git", Repository: RepoTypeSystem},
|
||||||
"quickshell": a.getQuickshellMapping(variants["quickshell"]),
|
"quickshell": a.getQuickshellMapping(variants["quickshell"]),
|
||||||
|
"dms-greeter": {Name: "greetd-dms-greeter-git", Repository: RepoTypeAUR},
|
||||||
"matugen": a.getMatugenMapping(variants["matugen"]),
|
"matugen": a.getMatugenMapping(variants["matugen"]),
|
||||||
"dgop": {Name: "dgop", Repository: RepoTypeSystem},
|
"dgop": {Name: "dgop", Repository: RepoTypeSystem},
|
||||||
"ghostty": {Name: "ghostty", Repository: RepoTypeSystem},
|
"ghostty": {Name: "ghostty", Repository: RepoTypeSystem},
|
||||||
@@ -431,29 +476,10 @@ func (a *ArchDistribution) installAURPackages(ctx context.Context, packages []st
|
|||||||
a.log(fmt.Sprintf("Installing AUR packages manually: %s", strings.Join(packages, ", ")))
|
a.log(fmt.Sprintf("Installing AUR packages manually: %s", strings.Join(packages, ", ")))
|
||||||
|
|
||||||
hasNiri := false
|
hasNiri := false
|
||||||
hasQuickshell := false
|
|
||||||
for _, pkg := range packages {
|
for _, pkg := range packages {
|
||||||
if pkg == "niri-git" {
|
if pkg == "niri-git" {
|
||||||
hasNiri = true
|
hasNiri = true
|
||||||
}
|
}
|
||||||
if pkg == "quickshell" || pkg == "quickshell-git" {
|
|
||||||
hasQuickshell = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If quickshell is in the list, always reinstall google-breakpad first
|
|
||||||
if hasQuickshell {
|
|
||||||
progressChan <- InstallProgressMsg{
|
|
||||||
Phase: PhaseAURPackages,
|
|
||||||
Progress: 0.63,
|
|
||||||
Step: "Reinstalling google-breakpad for quickshell...",
|
|
||||||
IsComplete: false,
|
|
||||||
CommandInfo: "Reinstalling prerequisite AUR package for quickshell",
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := a.installSingleAURPackage(ctx, "google-breakpad", sudoPassword, progressChan, 0.63, 0.65); err != nil {
|
|
||||||
return fmt.Errorf("failed to reinstall google-breakpad prerequisite for quickshell: %w", err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// If niri is in the list, install makepkg-git-lfs-proto first if not already installed
|
// If niri is in the list, install makepkg-git-lfs-proto first if not already installed
|
||||||
@@ -534,6 +560,16 @@ func (a *ArchDistribution) reorderAURPackages(packages []string) []string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sudoPassword string, progressChan chan<- InstallProgressMsg, startProgress, endProgress float64) error {
|
func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sudoPassword string, progressChan chan<- InstallProgressMsg, startProgress, endProgress float64) error {
|
||||||
|
return a.installSingleAURPackageInternal(ctx, pkg, sudoPassword, progressChan, startProgress, endProgress, make(map[string]bool))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *ArchDistribution) installSingleAURPackageInternal(ctx context.Context, pkg, sudoPassword string, progressChan chan<- InstallProgressMsg, startProgress, endProgress float64, visited map[string]bool) error {
|
||||||
|
if visited[pkg] {
|
||||||
|
a.log(fmt.Sprintf("Skipping %s (already being installed, cycle detected)", pkg))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
visited[pkg] = true
|
||||||
|
|
||||||
homeDir, err := os.UserHomeDir()
|
homeDir, err := os.UserHomeDir()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to get user home directory: %w", err)
|
return fmt.Errorf("failed to get user home directory: %w", err)
|
||||||
@@ -607,48 +643,8 @@ func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sud
|
|||||||
return fmt.Errorf("failed to remove optdepends from .SRCINFO for %s: %w", pkg, err)
|
return fmt.Errorf("failed to remove optdepends from .SRCINFO for %s: %w", pkg, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Skip dependency installation for dms-shell-git and dms-shell-bin
|
srcinfoPath = filepath.Join(packageDir, ".SRCINFO")
|
||||||
// since we manually manage those dependencies
|
if pkg == "dms-shell-bin" {
|
||||||
if pkg != "dms-shell-git" && pkg != "dms-shell-bin" {
|
|
||||||
// Pre-install dependencies from .SRCINFO
|
|
||||||
progressChan <- InstallProgressMsg{
|
|
||||||
Phase: PhaseAURPackages,
|
|
||||||
Progress: startProgress + 0.3*(endProgress-startProgress),
|
|
||||||
Step: fmt.Sprintf("Installing dependencies for %s...", pkg),
|
|
||||||
IsComplete: false,
|
|
||||||
CommandInfo: "Installing package dependencies and makedepends",
|
|
||||||
}
|
|
||||||
|
|
||||||
// Install dependencies and makedepends explicitly
|
|
||||||
srcinfoPath = filepath.Join(packageDir, ".SRCINFO")
|
|
||||||
|
|
||||||
depsCmd := exec.CommandContext(ctx, "bash", "-c",
|
|
||||||
fmt.Sprintf(`
|
|
||||||
deps=$(grep "depends = " "%s" | grep -v "makedepends" | sed 's/.*depends = //' | tr '\n' ' ' | sed 's/[[:space:]]*$//')
|
|
||||||
if [[ "%s" == *"quickshell"* ]]; then
|
|
||||||
deps=$(echo "$deps" | sed 's/google-breakpad//g' | sed 's/ / /g' | sed 's/^ *//g' | sed 's/ *$//g')
|
|
||||||
fi
|
|
||||||
if [ ! -z "$deps" ] && [ "$deps" != " " ]; then
|
|
||||||
echo '%s' | sudo -S pacman -S --needed --noconfirm $deps
|
|
||||||
fi
|
|
||||||
`, srcinfoPath, pkg, sudoPassword))
|
|
||||||
|
|
||||||
if err := a.runWithProgress(depsCmd, progressChan, PhaseAURPackages, startProgress+0.3*(endProgress-startProgress), startProgress+0.35*(endProgress-startProgress)); err != nil {
|
|
||||||
return fmt.Errorf("FAILED to install runtime dependencies for %s: %w", pkg, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
makedepsCmd := exec.CommandContext(ctx, "bash", "-c",
|
|
||||||
fmt.Sprintf(`
|
|
||||||
makedeps=$(grep -E "^[[:space:]]*makedepends = " "%s" | sed 's/^[[:space:]]*makedepends = //' | tr '\n' ' ')
|
|
||||||
if [ ! -z "$makedeps" ]; then
|
|
||||||
echo '%s' | sudo -S pacman -S --needed --noconfirm $makedeps
|
|
||||||
fi
|
|
||||||
`, srcinfoPath, sudoPassword))
|
|
||||||
|
|
||||||
if err := a.runWithProgress(makedepsCmd, progressChan, PhaseAURPackages, startProgress+0.35*(endProgress-startProgress), startProgress+0.4*(endProgress-startProgress)); err != nil {
|
|
||||||
return fmt.Errorf("FAILED to install make dependencies for %s: %w", pkg, err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
progressChan <- InstallProgressMsg{
|
progressChan <- InstallProgressMsg{
|
||||||
Phase: PhaseAURPackages,
|
Phase: PhaseAURPackages,
|
||||||
Progress: startProgress + 0.35*(endProgress-startProgress),
|
Progress: startProgress + 0.35*(endProgress-startProgress),
|
||||||
@@ -656,6 +652,66 @@ func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sud
|
|||||||
IsComplete: false,
|
IsComplete: false,
|
||||||
LogOutput: fmt.Sprintf("Dependencies for %s are installed separately", pkg),
|
LogOutput: fmt.Sprintf("Dependencies for %s are installed separately", pkg),
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: PhaseAURPackages,
|
||||||
|
Progress: startProgress + 0.3*(endProgress-startProgress),
|
||||||
|
Step: fmt.Sprintf("Resolving dependencies for %s...", pkg),
|
||||||
|
IsComplete: false,
|
||||||
|
CommandInfo: "Classifying dependencies as system or AUR",
|
||||||
|
}
|
||||||
|
|
||||||
|
runtimeDeps, makeDeps, err := parseSRCINFODeps(srcinfoPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to parse .SRCINFO for %s: %w", pkg, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
var systemPkgs []string
|
||||||
|
var aurPkgs []string
|
||||||
|
|
||||||
|
for _, dep := range append(runtimeDeps, makeDeps...) {
|
||||||
|
if seen[dep] || a.packageInstalled(dep) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[dep] = true
|
||||||
|
if a.isInSystemRepo(dep) {
|
||||||
|
systemPkgs = append(systemPkgs, dep)
|
||||||
|
} else {
|
||||||
|
aurPkgs = append(aurPkgs, dep)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(systemPkgs) > 0 {
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: PhaseAURPackages,
|
||||||
|
Progress: startProgress + 0.32*(endProgress-startProgress),
|
||||||
|
Step: fmt.Sprintf("Installing %d system dependencies for %s...", len(systemPkgs), pkg),
|
||||||
|
IsComplete: false,
|
||||||
|
CommandInfo: fmt.Sprintf("sudo pacman -S --needed --noconfirm %s", strings.Join(systemPkgs, " ")),
|
||||||
|
}
|
||||||
|
if err := a.installSystemPackages(ctx, systemPkgs, sudoPassword, progressChan); err != nil {
|
||||||
|
return fmt.Errorf("failed to install system dependencies for %s: %w", pkg, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, aurDep := range aurPkgs {
|
||||||
|
a.log(fmt.Sprintf("Dependency %s is AUR-only, building from source...", aurDep))
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: PhaseAURPackages,
|
||||||
|
Progress: startProgress + 0.35*(endProgress-startProgress),
|
||||||
|
Step: fmt.Sprintf("Installing AUR dependency %s for %s...", aurDep, pkg),
|
||||||
|
IsComplete: false,
|
||||||
|
CommandInfo: fmt.Sprintf("Building AUR dependency: %s", aurDep),
|
||||||
|
}
|
||||||
|
if err := a.installSingleAURPackageInternal(ctx, aurDep, sudoPassword, progressChan,
|
||||||
|
startProgress+0.35*(endProgress-startProgress),
|
||||||
|
startProgress+0.39*(endProgress-startProgress),
|
||||||
|
visited,
|
||||||
|
); err != nil {
|
||||||
|
return fmt.Errorf("failed to install AUR dependency %s for %s: %w", aurDep, pkg, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
progressChan <- InstallProgressMsg{
|
||||||
@@ -668,7 +724,7 @@ func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sud
|
|||||||
|
|
||||||
buildCmd := exec.CommandContext(ctx, "makepkg", "--noconfirm")
|
buildCmd := exec.CommandContext(ctx, "makepkg", "--noconfirm")
|
||||||
buildCmd.Dir = packageDir
|
buildCmd.Dir = packageDir
|
||||||
buildCmd.Env = append(os.Environ(), "PKGEXT=.pkg.tar") // Disable compression for speed
|
buildCmd.Env = append(os.Environ(), "PKGEXT=.pkg.tar")
|
||||||
|
|
||||||
if err := a.runWithProgress(buildCmd, progressChan, PhaseAURPackages, startProgress+0.4*(endProgress-startProgress), startProgress+0.7*(endProgress-startProgress)); err != nil {
|
if err := a.runWithProgress(buildCmd, progressChan, PhaseAURPackages, startProgress+0.4*(endProgress-startProgress), startProgress+0.7*(endProgress-startProgress)); err != nil {
|
||||||
return fmt.Errorf("failed to build %s: %w", pkg, err)
|
return fmt.Errorf("failed to build %s: %w", pkg, err)
|
||||||
|
|||||||
@@ -102,6 +102,19 @@ func (b *BaseDistribution) detectPackage(name, description string, installed boo
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b *BaseDistribution) detectOptionalPackage(name, description string, installed bool) deps.Dependency {
|
||||||
|
status := deps.StatusMissing
|
||||||
|
if installed {
|
||||||
|
status = deps.StatusInstalled
|
||||||
|
}
|
||||||
|
return deps.Dependency{
|
||||||
|
Name: name,
|
||||||
|
Status: status,
|
||||||
|
Description: description,
|
||||||
|
Required: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (b *BaseDistribution) detectGit() deps.Dependency {
|
func (b *BaseDistribution) detectGit() deps.Dependency {
|
||||||
return b.detectCommand("git", "Version control system")
|
return b.detectCommand("git", "Version control system")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"runtime"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
|
||||||
@@ -61,6 +60,7 @@ func (d *DebianDistribution) DetectDependenciesWithTerminal(ctx context.Context,
|
|||||||
dependencies = append(dependencies, d.detectGit())
|
dependencies = append(dependencies, d.detectGit())
|
||||||
dependencies = append(dependencies, d.detectWindowManager(wm))
|
dependencies = append(dependencies, d.detectWindowManager(wm))
|
||||||
dependencies = append(dependencies, d.detectQuickshell())
|
dependencies = append(dependencies, d.detectQuickshell())
|
||||||
|
dependencies = append(dependencies, d.detectDMSGreeter())
|
||||||
dependencies = append(dependencies, d.detectXDGPortal())
|
dependencies = append(dependencies, d.detectXDGPortal())
|
||||||
dependencies = append(dependencies, d.detectAccountsService())
|
dependencies = append(dependencies, d.detectAccountsService())
|
||||||
|
|
||||||
@@ -86,10 +86,32 @@ func (d *DebianDistribution) detectAccountsService() deps.Dependency {
|
|||||||
return d.detectPackage("accountsservice", "D-Bus interface for user account query and manipulation", d.packageInstalled("accountsservice"))
|
return d.detectPackage("accountsservice", "D-Bus interface for user account query and manipulation", d.packageInstalled("accountsservice"))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *DebianDistribution) detectDMSGreeter() deps.Dependency {
|
||||||
|
return d.detectOptionalPackage("dms-greeter", "DankMaterialShell greetd greeter", d.packageInstalled("dms-greeter"))
|
||||||
|
}
|
||||||
|
|
||||||
func (d *DebianDistribution) packageInstalled(pkg string) bool {
|
func (d *DebianDistribution) packageInstalled(pkg string) bool {
|
||||||
cmd := exec.Command("dpkg", "-l", pkg)
|
return debianPackageInstalledPrecisely(pkg)
|
||||||
err := cmd.Run()
|
}
|
||||||
return err == nil
|
|
||||||
|
func debianPackageInstalledPrecisely(pkg string) bool {
|
||||||
|
cmd := exec.Command("dpkg-query", "-W", "-f=${db:Status-Status}", pkg)
|
||||||
|
output, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return strings.TrimSpace(string(output)) == "installed"
|
||||||
|
}
|
||||||
|
|
||||||
|
func debianRepoArchitecture(arch string) string {
|
||||||
|
switch arch {
|
||||||
|
case "amd64", "x86_64":
|
||||||
|
return "amd64"
|
||||||
|
case "arm64", "aarch64":
|
||||||
|
return "arm64"
|
||||||
|
default:
|
||||||
|
return arch
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *DebianDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
func (d *DebianDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
||||||
@@ -108,6 +130,7 @@ func (d *DebianDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
|
|||||||
// DMS packages from OBS with variant support
|
// DMS packages from OBS with variant support
|
||||||
"dms (DankMaterialShell)": d.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
"dms (DankMaterialShell)": d.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
||||||
"quickshell": d.getQuickshellMapping(variants["quickshell"]),
|
"quickshell": d.getQuickshellMapping(variants["quickshell"]),
|
||||||
|
"dms-greeter": {Name: "dms-greeter", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
"matugen": {Name: "matugen", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
"matugen": {Name: "matugen", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
"dgop": {Name: "dgop", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
"dgop": {Name: "dgop", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
"ghostty": {Name: "ghostty", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
"ghostty": {Name: "ghostty", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
@@ -188,12 +211,12 @@ func (d *DebianDistribution) InstallPrerequisites(ctx context.Context, sudoPassw
|
|||||||
Step: "Installing development dependencies...",
|
Step: "Installing development dependencies...",
|
||||||
IsComplete: false,
|
IsComplete: false,
|
||||||
NeedsSudo: true,
|
NeedsSudo: true,
|
||||||
CommandInfo: "sudo apt-get install -y curl wget git cmake ninja-build pkg-config libxcb-cursor-dev libglib2.0-dev libpolkit-agent-1-dev",
|
CommandInfo: "sudo apt-get install -y curl wget git cmake ninja-build pkg-config gnupg libxcb-cursor-dev libglib2.0-dev libpolkit-agent-1-dev",
|
||||||
LogOutput: "Installing additional development tools",
|
LogOutput: "Installing additional development tools",
|
||||||
}
|
}
|
||||||
|
|
||||||
devToolsCmd := ExecSudoCommand(ctx, sudoPassword,
|
devToolsCmd := ExecSudoCommand(ctx, sudoPassword,
|
||||||
"DEBIAN_FRONTEND=noninteractive apt-get install -y curl wget git cmake ninja-build pkg-config libxcb-cursor-dev libglib2.0-dev libpolkit-agent-1-dev libjpeg-dev libpugixml-dev")
|
"DEBIAN_FRONTEND=noninteractive apt-get install -y curl wget git cmake ninja-build pkg-config gnupg libxcb-cursor-dev libglib2.0-dev libpolkit-agent-1-dev libjpeg-dev libpugixml-dev")
|
||||||
if err := d.runWithProgress(devToolsCmd, progressChan, PhasePrerequisites, 0.10, 0.12); err != nil {
|
if err := d.runWithProgress(devToolsCmd, progressChan, PhasePrerequisites, 0.10, 0.12); err != nil {
|
||||||
return fmt.Errorf("failed to install development tools: %w", err)
|
return fmt.Errorf("failed to install development tools: %w", err)
|
||||||
}
|
}
|
||||||
@@ -373,6 +396,14 @@ func (d *DebianDistribution) extractPackageNames(packages []PackageMapping) []st
|
|||||||
return names
|
return names
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *DebianDistribution) aptInstallArgs(packages []string, minimal bool) []string {
|
||||||
|
args := []string{"DEBIAN_FRONTEND=noninteractive", "apt-get", "install", "-y"}
|
||||||
|
if minimal {
|
||||||
|
args = append(args, "--no-install-recommends")
|
||||||
|
}
|
||||||
|
return append(args, packages...)
|
||||||
|
}
|
||||||
|
|
||||||
func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []PackageMapping, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []PackageMapping, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
enabledRepos := make(map[string]bool)
|
enabledRepos := make(map[string]bool)
|
||||||
|
|
||||||
@@ -430,7 +461,7 @@ func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Packa
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Add repository
|
// Add repository
|
||||||
repoLine := fmt.Sprintf("deb [signed-by=%s, arch=%s] %s/ /", keyringPath, runtime.GOARCH, baseURL)
|
repoLine := fmt.Sprintf("deb [signed-by=%s arch=%s] %s/ /", keyringPath, debianRepoArchitecture(osInfo.Architecture), baseURL)
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
progressChan <- InstallProgressMsg{
|
||||||
Phase: PhaseSystemPackages,
|
Phase: PhaseSystemPackages,
|
||||||
@@ -476,20 +507,46 @@ func (d *DebianDistribution) installAPTPackages(ctx context.Context, packages []
|
|||||||
|
|
||||||
d.log(fmt.Sprintf("Installing APT packages: %s", strings.Join(packages, ", ")))
|
d.log(fmt.Sprintf("Installing APT packages: %s", strings.Join(packages, ", ")))
|
||||||
|
|
||||||
args := []string{"DEBIAN_FRONTEND=noninteractive", "apt-get", "install", "-y"}
|
groups := orderedMinimalInstallGroups(packages)
|
||||||
args = append(args, packages...)
|
totalGroups := len(groups)
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
groupIndex := 0
|
||||||
Phase: PhaseSystemPackages,
|
installGroup := func(groupPackages []string, minimal bool) error {
|
||||||
Progress: 0.40,
|
if len(groupPackages) == 0 {
|
||||||
Step: "Installing system packages...",
|
return nil
|
||||||
IsComplete: false,
|
}
|
||||||
NeedsSudo: true,
|
|
||||||
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
groupIndex++
|
||||||
|
startProgress := 0.40
|
||||||
|
endProgress := 0.60
|
||||||
|
if totalGroups > 1 {
|
||||||
|
if groupIndex == 1 {
|
||||||
|
endProgress = 0.50
|
||||||
|
} else {
|
||||||
|
startProgress = 0.50
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
args := d.aptInstallArgs(groupPackages, minimal)
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: PhaseSystemPackages,
|
||||||
|
Progress: startProgress,
|
||||||
|
Step: "Installing system packages...",
|
||||||
|
IsComplete: false,
|
||||||
|
NeedsSudo: true,
|
||||||
|
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
||||||
|
return d.runWithProgress(cmd, progressChan, PhaseSystemPackages, startProgress, endProgress)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
for _, group := range groups {
|
||||||
return d.runWithProgress(cmd, progressChan, PhaseSystemPackages, 0.40, 0.60)
|
if err := installGroup(group.packages, group.minimal); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *DebianDistribution) installBuildDependencies(ctx context.Context, manualPkgs []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (d *DebianDistribution) installBuildDependencies(ctx context.Context, manualPkgs []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
|
|||||||
@@ -13,6 +13,9 @@ func init() {
|
|||||||
Register("fedora", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
|
Register("fedora", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
|
||||||
return NewFedoraDistribution(config, logChan)
|
return NewFedoraDistribution(config, logChan)
|
||||||
})
|
})
|
||||||
|
Register("evernight", "#72B8DC", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
|
||||||
|
return NewFedoraDistribution(config, logChan)
|
||||||
|
})
|
||||||
Register("nobara", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
|
Register("nobara", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
|
||||||
return NewFedoraDistribution(config, logChan)
|
return NewFedoraDistribution(config, logChan)
|
||||||
})
|
})
|
||||||
@@ -75,6 +78,7 @@ func (f *FedoraDistribution) DetectDependenciesWithTerminal(ctx context.Context,
|
|||||||
dependencies = append(dependencies, f.detectGit())
|
dependencies = append(dependencies, f.detectGit())
|
||||||
dependencies = append(dependencies, f.detectWindowManager(wm))
|
dependencies = append(dependencies, f.detectWindowManager(wm))
|
||||||
dependencies = append(dependencies, f.detectQuickshell())
|
dependencies = append(dependencies, f.detectQuickshell())
|
||||||
|
dependencies = append(dependencies, f.detectDMSGreeter())
|
||||||
dependencies = append(dependencies, f.detectXDGPortal())
|
dependencies = append(dependencies, f.detectXDGPortal())
|
||||||
dependencies = append(dependencies, f.detectAccountsService())
|
dependencies = append(dependencies, f.detectAccountsService())
|
||||||
|
|
||||||
@@ -120,6 +124,7 @@ func (f *FedoraDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
|
|||||||
|
|
||||||
// COPR packages
|
// COPR packages
|
||||||
"quickshell": f.getQuickshellMapping(variants["quickshell"]),
|
"quickshell": f.getQuickshellMapping(variants["quickshell"]),
|
||||||
|
"dms-greeter": {Name: "dms-greeter", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
|
||||||
"matugen": {Name: "matugen", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
|
"matugen": {Name: "matugen", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
|
||||||
"dms (DankMaterialShell)": f.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
"dms (DankMaterialShell)": f.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
||||||
"dgop": {Name: "dgop", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
|
"dgop": {Name: "dgop", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
|
||||||
@@ -191,6 +196,10 @@ func (f *FedoraDistribution) detectAccountsService() deps.Dependency {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (f *FedoraDistribution) detectDMSGreeter() deps.Dependency {
|
||||||
|
return f.detectOptionalPackage("dms-greeter", "DankMaterialShell greetd greeter", f.packageInstalled("dms-greeter"))
|
||||||
|
}
|
||||||
|
|
||||||
func (f *FedoraDistribution) getPrerequisites() []string {
|
func (f *FedoraDistribution) getPrerequisites() []string {
|
||||||
return []string{
|
return []string{
|
||||||
"dnf-plugins-core",
|
"dnf-plugins-core",
|
||||||
@@ -475,28 +484,7 @@ func (f *FedoraDistribution) installDNFPackages(ctx context.Context, packages []
|
|||||||
|
|
||||||
f.log(fmt.Sprintf("Installing DNF packages: %s", strings.Join(packages, ", ")))
|
f.log(fmt.Sprintf("Installing DNF packages: %s", strings.Join(packages, ", ")))
|
||||||
|
|
||||||
args := []string{"dnf", "install", "-y"}
|
return f.installDNFGroups(ctx, packages, sudoPassword, progressChan, PhaseSystemPackages, "Installing system packages...", 0.40, 0.60)
|
||||||
|
|
||||||
for _, pkg := range packages {
|
|
||||||
if pkg == "niri" || pkg == "niri-git" {
|
|
||||||
args = append(args, "--setopt=install_weak_deps=False")
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
args = append(args, packages...)
|
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
|
||||||
Phase: PhaseSystemPackages,
|
|
||||||
Progress: 0.40,
|
|
||||||
Step: "Installing system packages...",
|
|
||||||
IsComplete: false,
|
|
||||||
NeedsSudo: true,
|
|
||||||
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
|
||||||
return f.runWithProgress(cmd, progressChan, PhaseSystemPackages, 0.40, 0.60)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (f *FedoraDistribution) installCOPRPackages(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (f *FedoraDistribution) installCOPRPackages(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
@@ -506,26 +494,57 @@ func (f *FedoraDistribution) installCOPRPackages(ctx context.Context, packages [
|
|||||||
|
|
||||||
f.log(fmt.Sprintf("Installing COPR packages: %s", strings.Join(packages, ", ")))
|
f.log(fmt.Sprintf("Installing COPR packages: %s", strings.Join(packages, ", ")))
|
||||||
|
|
||||||
args := []string{"dnf", "install", "-y"}
|
return f.installDNFGroups(ctx, packages, sudoPassword, progressChan, PhaseAURPackages, "Installing COPR packages...", 0.70, 0.85)
|
||||||
|
}
|
||||||
|
|
||||||
for _, pkg := range packages {
|
func (f *FedoraDistribution) dnfInstallArgs(packages []string, minimal bool) []string {
|
||||||
if pkg == "niri" || pkg == "niri-git" {
|
args := []string{"dnf", "install", "-y"}
|
||||||
args = append(args, "--setopt=install_weak_deps=False")
|
if minimal {
|
||||||
break
|
args = append(args, "--setopt=install_weak_deps=False")
|
||||||
|
}
|
||||||
|
return append(args, packages...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FedoraDistribution) installDNFGroups(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg, phase InstallPhase, step string, startProgress float64, endProgress float64) error {
|
||||||
|
groups := orderedMinimalInstallGroups(packages)
|
||||||
|
totalGroups := len(groups)
|
||||||
|
|
||||||
|
groupIndex := 0
|
||||||
|
installGroup := func(groupPackages []string, minimal bool) error {
|
||||||
|
if len(groupPackages) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
groupIndex++
|
||||||
|
groupStart := startProgress
|
||||||
|
groupEnd := endProgress
|
||||||
|
if totalGroups > 1 {
|
||||||
|
midpoint := startProgress + ((endProgress - startProgress) / 2)
|
||||||
|
if groupIndex == 1 {
|
||||||
|
groupEnd = midpoint
|
||||||
|
} else {
|
||||||
|
groupStart = midpoint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
args := f.dnfInstallArgs(groupPackages, minimal)
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: phase,
|
||||||
|
Progress: groupStart,
|
||||||
|
Step: step,
|
||||||
|
IsComplete: false,
|
||||||
|
NeedsSudo: true,
|
||||||
|
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
||||||
|
return f.runWithProgress(cmd, progressChan, phase, groupStart, groupEnd)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, group := range groups {
|
||||||
|
if err := installGroup(group.packages, group.minimal); err != nil {
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
args = append(args, packages...)
|
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
|
||||||
Phase: PhaseAURPackages,
|
|
||||||
Progress: 0.70,
|
|
||||||
Step: "Installing COPR packages...",
|
|
||||||
IsComplete: false,
|
|
||||||
NeedsSudo: true,
|
|
||||||
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
|
||||||
return f.runWithProgress(cmd, progressChan, PhaseAURPackages, 0.70, 0.85)
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -55,6 +55,7 @@ const (
|
|||||||
PhaseAURPackages
|
PhaseAURPackages
|
||||||
PhaseCursorTheme
|
PhaseCursorTheme
|
||||||
PhaseConfiguration
|
PhaseConfiguration
|
||||||
|
PhaseGreeterSetup
|
||||||
PhaseComplete
|
PhaseComplete
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
44
core/internal/distros/minimal_install.go
Normal file
44
core/internal/distros/minimal_install.go
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
package distros
|
||||||
|
|
||||||
|
type minimalInstallGroup struct {
|
||||||
|
packages []string
|
||||||
|
minimal bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func shouldPreferMinimalInstall(pkg string) bool {
|
||||||
|
switch pkg {
|
||||||
|
case "niri", "niri-git":
|
||||||
|
return true
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func splitMinimalInstallPackages(packages []string) (normal []string, minimal []string) {
|
||||||
|
for _, pkg := range packages {
|
||||||
|
if shouldPreferMinimalInstall(pkg) {
|
||||||
|
minimal = append(minimal, pkg)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
normal = append(normal, pkg)
|
||||||
|
}
|
||||||
|
return normal, minimal
|
||||||
|
}
|
||||||
|
|
||||||
|
func orderedMinimalInstallGroups(packages []string) []minimalInstallGroup {
|
||||||
|
normal, minimal := splitMinimalInstallPackages(packages)
|
||||||
|
groups := make([]minimalInstallGroup, 0, 2)
|
||||||
|
if len(minimal) > 0 {
|
||||||
|
groups = append(groups, minimalInstallGroup{
|
||||||
|
packages: minimal,
|
||||||
|
minimal: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if len(normal) > 0 {
|
||||||
|
groups = append(groups, minimalInstallGroup{
|
||||||
|
packages: normal,
|
||||||
|
minimal: false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return groups
|
||||||
|
}
|
||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
|
||||||
@@ -29,6 +30,8 @@ type OpenSUSEDistribution struct {
|
|||||||
config DistroConfig
|
config DistroConfig
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const openSUSENiriWaylandServerPackage = "libwayland-server0"
|
||||||
|
|
||||||
func NewOpenSUSEDistribution(config DistroConfig, logChan chan<- string) *OpenSUSEDistribution {
|
func NewOpenSUSEDistribution(config DistroConfig, logChan chan<- string) *OpenSUSEDistribution {
|
||||||
base := NewBaseDistribution(logChan)
|
base := NewBaseDistribution(logChan)
|
||||||
return &OpenSUSEDistribution{
|
return &OpenSUSEDistribution{
|
||||||
@@ -71,6 +74,7 @@ func (o *OpenSUSEDistribution) DetectDependenciesWithTerminal(ctx context.Contex
|
|||||||
dependencies = append(dependencies, o.detectGit())
|
dependencies = append(dependencies, o.detectGit())
|
||||||
dependencies = append(dependencies, o.detectWindowManager(wm))
|
dependencies = append(dependencies, o.detectWindowManager(wm))
|
||||||
dependencies = append(dependencies, o.detectQuickshell())
|
dependencies = append(dependencies, o.detectQuickshell())
|
||||||
|
dependencies = append(dependencies, o.detectDMSGreeter())
|
||||||
dependencies = append(dependencies, o.detectXDGPortal())
|
dependencies = append(dependencies, o.detectXDGPortal())
|
||||||
dependencies = append(dependencies, o.detectAccountsService())
|
dependencies = append(dependencies, o.detectAccountsService())
|
||||||
|
|
||||||
@@ -100,6 +104,10 @@ func (o *OpenSUSEDistribution) packageInstalled(pkg string) bool {
|
|||||||
return err == nil
|
return err == nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (o *OpenSUSEDistribution) detectDMSGreeter() deps.Dependency {
|
||||||
|
return o.detectOptionalPackage("dms-greeter", "DankMaterialShell greetd greeter", o.packageInstalled("dms-greeter"))
|
||||||
|
}
|
||||||
|
|
||||||
func (o *OpenSUSEDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
func (o *OpenSUSEDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
||||||
return o.GetPackageMappingWithVariants(wm, make(map[string]deps.PackageVariant))
|
return o.GetPackageMappingWithVariants(wm, make(map[string]deps.PackageVariant))
|
||||||
}
|
}
|
||||||
@@ -116,6 +124,7 @@ func (o *OpenSUSEDistribution) GetPackageMappingWithVariants(wm deps.WindowManag
|
|||||||
// DMS packages from OBS
|
// DMS packages from OBS
|
||||||
"dms (DankMaterialShell)": o.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
"dms (DankMaterialShell)": o.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
||||||
"quickshell": o.getQuickshellMapping(variants["quickshell"]),
|
"quickshell": o.getQuickshellMapping(variants["quickshell"]),
|
||||||
|
"dms-greeter": {Name: "dms-greeter", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
"ghostty": {Name: "ghostty", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
"ghostty": {Name: "ghostty", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
"matugen": {Name: "matugen", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
"matugen": {Name: "matugen", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
"dgop": {Name: "dgop", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
"dgop": {Name: "dgop", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
|
||||||
@@ -193,35 +202,7 @@ func (o *OpenSUSEDistribution) detectAccountsService() deps.Dependency {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (o *OpenSUSEDistribution) getPrerequisites() []string {
|
func (o *OpenSUSEDistribution) getPrerequisites() []string {
|
||||||
return []string{
|
return []string{}
|
||||||
"make",
|
|
||||||
"unzip",
|
|
||||||
"gcc",
|
|
||||||
"gcc-c++",
|
|
||||||
"cmake",
|
|
||||||
"ninja",
|
|
||||||
"pkgconf-pkg-config",
|
|
||||||
"git",
|
|
||||||
"qt6-base-devel",
|
|
||||||
"qt6-declarative-devel",
|
|
||||||
"qt6-declarative-private-devel",
|
|
||||||
"qt6-shadertools",
|
|
||||||
"qt6-shadertools-devel",
|
|
||||||
"qt6-wayland-devel",
|
|
||||||
"qt6-waylandclient-private-devel",
|
|
||||||
"spirv-tools-devel",
|
|
||||||
"cli11-devel",
|
|
||||||
"wayland-protocols-devel",
|
|
||||||
"libgbm-devel",
|
|
||||||
"libdrm-devel",
|
|
||||||
"pipewire-devel",
|
|
||||||
"jemalloc-devel",
|
|
||||||
"wayland-utils",
|
|
||||||
"Mesa-libGLESv3-devel",
|
|
||||||
"pam-devel",
|
|
||||||
"glib2-devel",
|
|
||||||
"polkit-devel",
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (o *OpenSUSEDistribution) InstallPrerequisites(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (o *OpenSUSEDistribution) InstallPrerequisites(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
@@ -291,6 +272,10 @@ func (o *OpenSUSEDistribution) InstallPackages(ctx context.Context, dependencies
|
|||||||
LogOutput: "Starting prerequisite check...",
|
LogOutput: "Starting prerequisite check...",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err := o.disableInstallMediaRepos(ctx, sudoPassword, progressChan); err != nil {
|
||||||
|
return fmt.Errorf("failed to disable install media repositories: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
if err := o.InstallPrerequisites(ctx, sudoPassword, progressChan); err != nil {
|
if err := o.InstallPrerequisites(ctx, sudoPassword, progressChan); err != nil {
|
||||||
return fmt.Errorf("failed to install prerequisites: %w", err)
|
return fmt.Errorf("failed to install prerequisites: %w", err)
|
||||||
}
|
}
|
||||||
@@ -321,7 +306,7 @@ func (o *OpenSUSEDistribution) InstallPackages(ctx context.Context, dependencies
|
|||||||
NeedsSudo: true,
|
NeedsSudo: true,
|
||||||
LogOutput: fmt.Sprintf("Installing system packages: %s", strings.Join(systemPkgs, ", ")),
|
LogOutput: fmt.Sprintf("Installing system packages: %s", strings.Join(systemPkgs, ", ")),
|
||||||
}
|
}
|
||||||
if err := o.installZypperPackages(ctx, systemPkgs, sudoPassword, progressChan); err != nil {
|
if err := o.installZypperPackages(ctx, systemPkgs, sudoPassword, progressChan, PhaseSystemPackages, "Installing system packages...", 0.40, 0.60); err != nil {
|
||||||
return fmt.Errorf("failed to install zypper packages: %w", err)
|
return fmt.Errorf("failed to install zypper packages: %w", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -336,7 +321,7 @@ func (o *OpenSUSEDistribution) InstallPackages(ctx context.Context, dependencies
|
|||||||
IsComplete: false,
|
IsComplete: false,
|
||||||
LogOutput: fmt.Sprintf("Installing OBS packages: %s", strings.Join(obsPkgNames, ", ")),
|
LogOutput: fmt.Sprintf("Installing OBS packages: %s", strings.Join(obsPkgNames, ", ")),
|
||||||
}
|
}
|
||||||
if err := o.installZypperPackages(ctx, obsPkgNames, sudoPassword, progressChan); err != nil {
|
if err := o.installZypperPackages(ctx, obsPkgNames, sudoPassword, progressChan, PhaseAURPackages, "Installing OBS packages...", 0.70, 0.85); err != nil {
|
||||||
return fmt.Errorf("failed to install OBS packages: %w", err)
|
return fmt.Errorf("failed to install OBS packages: %w", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -426,9 +411,32 @@ func (o *OpenSUSEDistribution) categorizePackages(dependencies []deps.Dependency
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
systemPkgs = o.appendMissingSystemPackages(systemPkgs, openSUSENiriRuntimePackages(wm, disabledFlags))
|
||||||
|
|
||||||
return systemPkgs, obsPkgs, manualPkgs, variantMap
|
return systemPkgs, obsPkgs, manualPkgs, variantMap
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func openSUSENiriRuntimePackages(wm deps.WindowManager, disabledFlags map[string]bool) []string {
|
||||||
|
if wm != deps.WindowManagerNiri || disabledFlags["niri"] {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return []string{openSUSENiriWaylandServerPackage}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *OpenSUSEDistribution) appendMissingSystemPackages(systemPkgs []string, extraPkgs []string) []string {
|
||||||
|
for _, pkg := range extraPkgs {
|
||||||
|
if slices.Contains(systemPkgs, pkg) || o.packageInstalled(pkg) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
o.log(fmt.Sprintf("Adding openSUSE runtime package: %s", pkg))
|
||||||
|
systemPkgs = append(systemPkgs, pkg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return systemPkgs
|
||||||
|
}
|
||||||
|
|
||||||
func (o *OpenSUSEDistribution) extractPackageNames(packages []PackageMapping) []string {
|
func (o *OpenSUSEDistribution) extractPackageNames(packages []PackageMapping) []string {
|
||||||
names := make([]string, len(packages))
|
names := make([]string, len(packages))
|
||||||
for i, pkg := range packages {
|
for i, pkg := range packages {
|
||||||
@@ -508,27 +516,146 @@ func (o *OpenSUSEDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Pac
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (o *OpenSUSEDistribution) installZypperPackages(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func isOpenSUSEInstallMediaURI(uri string) bool {
|
||||||
|
normalizedURI := strings.ToLower(strings.TrimSpace(uri))
|
||||||
|
|
||||||
|
return strings.HasPrefix(normalizedURI, "cd:/") ||
|
||||||
|
strings.HasPrefix(normalizedURI, "dvd:/") ||
|
||||||
|
strings.HasPrefix(normalizedURI, "hd:/") ||
|
||||||
|
strings.HasPrefix(normalizedURI, "iso:/")
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseZypperInstallMediaAliases(output string) []string {
|
||||||
|
var aliases []string
|
||||||
|
|
||||||
|
for _, line := range strings.Split(output, "\n") {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if line == "" || !strings.Contains(line, "|") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
parts := strings.Split(line, "|")
|
||||||
|
if len(parts) < 7 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := range parts {
|
||||||
|
parts[i] = strings.TrimSpace(parts[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
alias := parts[1]
|
||||||
|
enabled := strings.ToLower(parts[3])
|
||||||
|
uri := parts[len(parts)-1]
|
||||||
|
|
||||||
|
if alias == "" || strings.EqualFold(alias, "alias") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if enabled != "" && enabled != "yes" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !isOpenSUSEInstallMediaURI(uri) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
aliases = append(aliases, alias)
|
||||||
|
}
|
||||||
|
|
||||||
|
return aliases
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *OpenSUSEDistribution) disableInstallMediaRepos(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
|
listCmd := exec.CommandContext(ctx, "zypper", "repos", "-u")
|
||||||
|
output, err := listCmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
o.log(fmt.Sprintf("Warning: failed to list zypper repositories: %s", strings.TrimSpace(string(output))))
|
||||||
|
return fmt.Errorf("failed to list zypper repositories: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
aliases := parseZypperInstallMediaAliases(string(output))
|
||||||
|
if len(aliases) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
o.log(fmt.Sprintf("Disabling install media repositories: %s", strings.Join(aliases, ", ")))
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: PhasePrerequisites,
|
||||||
|
Progress: 0.055,
|
||||||
|
Step: "Disabling install media repositories...",
|
||||||
|
IsComplete: false,
|
||||||
|
NeedsSudo: true,
|
||||||
|
CommandInfo: fmt.Sprintf("sudo zypper modifyrepo -d %s", strings.Join(aliases, " ")),
|
||||||
|
LogOutput: fmt.Sprintf("Disabling install media repositories: %s", strings.Join(aliases, ", ")),
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, alias := range aliases {
|
||||||
|
cmd := ExecSudoCommand(ctx, sudoPassword, fmt.Sprintf("zypper modifyrepo -d '%s'", escapeSingleQuotes(alias)))
|
||||||
|
repoOutput, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
o.log(fmt.Sprintf("Failed to disable install media repo %s: %s", alias, strings.TrimSpace(string(repoOutput))))
|
||||||
|
return fmt.Errorf("failed to disable install media repo %s: %w", alias, err)
|
||||||
|
}
|
||||||
|
o.log(fmt.Sprintf("Disabled install media repo %s: %s", alias, strings.TrimSpace(string(repoOutput))))
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *OpenSUSEDistribution) zypperInstallArgs(packages []string, minimal bool) []string {
|
||||||
|
args := []string{"zypper", "install", "-y"}
|
||||||
|
if minimal {
|
||||||
|
args = append(args, "--no-recommends")
|
||||||
|
}
|
||||||
|
return append(args, packages...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *OpenSUSEDistribution) installZypperPackages(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg, phase InstallPhase, step string, startProgress float64, endProgress float64) error {
|
||||||
if len(packages) == 0 {
|
if len(packages) == 0 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
o.log(fmt.Sprintf("Installing zypper packages: %s", strings.Join(packages, ", ")))
|
o.log(fmt.Sprintf("Installing zypper packages: %s", strings.Join(packages, ", ")))
|
||||||
|
|
||||||
args := []string{"zypper", "install", "-y"}
|
groups := orderedMinimalInstallGroups(packages)
|
||||||
args = append(args, packages...)
|
totalGroups := len(groups)
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
groupIndex := 0
|
||||||
Phase: PhaseSystemPackages,
|
installGroup := func(groupPackages []string, minimal bool) error {
|
||||||
Progress: 0.40,
|
if len(groupPackages) == 0 {
|
||||||
Step: "Installing system packages...",
|
return nil
|
||||||
IsComplete: false,
|
}
|
||||||
NeedsSudo: true,
|
|
||||||
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
groupIndex++
|
||||||
|
groupStart := startProgress
|
||||||
|
groupEnd := endProgress
|
||||||
|
if totalGroups > 1 {
|
||||||
|
midpoint := startProgress + ((endProgress - startProgress) / 2)
|
||||||
|
if groupIndex == 1 {
|
||||||
|
groupEnd = midpoint
|
||||||
|
} else {
|
||||||
|
groupStart = midpoint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
args := o.zypperInstallArgs(groupPackages, minimal)
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: phase,
|
||||||
|
Progress: groupStart,
|
||||||
|
Step: step,
|
||||||
|
IsComplete: false,
|
||||||
|
NeedsSudo: true,
|
||||||
|
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
||||||
|
return o.runWithProgress(cmd, progressChan, phase, groupStart, groupEnd)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
for _, group := range groups {
|
||||||
return o.runWithProgress(cmd, progressChan, PhaseSystemPackages, 0.40, 0.60)
|
if err := installGroup(group.packages, group.minimal); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (o *OpenSUSEDistribution) installQuickshell(ctx context.Context, variant deps.PackageVariant, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (o *OpenSUSEDistribution) installQuickshell(ctx context.Context, variant deps.PackageVariant, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
|
|||||||
@@ -63,6 +63,7 @@ func (u *UbuntuDistribution) DetectDependenciesWithTerminal(ctx context.Context,
|
|||||||
dependencies = append(dependencies, u.detectGit())
|
dependencies = append(dependencies, u.detectGit())
|
||||||
dependencies = append(dependencies, u.detectWindowManager(wm))
|
dependencies = append(dependencies, u.detectWindowManager(wm))
|
||||||
dependencies = append(dependencies, u.detectQuickshell())
|
dependencies = append(dependencies, u.detectQuickshell())
|
||||||
|
dependencies = append(dependencies, u.detectDMSGreeter())
|
||||||
dependencies = append(dependencies, u.detectXDGPortal())
|
dependencies = append(dependencies, u.detectXDGPortal())
|
||||||
dependencies = append(dependencies, u.detectAccountsService())
|
dependencies = append(dependencies, u.detectAccountsService())
|
||||||
|
|
||||||
@@ -94,10 +95,12 @@ func (u *UbuntuDistribution) detectAccountsService() deps.Dependency {
|
|||||||
return u.detectPackage("accountsservice", "D-Bus interface for user account query and manipulation", u.packageInstalled("accountsservice"))
|
return u.detectPackage("accountsservice", "D-Bus interface for user account query and manipulation", u.packageInstalled("accountsservice"))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (u *UbuntuDistribution) detectDMSGreeter() deps.Dependency {
|
||||||
|
return u.detectOptionalPackage("dms-greeter", "DankMaterialShell greetd greeter", u.packageInstalled("dms-greeter"))
|
||||||
|
}
|
||||||
|
|
||||||
func (u *UbuntuDistribution) packageInstalled(pkg string) bool {
|
func (u *UbuntuDistribution) packageInstalled(pkg string) bool {
|
||||||
cmd := exec.Command("dpkg", "-l", pkg)
|
return debianPackageInstalledPrecisely(pkg)
|
||||||
err := cmd.Run()
|
|
||||||
return err == nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (u *UbuntuDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
func (u *UbuntuDistribution) GetPackageMapping(wm deps.WindowManager) map[string]PackageMapping {
|
||||||
@@ -116,6 +119,7 @@ func (u *UbuntuDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
|
|||||||
// DMS packages from PPAs
|
// DMS packages from PPAs
|
||||||
"dms (DankMaterialShell)": u.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
"dms (DankMaterialShell)": u.getDmsMapping(variants["dms (DankMaterialShell)"]),
|
||||||
"quickshell": u.getQuickshellMapping(variants["quickshell"]),
|
"quickshell": u.getQuickshellMapping(variants["quickshell"]),
|
||||||
|
"dms-greeter": {Name: "dms-greeter", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
||||||
"matugen": {Name: "matugen", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
"matugen": {Name: "matugen", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
||||||
"dgop": {Name: "dgop", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
"dgop": {Name: "dgop", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
||||||
"ghostty": {Name: "ghostty", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
"ghostty": {Name: "ghostty", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
|
||||||
@@ -448,21 +452,7 @@ func (u *UbuntuDistribution) installAPTPackages(ctx context.Context, packages []
|
|||||||
}
|
}
|
||||||
|
|
||||||
u.log(fmt.Sprintf("Installing APT packages: %s", strings.Join(packages, ", ")))
|
u.log(fmt.Sprintf("Installing APT packages: %s", strings.Join(packages, ", ")))
|
||||||
|
return u.installAPTGroups(ctx, packages, sudoPassword, progressChan, PhaseSystemPackages, "Installing system packages...", 0.40, 0.60)
|
||||||
args := []string{"apt-get", "install", "-y"}
|
|
||||||
args = append(args, packages...)
|
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
|
||||||
Phase: PhaseSystemPackages,
|
|
||||||
Progress: 0.40,
|
|
||||||
Step: "Installing system packages...",
|
|
||||||
IsComplete: false,
|
|
||||||
NeedsSudo: true,
|
|
||||||
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
|
||||||
return u.runWithProgress(cmd, progressChan, PhaseSystemPackages, 0.40, 0.60)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (u *UbuntuDistribution) installPPAPackages(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (u *UbuntuDistribution) installPPAPackages(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
@@ -471,21 +461,59 @@ func (u *UbuntuDistribution) installPPAPackages(ctx context.Context, packages []
|
|||||||
}
|
}
|
||||||
|
|
||||||
u.log(fmt.Sprintf("Installing PPA packages: %s", strings.Join(packages, ", ")))
|
u.log(fmt.Sprintf("Installing PPA packages: %s", strings.Join(packages, ", ")))
|
||||||
|
return u.installAPTGroups(ctx, packages, sudoPassword, progressChan, PhaseAURPackages, "Installing PPA packages...", 0.70, 0.85)
|
||||||
|
}
|
||||||
|
|
||||||
args := []string{"apt-get", "install", "-y"}
|
func (u *UbuntuDistribution) aptInstallArgs(packages []string, minimal bool) []string {
|
||||||
args = append(args, packages...)
|
args := []string{"DEBIAN_FRONTEND=noninteractive", "apt-get", "install", "-y"}
|
||||||
|
if minimal {
|
||||||
|
args = append(args, "--no-install-recommends")
|
||||||
|
}
|
||||||
|
return append(args, packages...)
|
||||||
|
}
|
||||||
|
|
||||||
progressChan <- InstallProgressMsg{
|
func (u *UbuntuDistribution) installAPTGroups(ctx context.Context, packages []string, sudoPassword string, progressChan chan<- InstallProgressMsg, phase InstallPhase, step string, startProgress float64, endProgress float64) error {
|
||||||
Phase: PhaseAURPackages,
|
groups := orderedMinimalInstallGroups(packages)
|
||||||
Progress: 0.70,
|
totalGroups := len(groups)
|
||||||
Step: "Installing PPA packages...",
|
|
||||||
IsComplete: false,
|
groupIndex := 0
|
||||||
NeedsSudo: true,
|
installGroup := func(groupPackages []string, minimal bool) error {
|
||||||
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
if len(groupPackages) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
groupIndex++
|
||||||
|
groupStart := startProgress
|
||||||
|
groupEnd := endProgress
|
||||||
|
if totalGroups > 1 {
|
||||||
|
midpoint := startProgress + ((endProgress - startProgress) / 2)
|
||||||
|
if groupIndex == 1 {
|
||||||
|
groupEnd = midpoint
|
||||||
|
} else {
|
||||||
|
groupStart = midpoint
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
args := u.aptInstallArgs(groupPackages, minimal)
|
||||||
|
progressChan <- InstallProgressMsg{
|
||||||
|
Phase: phase,
|
||||||
|
Progress: groupStart,
|
||||||
|
Step: step,
|
||||||
|
IsComplete: false,
|
||||||
|
NeedsSudo: true,
|
||||||
|
CommandInfo: fmt.Sprintf("sudo %s", strings.Join(args, " ")),
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
||||||
|
return u.runWithProgress(cmd, progressChan, phase, groupStart, groupEnd)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := ExecSudoCommand(ctx, sudoPassword, strings.Join(args, " "))
|
for _, group := range groups {
|
||||||
return u.runWithProgress(cmd, progressChan, PhaseAURPackages, 0.70, 0.85)
|
if err := installGroup(group.packages, group.minimal); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (u *UbuntuDistribution) installBuildDependencies(ctx context.Context, manualPkgs []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
func (u *UbuntuDistribution) installBuildDependencies(ctx context.Context, manualPkgs []string, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
|
||||||
|
|||||||
42
core/internal/geolocation/client.go
Normal file
42
core/internal/geolocation/client.go
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
package geolocation
|
||||||
|
|
||||||
|
import "github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
|
|
||||||
|
func NewClient() Client {
|
||||||
|
geoclueClient, err := newGeoClueClient()
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("GeoClue2 unavailable: %v", err)
|
||||||
|
return newSeededIpClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
loc, _ := geoclueClient.GetLocation()
|
||||||
|
if loc.Latitude != 0 || loc.Longitude != 0 {
|
||||||
|
log.Info("Using GeoClue2 location")
|
||||||
|
return geoclueClient
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info("GeoClue2 has no fix yet, seeding with IP location")
|
||||||
|
ipLoc, err := fetchIPLocation()
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("IP location seed failed: %v", err)
|
||||||
|
return geoclueClient
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info("Seeded GeoClue2 with IP location")
|
||||||
|
geoclueClient.SeedLocation(Location{Latitude: ipLoc.Latitude, Longitude: ipLoc.Longitude})
|
||||||
|
return geoclueClient
|
||||||
|
}
|
||||||
|
|
||||||
|
func newSeededIpClient() *IpClient {
|
||||||
|
client := newIpClient()
|
||||||
|
ipLoc, err := fetchIPLocation()
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("IP location also failed: %v", err)
|
||||||
|
return client
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info("Using IP location")
|
||||||
|
client.currLocation.Latitude = ipLoc.Latitude
|
||||||
|
client.currLocation.Longitude = ipLoc.Longitude
|
||||||
|
return client
|
||||||
|
}
|
||||||
243
core/internal/geolocation/client_geoclue.go
Normal file
243
core/internal/geolocation/client_geoclue.go
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
package geolocation
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/pkg/dbusutil"
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/pkg/syncmap"
|
||||||
|
"github.com/godbus/dbus/v5"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
dbusGeoClueService = "org.freedesktop.GeoClue2"
|
||||||
|
dbusGeoCluePath = "/org/freedesktop/GeoClue2"
|
||||||
|
dbusGeoClueInterface = dbusGeoClueService
|
||||||
|
|
||||||
|
dbusGeoClueManagerPath = dbusGeoCluePath + "/Manager"
|
||||||
|
dbusGeoClueManagerInterface = dbusGeoClueInterface + ".Manager"
|
||||||
|
dbusGeoClueManagerGetClient = dbusGeoClueManagerInterface + ".GetClient"
|
||||||
|
|
||||||
|
dbusGeoClueClientInterface = dbusGeoClueInterface + ".Client"
|
||||||
|
dbusGeoClueClientDesktopId = dbusGeoClueClientInterface + ".DesktopId"
|
||||||
|
dbusGeoClueClientTimeThreshold = dbusGeoClueClientInterface + ".TimeThreshold"
|
||||||
|
dbusGeoClueClientTimeStart = dbusGeoClueClientInterface + ".Start"
|
||||||
|
dbusGeoClueClientTimeStop = dbusGeoClueClientInterface + ".Stop"
|
||||||
|
dbusGeoClueClientLocationUpdated = dbusGeoClueClientInterface + ".LocationUpdated"
|
||||||
|
|
||||||
|
dbusGeoClueLocationInterface = dbusGeoClueInterface + ".Location"
|
||||||
|
dbusGeoClueLocationLatitude = dbusGeoClueLocationInterface + ".Latitude"
|
||||||
|
dbusGeoClueLocationLongitude = dbusGeoClueLocationInterface + ".Longitude"
|
||||||
|
)
|
||||||
|
|
||||||
|
type GeoClueClient struct {
|
||||||
|
currLocation *Location
|
||||||
|
locationMutex sync.RWMutex
|
||||||
|
|
||||||
|
dbusConn *dbus.Conn
|
||||||
|
clientPath dbus.ObjectPath
|
||||||
|
signals chan *dbus.Signal
|
||||||
|
|
||||||
|
stopChan chan struct{}
|
||||||
|
sigWG sync.WaitGroup
|
||||||
|
|
||||||
|
subscribers syncmap.Map[string, chan Location]
|
||||||
|
}
|
||||||
|
|
||||||
|
func newGeoClueClient() (*GeoClueClient, error) {
|
||||||
|
dbusConn, err := dbus.ConnectSystemBus()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("system bus connection failed: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
c := &GeoClueClient{
|
||||||
|
dbusConn: dbusConn,
|
||||||
|
stopChan: make(chan struct{}),
|
||||||
|
signals: make(chan *dbus.Signal, 256),
|
||||||
|
|
||||||
|
currLocation: &Location{
|
||||||
|
Latitude: 0.0,
|
||||||
|
Longitude: 0.0,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.setupClient(); err != nil {
|
||||||
|
dbusConn.Close()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.startSignalPump(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) Close() {
|
||||||
|
close(c.stopChan)
|
||||||
|
|
||||||
|
c.sigWG.Wait()
|
||||||
|
|
||||||
|
if c.signals != nil {
|
||||||
|
c.dbusConn.RemoveSignal(c.signals)
|
||||||
|
close(c.signals)
|
||||||
|
}
|
||||||
|
|
||||||
|
c.subscribers.Range(func(key string, ch chan Location) bool {
|
||||||
|
close(ch)
|
||||||
|
c.subscribers.Delete(key)
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
if c.dbusConn != nil {
|
||||||
|
c.dbusConn.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) Subscribe(id string) chan Location {
|
||||||
|
ch := make(chan Location, 64)
|
||||||
|
c.subscribers.Store(id, ch)
|
||||||
|
return ch
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) Unsubscribe(id string) {
|
||||||
|
if ch, ok := c.subscribers.LoadAndDelete(id); ok {
|
||||||
|
close(ch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) setupClient() error {
|
||||||
|
managerObj := c.dbusConn.Object(dbusGeoClueService, dbusGeoClueManagerPath)
|
||||||
|
|
||||||
|
if err := managerObj.Call(dbusGeoClueManagerGetClient, 0).Store(&c.clientPath); err != nil {
|
||||||
|
return fmt.Errorf("failed to create GeoClue2 client: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
clientObj := c.dbusConn.Object(dbusGeoClueService, c.clientPath)
|
||||||
|
if err := clientObj.SetProperty(dbusGeoClueClientDesktopId, "dms"); err != nil {
|
||||||
|
return fmt.Errorf("failed to set desktop ID: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := clientObj.SetProperty(dbusGeoClueClientTimeThreshold, uint(10)); err != nil {
|
||||||
|
return fmt.Errorf("failed to set time threshold: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) startSignalPump() error {
|
||||||
|
c.dbusConn.Signal(c.signals)
|
||||||
|
|
||||||
|
if err := c.dbusConn.AddMatchSignal(
|
||||||
|
dbus.WithMatchObjectPath(c.clientPath),
|
||||||
|
dbus.WithMatchInterface(dbusGeoClueClientInterface),
|
||||||
|
dbus.WithMatchSender(dbusGeoClueClientLocationUpdated),
|
||||||
|
); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
c.sigWG.Add(1)
|
||||||
|
go func() {
|
||||||
|
defer c.sigWG.Done()
|
||||||
|
|
||||||
|
clientObj := c.dbusConn.Object(dbusGeoClueService, c.clientPath)
|
||||||
|
clientObj.Call(dbusGeoClueClientTimeStart, 0)
|
||||||
|
defer clientObj.Call(dbusGeoClueClientTimeStop, 0)
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-c.stopChan:
|
||||||
|
return
|
||||||
|
case sig, ok := <-c.signals:
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if sig == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
c.handleSignal(sig)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) handleSignal(sig *dbus.Signal) {
|
||||||
|
switch sig.Name {
|
||||||
|
case dbusGeoClueClientLocationUpdated:
|
||||||
|
if len(sig.Body) != 2 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
newLocationPath, ok := sig.Body[1].(dbus.ObjectPath)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.handleLocationUpdated(newLocationPath); err != nil {
|
||||||
|
log.Warn("GeoClue: Failed to handle location update: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) handleLocationUpdated(path dbus.ObjectPath) error {
|
||||||
|
locationObj := c.dbusConn.Object(dbusGeoClueService, path)
|
||||||
|
|
||||||
|
lat, err := locationObj.GetProperty(dbusGeoClueLocationLatitude)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
long, err := locationObj.GetProperty(dbusGeoClueLocationLongitude)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
c.locationMutex.Lock()
|
||||||
|
c.currLocation.Latitude = dbusutil.AsOr(lat, 0.0)
|
||||||
|
c.currLocation.Longitude = dbusutil.AsOr(long, 0.0)
|
||||||
|
c.locationMutex.Unlock()
|
||||||
|
|
||||||
|
c.notifySubscribers()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) notifySubscribers() {
|
||||||
|
currentLocation, err := c.GetLocation()
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.subscribers.Range(func(key string, ch chan Location) bool {
|
||||||
|
select {
|
||||||
|
case ch <- currentLocation:
|
||||||
|
default:
|
||||||
|
log.Warn("GeoClue: subscriber channel full, dropping update")
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) SeedLocation(loc Location) {
|
||||||
|
c.locationMutex.Lock()
|
||||||
|
defer c.locationMutex.Unlock()
|
||||||
|
c.currLocation.Latitude = loc.Latitude
|
||||||
|
c.currLocation.Longitude = loc.Longitude
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *GeoClueClient) GetLocation() (Location, error) {
|
||||||
|
c.locationMutex.RLock()
|
||||||
|
defer c.locationMutex.RUnlock()
|
||||||
|
if c.currLocation == nil {
|
||||||
|
return Location{
|
||||||
|
Latitude: 0.0,
|
||||||
|
Longitude: 0.0,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
stateCopy := *c.currLocation
|
||||||
|
return stateCopy, nil
|
||||||
|
}
|
||||||
91
core/internal/geolocation/client_ip.go
Normal file
91
core/internal/geolocation/client_ip.go
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
package geolocation
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type IpClient struct {
|
||||||
|
currLocation *Location
|
||||||
|
}
|
||||||
|
|
||||||
|
type ipLocationResult struct {
|
||||||
|
Location
|
||||||
|
City string
|
||||||
|
}
|
||||||
|
|
||||||
|
type ipAPIResponse struct {
|
||||||
|
Status string `json:"status"`
|
||||||
|
Lat float64 `json:"lat"`
|
||||||
|
Lon float64 `json:"lon"`
|
||||||
|
City string `json:"city"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func newIpClient() *IpClient {
|
||||||
|
return &IpClient{
|
||||||
|
currLocation: &Location{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *IpClient) Subscribe(id string) chan Location {
|
||||||
|
ch := make(chan Location, 1)
|
||||||
|
if location, err := c.GetLocation(); err == nil {
|
||||||
|
ch <- location
|
||||||
|
}
|
||||||
|
return ch
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *IpClient) Unsubscribe(id string) {}
|
||||||
|
|
||||||
|
func (c *IpClient) Close() {}
|
||||||
|
|
||||||
|
func (c *IpClient) GetLocation() (Location, error) {
|
||||||
|
if c.currLocation.Latitude != 0 || c.currLocation.Longitude != 0 {
|
||||||
|
return *c.currLocation, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := fetchIPLocation()
|
||||||
|
if err != nil {
|
||||||
|
return Location{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
c.currLocation.Latitude = result.Latitude
|
||||||
|
c.currLocation.Longitude = result.Longitude
|
||||||
|
return *c.currLocation, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func fetchIPLocation() (ipLocationResult, error) {
|
||||||
|
client := &http.Client{Timeout: 10 * time.Second}
|
||||||
|
|
||||||
|
resp, err := client.Get("http://ip-api.com/json/")
|
||||||
|
if err != nil {
|
||||||
|
return ipLocationResult{}, fmt.Errorf("failed to fetch IP location: %w", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return ipLocationResult{}, fmt.Errorf("ip-api.com returned status %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return ipLocationResult{}, fmt.Errorf("failed to read response: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var data ipAPIResponse
|
||||||
|
if err := json.Unmarshal(body, &data); err != nil {
|
||||||
|
return ipLocationResult{}, fmt.Errorf("failed to parse response: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if data.Status == "fail" || (data.Lat == 0 && data.Lon == 0) {
|
||||||
|
return ipLocationResult{}, fmt.Errorf("ip-api.com returned no location data")
|
||||||
|
}
|
||||||
|
|
||||||
|
return ipLocationResult{
|
||||||
|
Location: Location{Latitude: data.Lat, Longitude: data.Lon},
|
||||||
|
City: data.City,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
15
core/internal/geolocation/types.go
Normal file
15
core/internal/geolocation/types.go
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
package geolocation
|
||||||
|
|
||||||
|
type Location struct {
|
||||||
|
Latitude float64
|
||||||
|
Longitude float64
|
||||||
|
}
|
||||||
|
|
||||||
|
type Client interface {
|
||||||
|
GetLocation() (Location, error)
|
||||||
|
|
||||||
|
Subscribe(id string) chan Location
|
||||||
|
Unsubscribe(id string)
|
||||||
|
|
||||||
|
Close()
|
||||||
|
}
|
||||||
91
core/internal/greeter/assets/apparmor/usr.bin.dms-greeter
Normal file
91
core/internal/greeter/assets/apparmor/usr.bin.dms-greeter
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# AppArmor profile for dms-greeter
|
||||||
|
#
|
||||||
|
# Managed by DMS — regenerated on every `dms greeter install` / `dms greeter sync`.
|
||||||
|
# Manual edits will be overwritten on next sync.
|
||||||
|
#
|
||||||
|
# Mode: complain (denials are logged, nothing is blocked)
|
||||||
|
# To switch to enforce after validating with `aa-logprof`:
|
||||||
|
# sudo aa-enforce /etc/apparmor.d/usr.bin.dms-greeter
|
||||||
|
#
|
||||||
|
#include <tunables/global>
|
||||||
|
|
||||||
|
profile dms-greeter /usr/bin/dms-greeter flags=(complain) {
|
||||||
|
#include <abstractions/base>
|
||||||
|
#include <abstractions/bash>
|
||||||
|
|
||||||
|
# The launcher script itself
|
||||||
|
/usr/bin/dms-greeter r,
|
||||||
|
|
||||||
|
# Cache directory — created by dms greeter sync/enable with greeter:greeter ownership
|
||||||
|
/var/cache/dms-greeter/ rw,
|
||||||
|
/var/cache/dms-greeter/** rwlk,
|
||||||
|
|
||||||
|
# DMS config — packaged path
|
||||||
|
/usr/share/quickshell/dms-greeter/ r,
|
||||||
|
/usr/share/quickshell/dms-greeter/** r,
|
||||||
|
/usr/share/quickshell/ r,
|
||||||
|
/usr/share/quickshell/** r,
|
||||||
|
|
||||||
|
# DMS config — system and user overrides
|
||||||
|
/etc/dms/ r,
|
||||||
|
/etc/dms/** r,
|
||||||
|
/usr/share/dms/ r,
|
||||||
|
/usr/share/dms/** r,
|
||||||
|
/home/*/.config/quickshell/ r,
|
||||||
|
/home/*/.config/quickshell/** r,
|
||||||
|
/root/.config/quickshell/ r,
|
||||||
|
/root/.config/quickshell/** r,
|
||||||
|
|
||||||
|
# greetd / PAM — read-only for session setup
|
||||||
|
/etc/greetd/ r,
|
||||||
|
/etc/greetd/** r,
|
||||||
|
/etc/pam.d/ r,
|
||||||
|
/etc/pam.d/** r,
|
||||||
|
/usr/lib/pam.d/ r,
|
||||||
|
/usr/lib/pam.d/** r,
|
||||||
|
|
||||||
|
# Compositor binaries — run unconfined so each compositor uses its own profile
|
||||||
|
/usr/bin/niri Ux,
|
||||||
|
/usr/bin/hyprland Ux,
|
||||||
|
/usr/bin/Hyprland Ux,
|
||||||
|
/usr/bin/sway Ux,
|
||||||
|
/usr/bin/labwc Ux,
|
||||||
|
/usr/bin/scroll Ux,
|
||||||
|
/usr/bin/miracle-wm Ux,
|
||||||
|
/usr/bin/mango Ux,
|
||||||
|
|
||||||
|
# Quickshell — run unconfined (has its own compositor profile on some distros)
|
||||||
|
/usr/bin/qs Ux,
|
||||||
|
/usr/bin/quickshell Ux,
|
||||||
|
|
||||||
|
# Wayland / XDG runtime (pipewire, wireplumber, wayland socket)
|
||||||
|
/run/user/[0-9]*/ rw,
|
||||||
|
/run/user/[0-9]*/** rw,
|
||||||
|
|
||||||
|
# DRM / GPU devices (required for Wayland compositor startup)
|
||||||
|
/dev/dri/ r,
|
||||||
|
/dev/dri/* rw,
|
||||||
|
/dev/udmabuf rw,
|
||||||
|
|
||||||
|
# Input devices
|
||||||
|
/dev/input/ r,
|
||||||
|
/dev/input/* r,
|
||||||
|
|
||||||
|
# Systemd journal / logging
|
||||||
|
/run/systemd/journal/socket rw,
|
||||||
|
/dev/log rw,
|
||||||
|
|
||||||
|
# Shell helper binaries invoked by the launcher script
|
||||||
|
/usr/bin/env ix,
|
||||||
|
/usr/bin/mkdir ix,
|
||||||
|
/usr/bin/cat ix,
|
||||||
|
/usr/bin/grep ix,
|
||||||
|
/usr/bin/dirname ix,
|
||||||
|
/usr/bin/basename ix,
|
||||||
|
/usr/bin/command ix,
|
||||||
|
/bin/env ix,
|
||||||
|
/bin/mkdir ix,
|
||||||
|
|
||||||
|
# Signal management (compositor lifecycle)
|
||||||
|
signal (send, receive) set=("term", "int", "hup", "kill"),
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
98
core/internal/greeter/installer_test.go
Normal file
98
core/internal/greeter/installer_test.go
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
package greeter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func writeTestFile(t *testing.T, path string, content string) {
|
||||||
|
t.Helper()
|
||||||
|
if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
|
||||||
|
t.Fatalf("failed to create parent dir for %s: %v", path, err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
|
||||||
|
t.Fatalf("failed to write %s: %v", path, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolveGreeterThemeSyncState(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
settingsJSON string
|
||||||
|
sessionJSON string
|
||||||
|
wantSourcePath string
|
||||||
|
wantResolvedWallpaper string
|
||||||
|
wantDynamicOverrideUsed bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "dynamic theme with greeter wallpaper override uses generated greeter colors",
|
||||||
|
settingsJSON: `{
|
||||||
|
"currentThemeName": "dynamic",
|
||||||
|
"greeterWallpaperPath": "Pictures/blue.jpg",
|
||||||
|
"matugenScheme": "scheme-tonal-spot",
|
||||||
|
"iconTheme": "Papirus"
|
||||||
|
}`,
|
||||||
|
sessionJSON: `{"isLightMode":true}`,
|
||||||
|
wantSourcePath: filepath.Join(".cache", "DankMaterialShell", "greeter-colors", "dms-colors.json"),
|
||||||
|
wantResolvedWallpaper: filepath.Join("Pictures", "blue.jpg"),
|
||||||
|
wantDynamicOverrideUsed: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "dynamic theme without override uses desktop colors",
|
||||||
|
settingsJSON: `{
|
||||||
|
"currentThemeName": "dynamic",
|
||||||
|
"greeterWallpaperPath": ""
|
||||||
|
}`,
|
||||||
|
sessionJSON: `{"isLightMode":false}`,
|
||||||
|
wantSourcePath: filepath.Join(".cache", "DankMaterialShell", "dms-colors.json"),
|
||||||
|
wantResolvedWallpaper: "",
|
||||||
|
wantDynamicOverrideUsed: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "non-dynamic theme keeps desktop colors even with override wallpaper",
|
||||||
|
settingsJSON: `{
|
||||||
|
"currentThemeName": "purple",
|
||||||
|
"greeterWallpaperPath": "/tmp/blue.jpg"
|
||||||
|
}`,
|
||||||
|
sessionJSON: `{"isLightMode":false}`,
|
||||||
|
wantSourcePath: filepath.Join(".cache", "DankMaterialShell", "dms-colors.json"),
|
||||||
|
wantResolvedWallpaper: "/tmp/blue.jpg",
|
||||||
|
wantDynamicOverrideUsed: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
tt := tt
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
homeDir := t.TempDir()
|
||||||
|
writeTestFile(t, filepath.Join(homeDir, ".config", "DankMaterialShell", "settings.json"), tt.settingsJSON)
|
||||||
|
writeTestFile(t, filepath.Join(homeDir, ".local", "state", "DankMaterialShell", "session.json"), tt.sessionJSON)
|
||||||
|
|
||||||
|
state, err := resolveGreeterThemeSyncState(homeDir)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("resolveGreeterThemeSyncState returned error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if got := state.effectiveColorsSource(homeDir); got != filepath.Join(homeDir, tt.wantSourcePath) {
|
||||||
|
t.Fatalf("effectiveColorsSource = %q, want %q", got, filepath.Join(homeDir, tt.wantSourcePath))
|
||||||
|
}
|
||||||
|
|
||||||
|
wantResolvedWallpaper := tt.wantResolvedWallpaper
|
||||||
|
if wantResolvedWallpaper != "" && !filepath.IsAbs(wantResolvedWallpaper) {
|
||||||
|
wantResolvedWallpaper = filepath.Join(homeDir, wantResolvedWallpaper)
|
||||||
|
}
|
||||||
|
if state.ResolvedGreeterWallpaperPath != wantResolvedWallpaper {
|
||||||
|
t.Fatalf("ResolvedGreeterWallpaperPath = %q, want %q", state.ResolvedGreeterWallpaperPath, wantResolvedWallpaper)
|
||||||
|
}
|
||||||
|
|
||||||
|
if state.UsesDynamicWallpaperOverride != tt.wantDynamicOverrideUsed {
|
||||||
|
t.Fatalf("UsesDynamicWallpaperOverride = %v, want %v", state.UsesDynamicWallpaperOverride, tt.wantDynamicOverrideUsed)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -341,6 +341,8 @@ func (n *NiriProvider) buildActionFromNode(bindNode *document.Node) string {
|
|||||||
val := arg.ValueString()
|
val := arg.ValueString()
|
||||||
if val == "" {
|
if val == "" {
|
||||||
parts = append(parts, `""`)
|
parts = append(parts, `""`)
|
||||||
|
} else if strings.ContainsAny(val, " \t") {
|
||||||
|
parts = append(parts, `"`+strings.ReplaceAll(val, `"`, `\"`)+`"`)
|
||||||
} else {
|
} else {
|
||||||
parts = append(parts, val)
|
parts = append(parts, val)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
package matugen
|
package matugen
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
"os"
|
"os"
|
||||||
@@ -19,6 +21,8 @@ import (
|
|||||||
"github.com/lucasb-eyer/go-colorful"
|
"github.com/lucasb-eyer/go-colorful"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var ErrNoChanges = errors.New("no color changes")
|
||||||
|
|
||||||
type ColorMode string
|
type ColorMode string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@@ -33,6 +37,7 @@ const (
|
|||||||
TemplateKindTerminal
|
TemplateKindTerminal
|
||||||
TemplateKindGTK
|
TemplateKindGTK
|
||||||
TemplateKindVSCode
|
TemplateKindVSCode
|
||||||
|
TemplateKindEmacs
|
||||||
)
|
)
|
||||||
|
|
||||||
type TemplateDef struct {
|
type TemplateDef struct {
|
||||||
@@ -53,7 +58,7 @@ var templateRegistry = []TemplateDef{
|
|||||||
{ID: "qt6ct", Commands: []string{"qt6ct"}, ConfigFile: "qt6ct.toml"},
|
{ID: "qt6ct", Commands: []string{"qt6ct"}, ConfigFile: "qt6ct.toml"},
|
||||||
{ID: "firefox", Commands: []string{"firefox"}, ConfigFile: "firefox.toml"},
|
{ID: "firefox", Commands: []string{"firefox"}, ConfigFile: "firefox.toml"},
|
||||||
{ID: "pywalfox", Commands: []string{"pywalfox"}, ConfigFile: "pywalfox.toml"},
|
{ID: "pywalfox", Commands: []string{"pywalfox"}, ConfigFile: "pywalfox.toml"},
|
||||||
{ID: "zenbrowser", Commands: []string{"zen", "zen-browser"}, Flatpaks: []string{"app.zen_browser.zen"}, ConfigFile: "zenbrowser.toml"},
|
{ID: "zenbrowser", Commands: []string{"zen", "zen-browser", "zen-beta", "zen-twilight"}, Flatpaks: []string{"app.zen_browser.zen"}, ConfigFile: "zenbrowser.toml"},
|
||||||
{ID: "vesktop", Commands: []string{"vesktop"}, Flatpaks: []string{"dev.vencord.Vesktop"}, ConfigFile: "vesktop.toml"},
|
{ID: "vesktop", Commands: []string{"vesktop"}, Flatpaks: []string{"dev.vencord.Vesktop"}, ConfigFile: "vesktop.toml"},
|
||||||
{ID: "equibop", Commands: []string{"equibop"}, ConfigFile: "equibop.toml"},
|
{ID: "equibop", Commands: []string{"equibop"}, ConfigFile: "equibop.toml"},
|
||||||
{ID: "ghostty", Commands: []string{"ghostty"}, ConfigFile: "ghostty.toml", Kind: TemplateKindTerminal},
|
{ID: "ghostty", Commands: []string{"ghostty"}, ConfigFile: "ghostty.toml", Kind: TemplateKindTerminal},
|
||||||
@@ -65,7 +70,8 @@ var templateRegistry = []TemplateDef{
|
|||||||
{ID: "dgop", Commands: []string{"dgop"}, ConfigFile: "dgop.toml"},
|
{ID: "dgop", Commands: []string{"dgop"}, ConfigFile: "dgop.toml"},
|
||||||
{ID: "kcolorscheme", ConfigFile: "kcolorscheme.toml", RunUnconditionally: true},
|
{ID: "kcolorscheme", ConfigFile: "kcolorscheme.toml", RunUnconditionally: true},
|
||||||
{ID: "vscode", Kind: TemplateKindVSCode},
|
{ID: "vscode", Kind: TemplateKindVSCode},
|
||||||
{ID: "emacs", Commands: []string{"emacs"}, ConfigFile: "emacs.toml"},
|
{ID: "emacs", Commands: []string{"emacs"}, ConfigFile: "emacs.toml", Kind: TemplateKindEmacs},
|
||||||
|
{ID: "zed", Commands: []string{"zed", "zeditor", "zedit"}, ConfigFile: "zed.toml"},
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *ColorMode) GTKTheme() string {
|
func (c *ColorMode) GTKTheme() string {
|
||||||
@@ -78,7 +84,8 @@ func (c *ColorMode) GTKTheme() string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
matugenVersionOnce sync.Once
|
matugenVersionMu sync.Mutex
|
||||||
|
matugenVersionOK bool
|
||||||
matugenSupportsCOE bool
|
matugenSupportsCOE bool
|
||||||
matugenIsV4 bool
|
matugenIsV4 bool
|
||||||
)
|
)
|
||||||
@@ -92,7 +99,9 @@ type Options struct {
|
|||||||
Mode ColorMode
|
Mode ColorMode
|
||||||
IconTheme string
|
IconTheme string
|
||||||
MatugenType string
|
MatugenType string
|
||||||
|
Contrast float64
|
||||||
RunUserTemplates bool
|
RunUserTemplates bool
|
||||||
|
ColorsOnly bool
|
||||||
StockColors string
|
StockColors string
|
||||||
SyncModeWithPortal bool
|
SyncModeWithPortal bool
|
||||||
TerminalsAlwaysDark bool
|
TerminalsAlwaysDark bool
|
||||||
@@ -158,8 +167,14 @@ func Run(opts Options) error {
|
|||||||
|
|
||||||
log.Infof("Building theme: %s %s (%s)", opts.Kind, opts.Value, opts.Mode)
|
log.Infof("Building theme: %s %s (%s)", opts.Kind, opts.Value, opts.Mode)
|
||||||
|
|
||||||
if err := buildOnce(&opts); err != nil {
|
changed, buildErr := buildOnce(&opts)
|
||||||
return err
|
if buildErr != nil {
|
||||||
|
return buildErr
|
||||||
|
}
|
||||||
|
|
||||||
|
if !changed {
|
||||||
|
log.Info("No color changes detected, skipping refresh")
|
||||||
|
return ErrNoChanges
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.SyncModeWithPortal {
|
if opts.SyncModeWithPortal {
|
||||||
@@ -170,25 +185,27 @@ func Run(opts Options) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildOnce(opts *Options) error {
|
func buildOnce(opts *Options) (bool, error) {
|
||||||
cfgFile, err := os.CreateTemp("", "matugen-config-*.toml")
|
cfgFile, err := os.CreateTemp("", "matugen-config-*.toml")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to create temp config: %w", err)
|
return false, fmt.Errorf("failed to create temp config: %w", err)
|
||||||
}
|
}
|
||||||
defer os.Remove(cfgFile.Name())
|
defer os.Remove(cfgFile.Name())
|
||||||
defer cfgFile.Close()
|
defer cfgFile.Close()
|
||||||
|
|
||||||
tmpDir, err := os.MkdirTemp("", "matugen-templates-*")
|
tmpDir, err := os.MkdirTemp("", "matugen-templates-*")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to create temp dir: %w", err)
|
return false, fmt.Errorf("failed to create temp dir: %w", err)
|
||||||
}
|
}
|
||||||
defer os.RemoveAll(tmpDir)
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
if err := buildMergedConfig(opts, cfgFile, tmpDir); err != nil {
|
if err := buildMergedConfig(opts, cfgFile, tmpDir); err != nil {
|
||||||
return fmt.Errorf("failed to build config: %w", err)
|
return false, fmt.Errorf("failed to build config: %w", err)
|
||||||
}
|
}
|
||||||
cfgFile.Close()
|
cfgFile.Close()
|
||||||
|
|
||||||
|
oldColors, _ := os.ReadFile(opts.ColorsOutput())
|
||||||
|
|
||||||
var primaryDark, primaryLight, surface string
|
var primaryDark, primaryLight, surface string
|
||||||
var dank16JSON string
|
var dank16JSON string
|
||||||
var importArgs []string
|
var importArgs []string
|
||||||
@@ -200,7 +217,7 @@ func buildOnce(opts *Options) error {
|
|||||||
surface = extractNestedColor(opts.StockColors, "surface", "dark")
|
surface = extractNestedColor(opts.StockColors, "surface", "dark")
|
||||||
|
|
||||||
if primaryDark == "" {
|
if primaryDark == "" {
|
||||||
return fmt.Errorf("failed to extract primary dark from stock colors")
|
return false, fmt.Errorf("failed to extract primary dark from stock colors")
|
||||||
}
|
}
|
||||||
if primaryLight == "" {
|
if primaryLight == "" {
|
||||||
primaryLight = primaryDark
|
primaryLight = primaryDark
|
||||||
@@ -212,16 +229,17 @@ func buildOnce(opts *Options) error {
|
|||||||
|
|
||||||
log.Info("Running matugen color hex with stock color overrides")
|
log.Info("Running matugen color hex with stock color overrides")
|
||||||
args := []string{"color", "hex", primaryDark, "-m", string(opts.Mode), "-t", opts.MatugenType, "-c", cfgFile.Name()}
|
args := []string{"color", "hex", primaryDark, "-m", string(opts.Mode), "-t", opts.MatugenType, "-c", cfgFile.Name()}
|
||||||
|
args = appendContrastArg(args, opts.Contrast)
|
||||||
args = append(args, importArgs...)
|
args = append(args, importArgs...)
|
||||||
if err := runMatugen(args); err != nil {
|
if err := runMatugen(args); err != nil {
|
||||||
return err
|
return false, err
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Infof("Using dynamic theme from %s: %s", opts.Kind, opts.Value)
|
log.Infof("Using dynamic theme from %s: %s", opts.Kind, opts.Value)
|
||||||
|
|
||||||
matJSON, err := runMatugenDryRun(opts)
|
matJSON, err := runMatugenDryRun(opts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("matugen dry-run failed: %w", err)
|
return false, fmt.Errorf("matugen dry-run failed: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
primaryDark = extractMatugenColor(matJSON, "primary", "dark")
|
primaryDark = extractMatugenColor(matJSON, "primary", "dark")
|
||||||
@@ -229,7 +247,7 @@ func buildOnce(opts *Options) error {
|
|||||||
surface = extractMatugenColor(matJSON, "surface", "dark")
|
surface = extractMatugenColor(matJSON, "surface", "dark")
|
||||||
|
|
||||||
if primaryDark == "" {
|
if primaryDark == "" {
|
||||||
return fmt.Errorf("failed to extract primary color")
|
return false, fmt.Errorf("failed to extract primary color")
|
||||||
}
|
}
|
||||||
if primaryLight == "" {
|
if primaryLight == "" {
|
||||||
primaryLight = primaryDark
|
primaryLight = primaryDark
|
||||||
@@ -248,12 +266,22 @@ func buildOnce(opts *Options) error {
|
|||||||
args = []string{opts.Kind, opts.Value}
|
args = []string{opts.Kind, opts.Value}
|
||||||
}
|
}
|
||||||
args = append(args, "-m", string(opts.Mode), "-t", opts.MatugenType, "-c", cfgFile.Name())
|
args = append(args, "-m", string(opts.Mode), "-t", opts.MatugenType, "-c", cfgFile.Name())
|
||||||
|
args = appendContrastArg(args, opts.Contrast)
|
||||||
args = append(args, importArgs...)
|
args = append(args, importArgs...)
|
||||||
if err := runMatugen(args); err != nil {
|
if err := runMatugen(args); err != nil {
|
||||||
return err
|
return false, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
newColors, _ := os.ReadFile(opts.ColorsOutput())
|
||||||
|
if bytes.Equal(oldColors, newColors) && len(oldColors) > 0 {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if opts.ColorsOnly {
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
|
||||||
if isDMSGTKActive(opts.ConfigDir) {
|
if isDMSGTKActive(opts.ConfigDir) {
|
||||||
switch opts.Mode {
|
switch opts.Mode {
|
||||||
case ColorModeLight:
|
case ColorModeLight:
|
||||||
@@ -271,7 +299,14 @@ func buildOnce(opts *Options) error {
|
|||||||
|
|
||||||
signalTerminals(opts)
|
signalTerminals(opts)
|
||||||
|
|
||||||
return nil
|
return true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendContrastArg(args []string, contrast float64) []string {
|
||||||
|
if contrast == 0 {
|
||||||
|
return args
|
||||||
|
}
|
||||||
|
return append(args, "--contrast", strconv.FormatFloat(contrast, 'f', -1, 64))
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildMergedConfig(opts *Options, cfgFile *os.File, tmpDir string) error {
|
func buildMergedConfig(opts *Options, cfgFile *os.File, tmpDir string) error {
|
||||||
@@ -311,6 +346,10 @@ output_path = '%s'
|
|||||||
|
|
||||||
`, opts.ShellDir, opts.ColorsOutput())
|
`, opts.ShellDir, opts.ColorsOutput())
|
||||||
|
|
||||||
|
if opts.ColorsOnly {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
homeDir, _ := os.UserHomeDir()
|
homeDir, _ := os.UserHomeDir()
|
||||||
for _, tmpl := range templateRegistry {
|
for _, tmpl := range templateRegistry {
|
||||||
if opts.ShouldSkipTemplate(tmpl.ID) {
|
if opts.ShouldSkipTemplate(tmpl.ID) {
|
||||||
@@ -334,6 +373,10 @@ output_path = '%s'
|
|||||||
appendVSCodeConfig(cfgFile, "cursor", filepath.Join(homeDir, ".cursor/extensions"), opts.ShellDir)
|
appendVSCodeConfig(cfgFile, "cursor", filepath.Join(homeDir, ".cursor/extensions"), opts.ShellDir)
|
||||||
appendVSCodeConfig(cfgFile, "windsurf", filepath.Join(homeDir, ".windsurf/extensions"), opts.ShellDir)
|
appendVSCodeConfig(cfgFile, "windsurf", filepath.Join(homeDir, ".windsurf/extensions"), opts.ShellDir)
|
||||||
appendVSCodeConfig(cfgFile, "vscode-insiders", filepath.Join(homeDir, ".vscode-insiders/extensions"), opts.ShellDir)
|
appendVSCodeConfig(cfgFile, "vscode-insiders", filepath.Join(homeDir, ".vscode-insiders/extensions"), opts.ShellDir)
|
||||||
|
case TemplateKindEmacs:
|
||||||
|
if utils.EmacsConfigDir() != "" {
|
||||||
|
appendConfig(opts, cfgFile, tmpl.Commands, tmpl.Flatpaks, tmpl.ConfigFile)
|
||||||
|
}
|
||||||
default:
|
default:
|
||||||
appendConfig(opts, cfgFile, tmpl.Commands, tmpl.Flatpaks, tmpl.ConfigFile)
|
appendConfig(opts, cfgFile, tmpl.Commands, tmpl.Flatpaks, tmpl.ConfigFile)
|
||||||
}
|
}
|
||||||
@@ -491,6 +534,9 @@ func substituteVars(content, shellDir string) string {
|
|||||||
result = strings.ReplaceAll(result, "'CONFIG_DIR/", "'"+utils.XDGConfigHome()+"/")
|
result = strings.ReplaceAll(result, "'CONFIG_DIR/", "'"+utils.XDGConfigHome()+"/")
|
||||||
result = strings.ReplaceAll(result, "'DATA_DIR/", "'"+utils.XDGDataHome()+"/")
|
result = strings.ReplaceAll(result, "'DATA_DIR/", "'"+utils.XDGDataHome()+"/")
|
||||||
result = strings.ReplaceAll(result, "'CACHE_DIR/", "'"+utils.XDGCacheHome()+"/")
|
result = strings.ReplaceAll(result, "'CACHE_DIR/", "'"+utils.XDGCacheHome()+"/")
|
||||||
|
if emacsDir := utils.EmacsConfigDir(); emacsDir != "" {
|
||||||
|
result = strings.ReplaceAll(result, "'EMACS_DIR/", "'"+emacsDir+"/")
|
||||||
|
}
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -511,79 +557,161 @@ func extractTOMLSection(content, startMarker, endMarker string) string {
|
|||||||
return content[startIdx : startIdx+endIdx]
|
return content[startIdx : startIdx+endIdx]
|
||||||
}
|
}
|
||||||
|
|
||||||
func checkMatugenVersion() {
|
type matugenFlags struct {
|
||||||
matugenVersionOnce.Do(func() {
|
supportsCOE bool
|
||||||
cmd := exec.Command("matugen", "--version")
|
isV4 bool
|
||||||
output, err := cmd.Output()
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
versionStr := strings.TrimSpace(string(output))
|
|
||||||
versionStr = strings.TrimPrefix(versionStr, "matugen ")
|
|
||||||
|
|
||||||
parts := strings.Split(versionStr, ".")
|
|
||||||
if len(parts) < 2 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
major, err := strconv.Atoi(parts[0])
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
minor, err := strconv.Atoi(parts[1])
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
matugenSupportsCOE = major > 3 || (major == 3 && minor >= 1)
|
|
||||||
matugenIsV4 = major >= 4
|
|
||||||
if matugenSupportsCOE {
|
|
||||||
log.Infof("Matugen %s supports --continue-on-error", versionStr)
|
|
||||||
}
|
|
||||||
if matugenIsV4 {
|
|
||||||
log.Infof("Matugen %s: using v4 flags", versionStr)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func runMatugen(args []string) error {
|
func detectMatugenVersion() (matugenFlags, error) {
|
||||||
checkMatugenVersion()
|
matugenVersionMu.Lock()
|
||||||
|
defer matugenVersionMu.Unlock()
|
||||||
|
|
||||||
|
if matugenVersionOK {
|
||||||
|
return matugenFlags{matugenSupportsCOE, matugenIsV4}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return detectMatugenVersionLocked()
|
||||||
|
}
|
||||||
|
|
||||||
|
func redetectMatugenVersion(old matugenFlags) (matugenFlags, bool) {
|
||||||
|
matugenVersionMu.Lock()
|
||||||
|
defer matugenVersionMu.Unlock()
|
||||||
|
|
||||||
|
matugenVersionOK = false
|
||||||
|
flags, err := detectMatugenVersionLocked()
|
||||||
|
if err != nil {
|
||||||
|
return old, false
|
||||||
|
}
|
||||||
|
changed := flags.supportsCOE != old.supportsCOE || flags.isV4 != old.isV4
|
||||||
|
return flags, changed
|
||||||
|
}
|
||||||
|
|
||||||
|
func detectMatugenVersionLocked() (matugenFlags, error) {
|
||||||
|
cmd := exec.Command("matugen", "--version")
|
||||||
|
output, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
return matugenFlags{}, fmt.Errorf("failed to get matugen version: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
versionStr := strings.TrimSpace(string(output))
|
||||||
|
versionStr = strings.TrimPrefix(versionStr, "matugen ")
|
||||||
|
|
||||||
|
parts := strings.Split(versionStr, ".")
|
||||||
|
if len(parts) < 2 {
|
||||||
|
return matugenFlags{}, fmt.Errorf("unexpected matugen version format: %q", versionStr)
|
||||||
|
}
|
||||||
|
|
||||||
|
major, err := strconv.Atoi(parts[0])
|
||||||
|
if err != nil {
|
||||||
|
return matugenFlags{}, fmt.Errorf("failed to parse matugen major version %q: %w", parts[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
minor, err := strconv.Atoi(parts[1])
|
||||||
|
if err != nil {
|
||||||
|
return matugenFlags{}, fmt.Errorf("failed to parse matugen minor version %q: %w", parts[1], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
matugenSupportsCOE = major > 3 || (major == 3 && minor >= 1)
|
||||||
|
matugenIsV4 = major >= 4
|
||||||
|
matugenVersionOK = true
|
||||||
|
|
||||||
if matugenSupportsCOE {
|
if matugenSupportsCOE {
|
||||||
args = append([]string{"--continue-on-error"}, args...)
|
log.Debugf("Matugen %s detected: continue-on-error support enabled", versionStr)
|
||||||
}
|
}
|
||||||
if matugenIsV4 {
|
if matugenIsV4 {
|
||||||
|
log.Debugf("Matugen %s detected: using v4 compatibility flags", versionStr)
|
||||||
|
}
|
||||||
|
return matugenFlags{matugenSupportsCOE, matugenIsV4}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildMatugenArgs(baseArgs []string, flags matugenFlags) []string {
|
||||||
|
args := make([]string, 0, len(baseArgs)+4)
|
||||||
|
if flags.supportsCOE {
|
||||||
|
args = append(args, "--continue-on-error")
|
||||||
|
}
|
||||||
|
args = append(args, baseArgs...)
|
||||||
|
if flags.isV4 {
|
||||||
args = append(args, "--source-color-index", "0")
|
args = append(args, "--source-color-index", "0")
|
||||||
}
|
}
|
||||||
|
return args
|
||||||
|
}
|
||||||
|
|
||||||
|
func runMatugen(baseArgs []string) error {
|
||||||
|
flags, err := detectMatugenVersion()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
args := buildMatugenArgs(baseArgs, flags)
|
||||||
cmd := exec.Command("matugen", args...)
|
cmd := exec.Command("matugen", args...)
|
||||||
cmd.Stdout = os.Stdout
|
cmd.Stdout = os.Stdout
|
||||||
cmd.Stderr = os.Stderr
|
cmd.Stderr = os.Stderr
|
||||||
return cmd.Run()
|
runErr := cmd.Run()
|
||||||
|
if runErr == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Warnf("Matugen failed (v4=%v): %v", flags.isV4, runErr)
|
||||||
|
|
||||||
|
newFlags, changed := redetectMatugenVersion(flags)
|
||||||
|
if !changed {
|
||||||
|
return runErr
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Warnf("Matugen version changed (v4: %v -> %v), retrying", flags.isV4, newFlags.isV4)
|
||||||
|
args = buildMatugenArgs(baseArgs, newFlags)
|
||||||
|
retryCmd := exec.Command("matugen", args...)
|
||||||
|
retryCmd.Stdout = os.Stdout
|
||||||
|
retryCmd.Stderr = os.Stderr
|
||||||
|
return retryCmd.Run()
|
||||||
}
|
}
|
||||||
|
|
||||||
func runMatugenDryRun(opts *Options) (string, error) {
|
func runMatugenDryRun(opts *Options) (string, error) {
|
||||||
checkMatugenVersion()
|
flags, err := detectMatugenVersion()
|
||||||
|
|
||||||
var args []string
|
|
||||||
switch opts.Kind {
|
|
||||||
case "hex":
|
|
||||||
args = []string{"color", "hex", opts.Value}
|
|
||||||
default:
|
|
||||||
args = []string{opts.Kind, opts.Value}
|
|
||||||
}
|
|
||||||
args = append(args, "-m", "dark", "-t", opts.MatugenType, "--json", "hex", "--dry-run")
|
|
||||||
if matugenIsV4 {
|
|
||||||
args = append(args, "--source-color-index", "0", "--old-json-output")
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd := exec.Command("matugen", args...)
|
|
||||||
output, err := cmd.Output()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
output, dryErr := execDryRun(opts, flags)
|
||||||
|
if dryErr == nil {
|
||||||
|
return output, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Warnf("Matugen dry-run failed (v4=%v): %v", flags.isV4, dryErr)
|
||||||
|
|
||||||
|
newFlags, changed := redetectMatugenVersion(flags)
|
||||||
|
if !changed {
|
||||||
|
return "", dryErr
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Warnf("Matugen version changed (v4: %v -> %v), retrying dry-run", flags.isV4, newFlags.isV4)
|
||||||
|
return execDryRun(opts, newFlags)
|
||||||
|
}
|
||||||
|
|
||||||
|
func execDryRun(opts *Options, flags matugenFlags) (string, error) {
|
||||||
|
var baseArgs []string
|
||||||
|
switch opts.Kind {
|
||||||
|
case "hex":
|
||||||
|
baseArgs = []string{"color", "hex", opts.Value}
|
||||||
|
default:
|
||||||
|
baseArgs = []string{opts.Kind, opts.Value}
|
||||||
|
}
|
||||||
|
baseArgs = append(baseArgs, "-m", "dark", "-t", opts.MatugenType, "--json", "hex", "--dry-run")
|
||||||
|
baseArgs = appendContrastArg(baseArgs, opts.Contrast)
|
||||||
|
if flags.isV4 {
|
||||||
|
baseArgs = append(baseArgs, "--source-color-index", "0", "--old-json-output")
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := exec.Command("matugen", baseArgs...)
|
||||||
|
var stderr strings.Builder
|
||||||
|
cmd.Stderr = &stderr
|
||||||
|
output, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
if stderr.Len() > 0 {
|
||||||
|
return "", fmt.Errorf("matugen %v failed (v4=%v): %s", baseArgs, flags.isV4, strings.TrimSpace(stderr.String()))
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("matugen %v failed (v4=%v): %w", baseArgs, flags.isV4, err)
|
||||||
|
}
|
||||||
return strings.ReplaceAll(string(output), "\n", ""), nil
|
return strings.ReplaceAll(string(output), "\n", ""), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -819,6 +947,8 @@ func CheckTemplates(checker utils.AppChecker) []TemplateCheck {
|
|||||||
detected = true
|
detected = true
|
||||||
case tmpl.Kind == TemplateKindVSCode:
|
case tmpl.Kind == TemplateKindVSCode:
|
||||||
detected = checkVSCodeExtension(homeDir)
|
detected = checkVSCodeExtension(homeDir)
|
||||||
|
case tmpl.Kind == TemplateKindEmacs:
|
||||||
|
detected = appExists(checker, tmpl.Commands, tmpl.Flatpaks) && utils.EmacsConfigDir() != ""
|
||||||
default:
|
default:
|
||||||
detected = appExists(checker, tmpl.Commands, tmpl.Flatpaks)
|
detected = appExists(checker, tmpl.Commands, tmpl.Flatpaks)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package matugen
|
|||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
mocks_utils "github.com/AvengeMedia/DankMaterialShell/core/internal/mocks/utils"
|
mocks_utils "github.com/AvengeMedia/DankMaterialShell/core/internal/mocks/utils"
|
||||||
@@ -392,3 +393,51 @@ func TestSubstituteVars(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestBuildMergedConfigColorsOnly(t *testing.T) {
|
||||||
|
tempDir := t.TempDir()
|
||||||
|
|
||||||
|
shellDir := filepath.Join(tempDir, "shell")
|
||||||
|
configsDir := filepath.Join(shellDir, "matugen", "configs")
|
||||||
|
if err := os.MkdirAll(configsDir, 0o755); err != nil {
|
||||||
|
t.Fatalf("failed to create configs dir: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
baseConfig := "[config]\ncustom_keywords = []\n"
|
||||||
|
if err := os.WriteFile(filepath.Join(configsDir, "base.toml"), []byte(baseConfig), 0o644); err != nil {
|
||||||
|
t.Fatalf("failed to write base config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cfgFile, err := os.CreateTemp(tempDir, "merged-*.toml")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to create temp config: %v", err)
|
||||||
|
}
|
||||||
|
defer os.Remove(cfgFile.Name())
|
||||||
|
defer cfgFile.Close()
|
||||||
|
|
||||||
|
opts := &Options{
|
||||||
|
ShellDir: shellDir,
|
||||||
|
ConfigDir: filepath.Join(tempDir, "config"),
|
||||||
|
StateDir: filepath.Join(tempDir, "state"),
|
||||||
|
ColorsOnly: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := buildMergedConfig(opts, cfgFile, filepath.Join(tempDir, "templates")); err != nil {
|
||||||
|
t.Fatalf("buildMergedConfig failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := cfgFile.Close(); err != nil {
|
||||||
|
t.Fatalf("failed to close merged config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
output, err := os.ReadFile(cfgFile.Name())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to read merged config: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
content := string(output)
|
||||||
|
assert.Contains(t, content, "[templates.dank]")
|
||||||
|
assert.Contains(t, content, "output_path = '"+filepath.Join(opts.StateDir, "dms-colors.json")+"'")
|
||||||
|
assert.NotContains(t, content, "[templates.gtk]")
|
||||||
|
assert.False(t, strings.Contains(content, "output_path = 'CONFIG_DIR/"), "colors-only config should not emit app template outputs")
|
||||||
|
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package matugen
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"errors"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
@@ -93,10 +94,13 @@ func (q *Queue) runWorker() {
|
|||||||
err := Run(job.Options)
|
err := Run(job.Options)
|
||||||
|
|
||||||
var result Result
|
var result Result
|
||||||
if err != nil {
|
switch {
|
||||||
result = Result{Success: false, Error: err}
|
case err == nil:
|
||||||
} else {
|
|
||||||
result = Result{Success: true}
|
result = Result{Success: true}
|
||||||
|
case errors.Is(err, ErrNoChanges):
|
||||||
|
result = Result{Success: true}
|
||||||
|
default:
|
||||||
|
result = Result{Success: false, Error: err}
|
||||||
}
|
}
|
||||||
|
|
||||||
q.finishJob(result)
|
q.finishJob(result)
|
||||||
|
|||||||
203
core/internal/mocks/geolocation/mock_Client.go
Normal file
203
core/internal/mocks/geolocation/mock_Client.go
Normal file
@@ -0,0 +1,203 @@
|
|||||||
|
// Code generated by mockery v2.53.5. DO NOT EDIT.
|
||||||
|
|
||||||
|
package mocks_geolocation
|
||||||
|
|
||||||
|
import (
|
||||||
|
geolocation "github.com/AvengeMedia/DankMaterialShell/core/internal/geolocation"
|
||||||
|
mock "github.com/stretchr/testify/mock"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MockClient is an autogenerated mock type for the Client type
|
||||||
|
type MockClient struct {
|
||||||
|
mock.Mock
|
||||||
|
}
|
||||||
|
|
||||||
|
type MockClient_Expecter struct {
|
||||||
|
mock *mock.Mock
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_m *MockClient) EXPECT() *MockClient_Expecter {
|
||||||
|
return &MockClient_Expecter{mock: &_m.Mock}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close provides a mock function with no fields
|
||||||
|
func (_m *MockClient) Close() {
|
||||||
|
_m.Called()
|
||||||
|
}
|
||||||
|
|
||||||
|
// MockClient_Close_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Close'
|
||||||
|
type MockClient_Close_Call struct {
|
||||||
|
*mock.Call
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close is a helper method to define mock.On call
|
||||||
|
func (_e *MockClient_Expecter) Close() *MockClient_Close_Call {
|
||||||
|
return &MockClient_Close_Call{Call: _e.mock.On("Close")}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Close_Call) Run(run func()) *MockClient_Close_Call {
|
||||||
|
_c.Call.Run(func(args mock.Arguments) {
|
||||||
|
run()
|
||||||
|
})
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Close_Call) Return() *MockClient_Close_Call {
|
||||||
|
_c.Call.Return()
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Close_Call) RunAndReturn(run func()) *MockClient_Close_Call {
|
||||||
|
_c.Run(run)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetLocation provides a mock function with no fields
|
||||||
|
func (_m *MockClient) GetLocation() (geolocation.Location, error) {
|
||||||
|
ret := _m.Called()
|
||||||
|
|
||||||
|
if len(ret) == 0 {
|
||||||
|
panic("no return value specified for GetLocation")
|
||||||
|
}
|
||||||
|
|
||||||
|
var r0 geolocation.Location
|
||||||
|
var r1 error
|
||||||
|
if rf, ok := ret.Get(0).(func() (geolocation.Location, error)); ok {
|
||||||
|
return rf()
|
||||||
|
}
|
||||||
|
if rf, ok := ret.Get(0).(func() geolocation.Location); ok {
|
||||||
|
r0 = rf()
|
||||||
|
} else {
|
||||||
|
r0 = ret.Get(0).(geolocation.Location)
|
||||||
|
}
|
||||||
|
|
||||||
|
if rf, ok := ret.Get(1).(func() error); ok {
|
||||||
|
r1 = rf()
|
||||||
|
} else {
|
||||||
|
r1 = ret.Error(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r0, r1
|
||||||
|
}
|
||||||
|
|
||||||
|
// MockClient_GetLocation_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLocation'
|
||||||
|
type MockClient_GetLocation_Call struct {
|
||||||
|
*mock.Call
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetLocation is a helper method to define mock.On call
|
||||||
|
func (_e *MockClient_Expecter) GetLocation() *MockClient_GetLocation_Call {
|
||||||
|
return &MockClient_GetLocation_Call{Call: _e.mock.On("GetLocation")}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_GetLocation_Call) Run(run func()) *MockClient_GetLocation_Call {
|
||||||
|
_c.Call.Run(func(args mock.Arguments) {
|
||||||
|
run()
|
||||||
|
})
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_GetLocation_Call) Return(_a0 geolocation.Location, _a1 error) *MockClient_GetLocation_Call {
|
||||||
|
_c.Call.Return(_a0, _a1)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_GetLocation_Call) RunAndReturn(run func() (geolocation.Location, error)) *MockClient_GetLocation_Call {
|
||||||
|
_c.Call.Return(run)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subscribe provides a mock function with given fields: id
|
||||||
|
func (_m *MockClient) Subscribe(id string) chan geolocation.Location {
|
||||||
|
ret := _m.Called(id)
|
||||||
|
|
||||||
|
if len(ret) == 0 {
|
||||||
|
panic("no return value specified for Subscribe")
|
||||||
|
}
|
||||||
|
|
||||||
|
var r0 chan geolocation.Location
|
||||||
|
if rf, ok := ret.Get(0).(func(string) chan geolocation.Location); ok {
|
||||||
|
r0 = rf(id)
|
||||||
|
} else {
|
||||||
|
if ret.Get(0) != nil {
|
||||||
|
r0 = ret.Get(0).(chan geolocation.Location)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return r0
|
||||||
|
}
|
||||||
|
|
||||||
|
// MockClient_Subscribe_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Subscribe'
|
||||||
|
type MockClient_Subscribe_Call struct {
|
||||||
|
*mock.Call
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subscribe is a helper method to define mock.On call
|
||||||
|
// - id string
|
||||||
|
func (_e *MockClient_Expecter) Subscribe(id interface{}) *MockClient_Subscribe_Call {
|
||||||
|
return &MockClient_Subscribe_Call{Call: _e.mock.On("Subscribe", id)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Subscribe_Call) Run(run func(id string)) *MockClient_Subscribe_Call {
|
||||||
|
_c.Call.Run(func(args mock.Arguments) {
|
||||||
|
run(args[0].(string))
|
||||||
|
})
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Subscribe_Call) Return(_a0 chan geolocation.Location) *MockClient_Subscribe_Call {
|
||||||
|
_c.Call.Return(_a0)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Subscribe_Call) RunAndReturn(run func(string) chan geolocation.Location) *MockClient_Subscribe_Call {
|
||||||
|
_c.Call.Return(run)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unsubscribe provides a mock function with given fields: id
|
||||||
|
func (_m *MockClient) Unsubscribe(id string) {
|
||||||
|
_m.Called(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MockClient_Unsubscribe_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Unsubscribe'
|
||||||
|
type MockClient_Unsubscribe_Call struct {
|
||||||
|
*mock.Call
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unsubscribe is a helper method to define mock.On call
|
||||||
|
// - id string
|
||||||
|
func (_e *MockClient_Expecter) Unsubscribe(id interface{}) *MockClient_Unsubscribe_Call {
|
||||||
|
return &MockClient_Unsubscribe_Call{Call: _e.mock.On("Unsubscribe", id)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Unsubscribe_Call) Run(run func(id string)) *MockClient_Unsubscribe_Call {
|
||||||
|
_c.Call.Run(func(args mock.Arguments) {
|
||||||
|
run(args[0].(string))
|
||||||
|
})
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Unsubscribe_Call) Return() *MockClient_Unsubscribe_Call {
|
||||||
|
_c.Call.Return()
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockClient_Unsubscribe_Call) RunAndReturn(run func(string)) *MockClient_Unsubscribe_Call {
|
||||||
|
_c.Run(run)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMockClient creates a new instance of MockClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
|
||||||
|
// The first argument is typically a *testing.T value.
|
||||||
|
func NewMockClient(t interface {
|
||||||
|
mock.TestingT
|
||||||
|
Cleanup(func())
|
||||||
|
}) *MockClient {
|
||||||
|
mock := &MockClient{}
|
||||||
|
mock.Mock.Test(t)
|
||||||
|
|
||||||
|
t.Cleanup(func() { mock.AssertExpectations(t) })
|
||||||
|
|
||||||
|
return mock
|
||||||
|
}
|
||||||
@@ -1062,6 +1062,62 @@ func (_c *MockBackend_GetWiFiNetworkDetails_Call) RunAndReturn(run func(string)
|
|||||||
return _c
|
return _c
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetWiFiQRCodeContent provides a mock function with given fields: ssid
|
||||||
|
func (_m *MockBackend) GetWiFiQRCodeContent(ssid string) (string, error) {
|
||||||
|
ret := _m.Called(ssid)
|
||||||
|
|
||||||
|
if len(ret) == 0 {
|
||||||
|
panic("no return value specified for GetWiFiQRCodeContent")
|
||||||
|
}
|
||||||
|
|
||||||
|
var r0 string
|
||||||
|
var r1 error
|
||||||
|
if rf, ok := ret.Get(0).(func(string) (string, error)); ok {
|
||||||
|
return rf(ssid)
|
||||||
|
}
|
||||||
|
if rf, ok := ret.Get(0).(func(string) string); ok {
|
||||||
|
r0 = rf(ssid)
|
||||||
|
} else {
|
||||||
|
r0 = ret.Get(0).(string)
|
||||||
|
}
|
||||||
|
|
||||||
|
if rf, ok := ret.Get(1).(func(string) error); ok {
|
||||||
|
r1 = rf(ssid)
|
||||||
|
} else {
|
||||||
|
r1 = ret.Error(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
return r0, r1
|
||||||
|
}
|
||||||
|
|
||||||
|
// MockBackend_GetWiFiQRCodeContent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWiFiQRCodeContent'
|
||||||
|
type MockBackend_GetWiFiQRCodeContent_Call struct {
|
||||||
|
*mock.Call
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetWiFiQRCodeContent is a helper method to define mock.On call
|
||||||
|
// - ssid string
|
||||||
|
func (_e *MockBackend_Expecter) GetWiFiQRCodeContent(ssid interface{}) *MockBackend_GetWiFiQRCodeContent_Call {
|
||||||
|
return &MockBackend_GetWiFiQRCodeContent_Call{Call: _e.mock.On("GetWiFiQRCodeContent", ssid)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockBackend_GetWiFiQRCodeContent_Call) Run(run func(ssid string)) *MockBackend_GetWiFiQRCodeContent_Call {
|
||||||
|
_c.Call.Run(func(args mock.Arguments) {
|
||||||
|
run(args[0].(string))
|
||||||
|
})
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_c *MockBackend_GetWiFiQRCodeContent_Call) Return(_a0 string, _a1 error) *MockBackend_GetWiFiQRCodeContent_Call {
|
||||||
|
_c.Call.Return(_a0, _a1)
|
||||||
|
return _c
|
||||||
|
}
|
||||||
|
|
||||||
|
// RunAndReturn makes the mock compute its return values by calling run with
// the received ssid.
func (_c *MockBackend_GetWiFiQRCodeContent_Call) RunAndReturn(run func(string) (string, error)) *MockBackend_GetWiFiQRCodeContent_Call {
	_c.Call.Return(run)
	return _c
}
|
||||||
|
|
||||||
// GetWiredConnections provides a mock function with no fields
|
// GetWiredConnections provides a mock function with no fields
|
||||||
func (_m *MockBackend) GetWiredConnections() ([]network.WiredConnection, error) {
|
func (_m *MockBackend) GetWiredConnections() ([]network.WiredConnection, error) {
|
||||||
ret := _m.Called()
|
ret := _m.Called()
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
"strings"
|
||||||
"syscall"
|
"syscall"
|
||||||
|
|
||||||
"github.com/godbus/dbus/v5"
|
"github.com/godbus/dbus/v5"
|
||||||
@@ -59,7 +60,11 @@ func Send(n Notification) error {
|
|||||||
|
|
||||||
hints := map[string]dbus.Variant{}
|
hints := map[string]dbus.Variant{}
|
||||||
if n.FilePath != "" {
|
if n.FilePath != "" {
|
||||||
hints["image_path"] = dbus.MakeVariant(n.FilePath)
|
imgPath := n.FilePath
|
||||||
|
if !strings.HasPrefix(imgPath, "file://") {
|
||||||
|
imgPath = "file://" + imgPath
|
||||||
|
}
|
||||||
|
hints["image_path"] = dbus.MakeVariant(imgPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
obj := conn.Object(notifyDest, notifyPath)
|
obj := conn.Object(notifyDest, notifyPath)
|
||||||
|
|||||||
892
core/internal/pam/pam.go
Normal file
892
core/internal/pam/pam.go
Normal file
@@ -0,0 +1,892 @@
|
|||||||
|
package pam
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/distros"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Marker comments and file paths used when rewriting PAM configuration.
const (
	// GreeterPamManagedBlockStart/End delimit the DMS-managed auth block
	// inside /etc/pam.d/greetd.
	GreeterPamManagedBlockStart = "# BEGIN DMS GREETER AUTH (managed by dms greeter sync)"
	GreeterPamManagedBlockEnd   = "# END DMS GREETER AUTH"

	// Lockscreen markers delimit the generated /etc/pam.d/dankshell contents.
	LockscreenPamManagedBlockStart = "# BEGIN DMS LOCKSCREEN AUTH (managed by dms greeter sync)"
	LockscreenPamManagedBlockEnd   = "# END DMS LOCKSCREEN AUTH"

	// U2F markers delimit the generated /etc/pam.d/dankshell-u2f contents.
	LockscreenU2FPamManagedBlockStart = "# BEGIN DMS LOCKSCREEN U2F AUTH (managed by dms auth sync)"
	LockscreenU2FPamManagedBlockEnd   = "# END DMS LOCKSCREEN U2F AUTH"

	// Comment prefixes written by older DMS versions; stripped during sync.
	legacyGreeterPamFprintComment = "# DMS greeter fingerprint"
	legacyGreeterPamU2FComment    = "# DMS greeter U2F"

	// PAM service file locations managed or inspected by the sync code.
	GreetdPamPath       = "/etc/pam.d/greetd"
	DankshellPamPath    = "/etc/pam.d/dankshell"
	DankshellU2FPamPath = "/etc/pam.d/dankshell-u2f"
)
|
||||||
|
|
||||||
|
// includedPamAuthFiles lists well-known PAM fragments that distro tooling
// (authselect, pam-auth-update, ...) typically manages and that service files
// commonly include; used to detect auth modules provided indirectly.
var includedPamAuthFiles = []string{
	"system-auth",
	"common-auth",
	"password-auth",
	"system-login",
	"system-local-login",
	"common-auth-pc",
	"login",
}
|
||||||
|
|
||||||
|
// AuthSettings is the subset of the DMS settings.json that controls optional
// PAM auth methods for the lock screen and greeter.
type AuthSettings struct {
	EnableFprint        bool `json:"enableFprint"`        // fingerprint toggle (not referenced by this file's sync logic)
	EnableU2f           bool `json:"enableU2f"`           // lock screen security-key toggle (drives dankshell-u2f)
	GreeterEnableFprint bool `json:"greeterEnableFprint"` // greeter fingerprint toggle
	GreeterEnableU2f    bool `json:"greeterEnableU2f"`    // greeter security-key toggle
}
|
||||||
|
|
||||||
|
// SyncAuthOptions controls SyncAuthConfig.
type SyncAuthOptions struct {
	// HomeDir overrides the home directory used to locate settings.json;
	// empty means the current user's home.
	HomeDir string
	// ForceGreeterAuth enables greeter fingerprint/U2F whenever the PAM
	// modules exist, ignoring the settings toggles.
	ForceGreeterAuth bool
}
|
||||||
|
|
||||||
|
// syncDeps bundles the filesystem, privilege-escalation, and hardware-probe
// operations used by the sync routines so tests can substitute fakes.
type syncDeps struct {
	pamDir           string // PAM service directory, normally /etc/pam.d
	greetdPath       string
	dankshellPath    string
	dankshellU2fPath string

	isNixOS                            func() bool
	readFile                           func(string) ([]byte, error)
	stat                               func(string) (os.FileInfo, error)
	createTemp                         func(string, string) (*os.File, error)
	removeFile                         func(string) error
	runSudoCmd                         func(string, string, ...string) error
	pamModuleExists                    func(string) bool
	fingerprintAvailableForCurrentUser func() bool
}
|
||||||
|
|
||||||
|
// lockscreenPamIncludeDirective is a parsed PAM include/substack/@include
// line: the service file it pulls in and the directive type nested lines
// must be filtered to ("" = keep all types).
type lockscreenPamIncludeDirective struct {
	target     string
	filterType string
}

// lockscreenPamResolver flattens a PAM service file, following includes
// rooted at pamDir; readFile is injectable for tests.
type lockscreenPamResolver struct {
	pamDir   string
	readFile func(string) ([]byte, error)
}
|
||||||
|
|
||||||
|
// defaultSyncDeps returns the production dependency set backed by the real
// filesystem, sudo, and hardware probes.
func defaultSyncDeps() syncDeps {
	return syncDeps{
		pamDir:                             "/etc/pam.d",
		greetdPath:                         GreetdPamPath,
		dankshellPath:                      DankshellPamPath,
		dankshellU2fPath:                   DankshellU2FPamPath,
		isNixOS:                            IsNixOS,
		readFile:                           os.ReadFile,
		stat:                               os.Stat,
		createTemp:                         os.CreateTemp,
		removeFile:                         os.Remove,
		runSudoCmd:                         runSudoCmd,
		pamModuleExists:                    pamModuleExists,
		fingerprintAvailableForCurrentUser: FingerprintAuthAvailableForCurrentUser,
	}
}
|
||||||
|
|
||||||
|
// IsNixOS reports whether the host is NixOS, detected via the presence of the
// /etc/NIXOS marker file.
func IsNixOS() bool {
	if _, statErr := os.Stat("/etc/NIXOS"); statErr != nil {
		return false
	}
	return true
}
|
||||||
|
|
||||||
|
func ReadAuthSettings(homeDir string) (AuthSettings, error) {
|
||||||
|
settingsPath := filepath.Join(homeDir, ".config", "DankMaterialShell", "settings.json")
|
||||||
|
data, err := os.ReadFile(settingsPath)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return AuthSettings{}, nil
|
||||||
|
}
|
||||||
|
return AuthSettings{}, fmt.Errorf("failed to read settings at %s: %w", settingsPath, err)
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(string(data)) == "" {
|
||||||
|
return AuthSettings{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var settings AuthSettings
|
||||||
|
if err := json.Unmarshal(data, &settings); err != nil {
|
||||||
|
return AuthSettings{}, fmt.Errorf("failed to parse settings at %s: %w", settingsPath, err)
|
||||||
|
}
|
||||||
|
return settings, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func ReadGreeterAuthToggles(homeDir string) (enableFprint bool, enableU2f bool, err error) {
|
||||||
|
settings, err := ReadAuthSettings(homeDir)
|
||||||
|
if err != nil {
|
||||||
|
return false, false, err
|
||||||
|
}
|
||||||
|
return settings.GreeterEnableFprint, settings.GreeterEnableU2f, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func SyncAuthConfig(logFunc func(string), sudoPassword string, options SyncAuthOptions) error {
|
||||||
|
return syncAuthConfigWithDeps(logFunc, sudoPassword, options, defaultSyncDeps())
|
||||||
|
}
|
||||||
|
|
||||||
|
func RemoveManagedGreeterPamBlock(logFunc func(string), sudoPassword string) error {
|
||||||
|
return removeManagedGreeterPamBlockWithDeps(logFunc, sudoPassword, defaultSyncDeps())
|
||||||
|
}
|
||||||
|
|
||||||
|
// syncAuthConfigWithDeps applies the user's auth settings to the lock screen
// and greeter PAM configuration, using deps for all filesystem and privileged
// operations so tests can substitute fakes.
func syncAuthConfigWithDeps(logFunc func(string), sudoPassword string, options SyncAuthOptions, deps syncDeps) error {
	homeDir := strings.TrimSpace(options.HomeDir)
	if homeDir == "" {
		// No override supplied: fall back to the current user's home.
		var err error
		homeDir, err = os.UserHomeDir()
		if err != nil {
			return fmt.Errorf("failed to get user home directory: %w", err)
		}
	}

	settings, err := ReadAuthSettings(homeDir)
	if err != nil {
		return err
	}

	// Lock screen services are synced first, independently of greetd.
	if err := syncLockscreenPamConfigWithDeps(logFunc, sudoPassword, deps); err != nil {
		return err
	}
	if err := syncLockscreenU2FPamConfigWithDeps(logFunc, sudoPassword, settings.EnableU2f, deps); err != nil {
		return err
	}

	// Greeter sync only applies when greetd's PAM service file exists.
	if _, err := deps.stat(deps.greetdPath); err != nil {
		if os.IsNotExist(err) {
			logFunc("ℹ /etc/pam.d/greetd not found. Skipping greeter PAM sync.")
			return nil
		}
		return fmt.Errorf("failed to inspect %s: %w", deps.greetdPath, err)
	}

	if err := syncGreeterPamConfigWithDeps(logFunc, sudoPassword, settings, options.ForceGreeterAuth, deps); err != nil {
		return err
	}

	return nil
}
|
||||||
|
|
||||||
|
// removeManagedGreeterPamBlockWithDeps strips the DMS-managed block and any
// legacy DMS marker lines from the greetd PAM file and rewrites it via sudo.
// It is a no-op on NixOS (PAM is module-managed there), when the file does
// not exist, or when there is nothing to remove.
func removeManagedGreeterPamBlockWithDeps(logFunc func(string), sudoPassword string, deps syncDeps) error {
	if deps.isNixOS() {
		return nil
	}

	data, err := deps.readFile(deps.greetdPath)
	if err != nil {
		if os.IsNotExist(err) {
			return nil
		}
		return fmt.Errorf("failed to read %s: %w", deps.greetdPath, err)
	}

	originalContent := string(data)
	stripped, removed := stripManagedGreeterPamBlock(originalContent)
	strippedAgain, removedLegacy := stripLegacyGreeterPamLines(stripped)
	if !removed && !removedLegacy {
		// Nothing DMS-managed present; avoid a needless privileged write.
		return nil
	}

	if err := writeManagedPamFile(strippedAgain, deps.greetdPath, sudoPassword, deps); err != nil {
		return fmt.Errorf("failed to write %s: %w", deps.greetdPath, err)
	}

	logFunc("✓ Removed DMS managed PAM block from " + deps.greetdPath)
	return nil
}
|
||||||
|
|
||||||
|
func ParseManagedGreeterPamAuth(pamText string) (managed bool, fingerprint bool, u2f bool, legacy bool) {
|
||||||
|
if pamText == "" {
|
||||||
|
return false, false, false, false
|
||||||
|
}
|
||||||
|
|
||||||
|
lines := strings.Split(pamText, "\n")
|
||||||
|
inManaged := false
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
switch trimmed {
|
||||||
|
case GreeterPamManagedBlockStart:
|
||||||
|
managed = true
|
||||||
|
inManaged = true
|
||||||
|
continue
|
||||||
|
case GreeterPamManagedBlockEnd:
|
||||||
|
inManaged = false
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(trimmed, legacyGreeterPamFprintComment) || strings.HasPrefix(trimmed, legacyGreeterPamU2FComment) {
|
||||||
|
legacy = true
|
||||||
|
}
|
||||||
|
if !inManaged {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.Contains(trimmed, "pam_fprintd") {
|
||||||
|
fingerprint = true
|
||||||
|
}
|
||||||
|
if strings.Contains(trimmed, "pam_u2f") {
|
||||||
|
u2f = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return managed, fingerprint, u2f, legacy
|
||||||
|
}
|
||||||
|
|
||||||
|
func StripManagedGreeterPamContent(pamText string) (string, bool) {
|
||||||
|
stripped, removed := stripManagedGreeterPamBlock(pamText)
|
||||||
|
stripped, removedLegacy := stripLegacyGreeterPamLines(stripped)
|
||||||
|
return stripped, removed || removedLegacy
|
||||||
|
}
|
||||||
|
|
||||||
|
// PamTextIncludesFile reports whether pamText contains a non-comment
// include/substack/@include directive referencing filename.
func PamTextIncludesFile(pamText, filename string) bool {
	for _, raw := range strings.Split(pamText, "\n") {
		line := strings.TrimSpace(raw)
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		if !strings.Contains(line, filename) {
			continue
		}
		if strings.Contains(line, "include") || strings.Contains(line, "substack") || strings.HasPrefix(line, "@include") {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
func PamFileHasModule(pamFilePath, module string) bool {
|
||||||
|
data, err := os.ReadFile(pamFilePath)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return pamContentHasModule(string(data), module)
|
||||||
|
}
|
||||||
|
|
||||||
|
func DetectIncludedPamModule(pamText, module string) string {
|
||||||
|
return detectIncludedPamModule(pamText, module, defaultSyncDeps())
|
||||||
|
}
|
||||||
|
|
||||||
|
func detectIncludedPamModule(pamText, module string, deps syncDeps) string {
|
||||||
|
for _, includedFile := range includedPamAuthFiles {
|
||||||
|
if !PamTextIncludesFile(pamText, includedFile) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
path := filepath.Join(deps.pamDir, includedFile)
|
||||||
|
data, err := deps.readFile(path)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if pamContentHasModule(string(data), module) {
|
||||||
|
return includedFile
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// pamContentHasModule reports whether content has a non-comment line that
// mentions module.
func pamContentHasModule(content, module string) bool {
	for _, raw := range strings.Split(content, "\n") {
		line := strings.TrimSpace(raw)
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		if strings.Contains(line, module) {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
func hasManagedLockscreenPamFile(content string) bool {
|
||||||
|
return strings.Contains(content, LockscreenPamManagedBlockStart) &&
|
||||||
|
strings.Contains(content, LockscreenPamManagedBlockEnd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func hasManagedLockscreenU2FPamFile(content string) bool {
|
||||||
|
return strings.Contains(content, LockscreenU2FPamManagedBlockStart) &&
|
||||||
|
strings.Contains(content, LockscreenU2FPamManagedBlockEnd)
|
||||||
|
}
|
||||||
|
|
||||||
|
// pamDirectiveType returns the PAM management group of line ("auth",
// "account", "password" or "session"), tolerating the optional leading "-"
// that marks a directive as non-fatal when its module is missing. Any other
// line yields "".
func pamDirectiveType(line string) string {
	fields := strings.Fields(line)
	if len(fields) == 0 {
		return ""
	}

	name := strings.TrimPrefix(fields[0], "-")
	for _, known := range [...]string{"auth", "account", "password", "session"} {
		if name == known {
			return name
		}
	}
	return ""
}
|
||||||
|
|
||||||
|
// isExcludedLockscreenPamLine reports whether line references pam_u2f or
// pam_fprintd outside a trailing comment; those modules are filtered out of
// the generated lockscreen service (they get dedicated services instead).
func isExcludedLockscreenPamLine(line string) bool {
	for _, field := range strings.Fields(line) {
		if strings.HasPrefix(field, "#") {
			// Everything after a comment marker is ignored.
			return false
		}
		if strings.Contains(field, "pam_u2f") || strings.Contains(field, "pam_fprintd") {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
func parseLockscreenPamIncludeDirective(trimmed string, inheritedFilter string) (lockscreenPamIncludeDirective, bool) {
|
||||||
|
fields := strings.Fields(trimmed)
|
||||||
|
if len(fields) >= 2 && fields[0] == "@include" {
|
||||||
|
return lockscreenPamIncludeDirective{
|
||||||
|
target: fields[1],
|
||||||
|
filterType: inheritedFilter,
|
||||||
|
}, true
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(fields) >= 3 && (fields[1] == "include" || fields[1] == "substack") {
|
||||||
|
lineType := pamDirectiveType(trimmed)
|
||||||
|
if lineType == "" {
|
||||||
|
return lockscreenPamIncludeDirective{}, false
|
||||||
|
}
|
||||||
|
return lockscreenPamIncludeDirective{
|
||||||
|
target: fields[2],
|
||||||
|
filterType: lineType,
|
||||||
|
}, true
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(fields) >= 3 && fields[1] == "@include" {
|
||||||
|
lineType := pamDirectiveType(trimmed)
|
||||||
|
if lineType == "" {
|
||||||
|
return lockscreenPamIncludeDirective{}, false
|
||||||
|
}
|
||||||
|
return lockscreenPamIncludeDirective{
|
||||||
|
target: fields[2],
|
||||||
|
filterType: lineType,
|
||||||
|
}, true
|
||||||
|
}
|
||||||
|
|
||||||
|
return lockscreenPamIncludeDirective{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveLockscreenPamIncludePath maps a PAM include target to an absolute
// path inside pamDir, rejecting empty targets, relative traversal out of
// pamDir, and absolute paths that live elsewhere.
func resolveLockscreenPamIncludePath(pamDir, target string) (string, error) {
	if len(strings.TrimSpace(target)) == 0 {
		return "", fmt.Errorf("empty PAM include target")
	}

	dir := filepath.Clean(pamDir)
	cleaned := filepath.Clean(target)

	if filepath.IsAbs(target) {
		// Absolute includes are honored only when they point into pamDir.
		if filepath.Dir(cleaned) != dir {
			return "", fmt.Errorf("unsupported PAM include outside %s: %s", dir, target)
		}
		return cleaned, nil
	}

	// Relative includes must not escape pamDir.
	if cleaned == "." || cleaned == ".." || strings.HasPrefix(cleaned, ".."+string(os.PathSeparator)) {
		return "", fmt.Errorf("invalid PAM include target: %s", target)
	}
	return filepath.Join(dir, cleaned), nil
}
|
||||||
|
|
||||||
|
// resolveService flattens the PAM service file serviceName into a list of raw
// directive lines, recursively following include/substack/@include lines,
// filtering to filterType (when non-empty), and dropping pam_u2f/pam_fprintd
// lines. stack is the chain of files currently being resolved and is used to
// detect include cycles.
func (r lockscreenPamResolver) resolveService(serviceName string, filterType string, stack []string) ([]string, error) {
	path, err := resolveLockscreenPamIncludePath(r.pamDir, serviceName)
	if err != nil {
		return nil, err
	}

	// Cycle detection: refuse to re-enter a file already on the stack.
	for _, seen := range stack {
		if seen == path {
			chain := append(append([]string{}, stack...), path)
			display := make([]string, 0, len(chain))
			for _, item := range chain {
				display = append(display, filepath.Base(item))
			}
			return nil, fmt.Errorf("cyclic PAM include detected: %s", strings.Join(display, " -> "))
		}
	}

	data, err := r.readFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read PAM file %s: %w", path, err)
	}

	var resolved []string
	for _, rawLine := range strings.Split(strings.ReplaceAll(string(data), "\r\n", "\n"), "\n") {
		rawLine = strings.TrimRight(rawLine, "\r")
		trimmed := strings.TrimSpace(rawLine)
		// Skip blanks, comments, and the PAM magic header.
		if trimmed == "" || strings.HasPrefix(trimmed, "#") || trimmed == "#%PAM-1.0" {
			continue
		}

		if include, ok := parseLockscreenPamIncludeDirective(trimmed, filterType); ok {
			// A typed include line is itself subject to the active filter.
			lineType := pamDirectiveType(trimmed)
			if filterType != "" && lineType != "" && lineType != filterType {
				continue
			}

			nested, err := r.resolveService(include.target, include.filterType, append(stack, path))
			if err != nil {
				return nil, err
			}
			resolved = append(resolved, nested...)
			continue
		}

		lineType := pamDirectiveType(trimmed)
		if lineType == "" {
			// Anything other than auth/account/password/session is unexpected
			// in a file we are about to replicate; fail loudly.
			return nil, fmt.Errorf("unsupported PAM directive in %s: %s", filepath.Base(path), trimmed)
		}
		if filterType != "" && lineType != filterType {
			continue
		}
		if isExcludedLockscreenPamLine(trimmed) {
			// U2F/fingerprint get dedicated DMS services; keep them out here.
			continue
		}

		resolved = append(resolved, rawLine)
	}

	return resolved, nil
}
|
||||||
|
|
||||||
|
func buildManagedLockscreenPamContent(pamDir string, readFile func(string) ([]byte, error)) (string, error) {
|
||||||
|
resolver := lockscreenPamResolver{
|
||||||
|
pamDir: pamDir,
|
||||||
|
readFile: readFile,
|
||||||
|
}
|
||||||
|
|
||||||
|
resolvedLines, err := resolver.resolveService("login", "", nil)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if len(resolvedLines) == 0 {
|
||||||
|
return "", fmt.Errorf("no auth directives remained after filtering %s", filepath.Join(pamDir, "login"))
|
||||||
|
}
|
||||||
|
|
||||||
|
hasAuth := false
|
||||||
|
for _, line := range resolvedLines {
|
||||||
|
if pamDirectiveType(strings.TrimSpace(line)) == "auth" {
|
||||||
|
hasAuth = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !hasAuth {
|
||||||
|
return "", fmt.Errorf("no auth directives remained after filtering %s", filepath.Join(pamDir, "login"))
|
||||||
|
}
|
||||||
|
|
||||||
|
var b strings.Builder
|
||||||
|
b.WriteString("#%PAM-1.0\n")
|
||||||
|
b.WriteString(LockscreenPamManagedBlockStart + "\n")
|
||||||
|
for _, line := range resolvedLines {
|
||||||
|
b.WriteString(line)
|
||||||
|
b.WriteByte('\n')
|
||||||
|
}
|
||||||
|
b.WriteString(LockscreenPamManagedBlockEnd + "\n")
|
||||||
|
return b.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildManagedLockscreenU2FPamContent() string {
|
||||||
|
var b strings.Builder
|
||||||
|
b.WriteString("#%PAM-1.0\n")
|
||||||
|
b.WriteString(LockscreenU2FPamManagedBlockStart + "\n")
|
||||||
|
b.WriteString("auth required pam_u2f.so cue nouserok timeout=10\n")
|
||||||
|
b.WriteString(LockscreenU2FPamManagedBlockEnd + "\n")
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// syncLockscreenPamConfigWithDeps creates or refreshes /etc/pam.d/dankshell,
// the lock screen's PAM service, by flattening the host "login" service. A
// user-customized file (one lacking the DMS markers) is left untouched.
// No-op on NixOS, where PAM is declared via NixOS modules.
func syncLockscreenPamConfigWithDeps(logFunc func(string), sudoPassword string, deps syncDeps) error {
	if deps.isNixOS() {
		logFunc("ℹ NixOS detected. DMS continues to use /etc/pam.d/login for lock screen password auth on NixOS unless you declare security.pam.services.dankshell yourself. U2F and fingerprint are handled separately and should not be included in dankshell.")
		return nil
	}

	existingData, err := deps.readFile(deps.dankshellPath)
	if err == nil {
		// Refuse to overwrite a file the user wrote themselves.
		if !hasManagedLockscreenPamFile(string(existingData)) {
			logFunc("ℹ Custom /etc/pam.d/dankshell found (no DMS block). Skipping.")
			return nil
		}
	} else if !os.IsNotExist(err) {
		return fmt.Errorf("failed to read %s: %w", deps.dankshellPath, err)
	}

	content, err := buildManagedLockscreenPamContent(deps.pamDir, deps.readFile)
	if err != nil {
		return fmt.Errorf("failed to build %s from %s: %w", deps.dankshellPath, filepath.Join(deps.pamDir, "login"), err)
	}

	if err := writeManagedPamFile(content, deps.dankshellPath, sudoPassword, deps); err != nil {
		return fmt.Errorf("failed to write %s: %w", deps.dankshellPath, err)
	}

	logFunc("✓ Created or updated /etc/pam.d/dankshell for lock screen authentication")
	return nil
}
|
||||||
|
|
||||||
|
// syncLockscreenU2FPamConfigWithDeps creates, refreshes, or removes
// /etc/pam.d/dankshell-u2f according to the lock screen U2F toggle.
// User-customized files (without DMS markers) are never touched or removed.
// No-op on NixOS.
func syncLockscreenU2FPamConfigWithDeps(logFunc func(string), sudoPassword string, enabled bool, deps syncDeps) error {
	if deps.isNixOS() {
		logFunc("ℹ NixOS detected. DMS does not manage /etc/pam.d/dankshell-u2f on NixOS. Keep using the bundled U2F helper or configure a custom PAM service yourself.")
		return nil
	}

	existingData, err := deps.readFile(deps.dankshellU2fPath)
	if err != nil && !os.IsNotExist(err) {
		return fmt.Errorf("failed to read %s: %w", deps.dankshellU2fPath, err)
	}

	if enabled {
		// Toggle on: write the managed file, unless a custom one exists.
		if err == nil && !hasManagedLockscreenU2FPamFile(string(existingData)) {
			logFunc("ℹ Custom /etc/pam.d/dankshell-u2f found (no DMS block). Skipping.")
			return nil
		}
		if err := writeManagedPamFile(buildManagedLockscreenU2FPamContent(), deps.dankshellU2fPath, sudoPassword, deps); err != nil {
			return fmt.Errorf("failed to write %s: %w", deps.dankshellU2fPath, err)
		}
		logFunc("✓ Created or updated /etc/pam.d/dankshell-u2f for lock screen security-key authentication")
		return nil
	}

	// Toggle off: delete the file only when DMS owns it.
	if os.IsNotExist(err) {
		return nil
	}
	if err == nil && !hasManagedLockscreenU2FPamFile(string(existingData)) {
		logFunc("ℹ Custom /etc/pam.d/dankshell-u2f found (no DMS block). Leaving it untouched.")
		return nil
	}

	if err := deps.runSudoCmd(sudoPassword, "rm", "-f", deps.dankshellU2fPath); err != nil {
		return fmt.Errorf("failed to remove %s: %w", deps.dankshellU2fPath, err)
	}
	logFunc("✓ Removed DMS-managed /etc/pam.d/dankshell-u2f")
	return nil
}
|
||||||
|
|
||||||
|
func stripManagedGreeterPamBlock(content string) (string, bool) {
|
||||||
|
lines := strings.Split(content, "\n")
|
||||||
|
filtered := make([]string, 0, len(lines))
|
||||||
|
inManagedBlock := false
|
||||||
|
removed := false
|
||||||
|
|
||||||
|
for _, line := range lines {
|
||||||
|
trimmed := strings.TrimSpace(line)
|
||||||
|
if trimmed == GreeterPamManagedBlockStart {
|
||||||
|
inManagedBlock = true
|
||||||
|
removed = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if trimmed == GreeterPamManagedBlockEnd {
|
||||||
|
inManagedBlock = false
|
||||||
|
removed = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if inManagedBlock {
|
||||||
|
removed = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
filtered = append(filtered, line)
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(filtered, "\n"), removed
|
||||||
|
}
|
||||||
|
|
||||||
|
// stripLegacyGreeterPamLines removes pre-block DMS marker comments
// ("# DMS greeter fingerprint" / "# DMS greeter U2F") and, when a marker is
// immediately followed by the fingerprint/U2F auth line it introduced, that
// auth line too. It reports whether anything was removed.
func stripLegacyGreeterPamLines(content string) (string, bool) {
	lines := strings.Split(content, "\n")
	filtered := make([]string, 0, len(lines))
	removed := false

	for i := 0; i < len(lines); i++ {
		trimmed := strings.TrimSpace(lines[i])
		if strings.HasPrefix(trimmed, legacyGreeterPamFprintComment) || strings.HasPrefix(trimmed, legacyGreeterPamU2FComment) {
			removed = true
			// Also skip the companion auth line the legacy comment annotated,
			// but only when it really is a fingerprint/U2F auth directive.
			if i+1 < len(lines) {
				nextLine := strings.TrimSpace(lines[i+1])
				if strings.HasPrefix(nextLine, "auth") &&
					(strings.Contains(nextLine, "pam_fprintd") || strings.Contains(nextLine, "pam_u2f")) {
					i++
				}
			}
			continue
		}
		filtered = append(filtered, lines[i])
	}

	return strings.Join(filtered, "\n"), removed
}
|
||||||
|
|
||||||
|
// insertManagedGreeterPamBlock inserts blockLines immediately before the
// first auth directive in content, so the managed modules run ahead of the
// existing auth stack. It fails when content has no auth directive at all.
func insertManagedGreeterPamBlock(content string, blockLines []string, greetdPamPath string) (string, error) {
	lines := strings.Split(content, "\n")
	for i, line := range lines {
		// A line whose trimmed form starts with "auth" is necessarily
		// non-empty and not a comment, so no extra checks are needed
		// (the original redundantly tested both).
		if !strings.HasPrefix(strings.TrimSpace(line), "auth") {
			continue
		}

		block := strings.Join(blockLines, "\n")
		prefix := strings.Join(lines[:i], "\n")
		suffix := strings.Join(lines[i:], "\n")
		switch {
		case prefix == "":
			return block + "\n" + suffix, nil
		case suffix == "":
			return prefix + "\n" + block, nil
		default:
			return prefix + "\n" + block + "\n" + suffix, nil
		}
	}
	return "", fmt.Errorf("no auth directive found in %s", greetdPamPath)
}
|
||||||
|
|
||||||
|
// syncGreeterPamConfigWithDeps rewrites the DMS-managed auth block inside
// the greetd PAM service according to the user's toggles (or, with forceAuth,
// to whatever modules are installed). Modules already provided by an included
// PAM stack (authselect/pam-auth-update style) are not duplicated; guidance
// is logged instead. No-op on NixOS, and the privileged write is skipped
// when the resulting file is unchanged.
func syncGreeterPamConfigWithDeps(logFunc func(string), sudoPassword string, settings AuthSettings, forceAuth bool, deps syncDeps) error {
	var wantFprint, wantU2f bool
	fprintToggleEnabled := forceAuth
	u2fToggleEnabled := forceAuth
	if forceAuth {
		// Forced mode: enable whichever modules exist, ignoring settings.
		wantFprint = deps.pamModuleExists("pam_fprintd.so")
		wantU2f = deps.pamModuleExists("pam_u2f.so")
	} else {
		fprintToggleEnabled = settings.GreeterEnableFprint
		u2fToggleEnabled = settings.GreeterEnableU2f
		fprintModule := deps.pamModuleExists("pam_fprintd.so")
		u2fModule := deps.pamModuleExists("pam_u2f.so")
		wantFprint = settings.GreeterEnableFprint && fprintModule
		wantU2f = settings.GreeterEnableU2f && u2fModule
		// Warn when a toggle is on but the backing module is missing.
		if settings.GreeterEnableFprint && !fprintModule {
			logFunc("⚠ Warning: greeter fingerprint toggle is enabled, but pam_fprintd.so was not found.")
		}
		if settings.GreeterEnableU2f && !u2fModule {
			logFunc("⚠ Warning: greeter security key toggle is enabled, but pam_u2f.so was not found.")
		}
	}

	if deps.isNixOS() {
		logFunc("ℹ NixOS detected: PAM config is managed by NixOS modules. Skipping DMS PAM block write.")
		logFunc(" Configure fingerprint/U2F auth via your greetd NixOS module options (e.g. security.pam.services.greetd).")
		return nil
	}

	pamData, err := deps.readFile(deps.greetdPath)
	if err != nil {
		return fmt.Errorf("failed to read %s: %w", deps.greetdPath, err)
	}
	originalContent := string(pamData)
	// Start from content with any previous DMS block/legacy lines removed.
	content, _ := stripManagedGreeterPamBlock(originalContent)
	content, _ = stripLegacyGreeterPamLines(content)

	includedFprintFile := detectIncludedPamModule(content, "pam_fprintd.so", deps)
	includedU2fFile := detectIncludedPamModule(content, "pam_u2f.so", deps)
	fprintAvailableForCurrentUser := deps.fingerprintAvailableForCurrentUser()
	// Never add a module the included stack already provides: PAM would run
	// it twice (double fingerprint prompt / double key tap).
	if wantFprint && includedFprintFile != "" {
		logFunc("⚠ pam_fprintd already present in included " + includedFprintFile + " (managed by authselect/pam-auth-update). Skipping DMS fprint block to avoid double-fingerprint auth.")
		wantFprint = false
	}
	if wantU2f && includedU2fFile != "" {
		logFunc("⚠ pam_u2f already present in included " + includedU2fFile + " (managed by authselect/pam-auth-update). Skipping DMS U2F block to avoid double security-key auth.")
		wantU2f = false
	}
	// Explain the effective state when an included stack provides the module.
	if !wantFprint && includedFprintFile != "" {
		if fprintToggleEnabled {
			logFunc("ℹ Fingerprint auth is still enabled via included " + includedFprintFile + ".")
			if fprintAvailableForCurrentUser {
				logFunc(" DMS toggle is enabled, and effective auth is provided by the included PAM stack.")
			} else {
				logFunc(" No enrolled fingerprints detected for the current user; password auth remains the effective path.")
			}
		} else {
			if fprintAvailableForCurrentUser {
				logFunc("ℹ Fingerprint auth is active via included " + includedFprintFile + " while DMS fingerprint toggle is off.")
				logFunc(" Password login will work but may be delayed while the fingerprint module runs first.")
				logFunc(" To eliminate the delay, " + pamManagerHintForCurrentDistro())
			} else {
				logFunc("ℹ pam_fprintd is present via included " + includedFprintFile + ", but no enrolled fingerprints were detected for the current user.")
				logFunc(" Password auth remains the effective login path.")
			}
		}
	}
	if !wantU2f && includedU2fFile != "" {
		if u2fToggleEnabled {
			logFunc("ℹ Security-key auth is still enabled via included " + includedU2fFile + ".")
			logFunc(" DMS toggle is enabled, but effective auth is provided by the included PAM stack.")
		} else {
			logFunc("⚠ Security-key auth is active via included " + includedU2fFile + " while DMS security-key toggle is off.")
			logFunc(" " + pamManagerHintForCurrentDistro())
		}
	}

	if wantFprint || wantU2f {
		// Build the managed block and splice it before the first auth line.
		blockLines := []string{GreeterPamManagedBlockStart}
		if wantFprint {
			blockLines = append(blockLines, "auth sufficient pam_fprintd.so max-tries=1 timeout=5")
		}
		if wantU2f {
			blockLines = append(blockLines, "auth sufficient pam_u2f.so cue nouserok timeout=10")
		}
		blockLines = append(blockLines, GreeterPamManagedBlockEnd)

		content, err = insertManagedGreeterPamBlock(content, blockLines, deps.greetdPath)
		if err != nil {
			return err
		}
	}

	// Nothing changed: skip the privileged write entirely.
	if content == originalContent {
		return nil
	}

	if err := writeManagedPamFile(content, deps.greetdPath, sudoPassword, deps); err != nil {
		return fmt.Errorf("failed to install updated PAM config at %s: %w", deps.greetdPath, err)
	}
	if wantFprint || wantU2f {
		logFunc("✓ Configured greetd PAM for fingerprint/U2F")
	} else {
		logFunc("✓ Cleared DMS-managed greeter PAM auth block")
	}
	return nil
}
|
||||||
|
|
||||||
|
func writeManagedPamFile(content string, destPath string, sudoPassword string, deps syncDeps) error {
|
||||||
|
tmpFile, err := deps.createTemp("", "dms-pam-*.conf")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
tmpPath := tmpFile.Name()
|
||||||
|
defer func() {
|
||||||
|
_ = deps.removeFile(tmpPath)
|
||||||
|
}()
|
||||||
|
|
||||||
|
if _, err := tmpFile.WriteString(content); err != nil {
|
||||||
|
tmpFile.Close()
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := tmpFile.Close(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := deps.runSudoCmd(sudoPassword, "cp", tmpPath, destPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := deps.runSudoCmd(sudoPassword, "chmod", "644", destPath); err != nil {
|
||||||
|
return fmt.Errorf("failed to set permissions on %s: %w", destPath, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func pamManagerHintForCurrentDistro() string {
|
||||||
|
osInfo, err := distros.GetOSInfo()
|
||||||
|
if err != nil {
|
||||||
|
return "Disable it in your PAM manager (authselect/pam-auth-update) or in the included PAM stack to force password-only greeter login."
|
||||||
|
}
|
||||||
|
config, exists := distros.Registry[osInfo.Distribution.ID]
|
||||||
|
if !exists {
|
||||||
|
return "Disable it in your PAM manager (authselect/pam-auth-update) or in the included PAM stack to force password-only greeter login."
|
||||||
|
}
|
||||||
|
|
||||||
|
switch config.Family {
|
||||||
|
case distros.FamilyFedora:
|
||||||
|
return "Disable it in authselect to force password-only greeter login."
|
||||||
|
case distros.FamilyDebian, distros.FamilyUbuntu:
|
||||||
|
return "Disable it in pam-auth-update to force password-only greeter login."
|
||||||
|
default:
|
||||||
|
return "Disable it in your distro PAM manager (authselect/pam-auth-update) or in the included PAM stack to force password-only greeter login."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// pamModuleExists reports whether the named PAM module file (e.g.
// "pam_fprintd.so") is present in any of the well-known security library
// directories, covering lib/lib64 layouts, Debian/Ubuntu multiarch paths for
// x86_64 and aarch64, and the NixOS current-system profile.
func pamModuleExists(module string) bool {
	searchDirs := []string{
		"/usr/lib64/security",
		"/usr/lib/security",
		"/lib64/security",
		"/lib/security",
		"/lib/x86_64-linux-gnu/security",
		"/usr/lib/x86_64-linux-gnu/security",
		"/lib/aarch64-linux-gnu/security",
		"/usr/lib/aarch64-linux-gnu/security",
		"/run/current-system/sw/lib64/security",
		"/run/current-system/sw/lib/security",
	}
	for _, dir := range searchDirs {
		candidate := filepath.Join(dir, module)
		if _, err := os.Stat(candidate); err == nil {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
// hasEnrolledFingerprintOutput inspects fprintd-list output (case-insensitive)
// and reports whether it indicates at least one enrolled fingerprint.
// Explicit "no ... enrolled" phrases win over everything else; otherwise a
// positive phrase or a per-line enrollment entry counts as enrolled.
func hasEnrolledFingerprintOutput(output string) bool {
	text := strings.ToLower(output)

	// Negative phrasing takes priority: any of these means nothing enrolled.
	for _, phrase := range []string{
		"no fingers enrolled",
		"no fingerprints enrolled",
		"no prints enrolled",
	} {
		if strings.Contains(text, phrase) {
			return false
		}
	}

	// Positive summary phrasing.
	if strings.Contains(text, "has fingers enrolled") ||
		strings.Contains(text, "has fingerprints enrolled") {
		return true
	}

	// Fall back to scanning individual enrollment entry lines.
	for _, raw := range strings.Split(text, "\n") {
		line := strings.TrimSpace(raw)
		switch {
		case strings.HasPrefix(line, "finger:"):
			return true
		case strings.HasPrefix(line, "- ") && strings.Contains(line, "finger"):
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
func FingerprintAuthAvailableForCurrentUser() bool {
|
||||||
|
username := strings.TrimSpace(os.Getenv("SUDO_USER"))
|
||||||
|
if username == "" {
|
||||||
|
username = strings.TrimSpace(os.Getenv("USER"))
|
||||||
|
}
|
||||||
|
if username == "" {
|
||||||
|
out, err := exec.Command("id", "-un").Output()
|
||||||
|
if err == nil {
|
||||||
|
username = strings.TrimSpace(string(out))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return fingerprintAuthAvailableForUser(username)
|
||||||
|
}
|
||||||
|
|
||||||
|
func fingerprintAuthAvailableForUser(username string) bool {
|
||||||
|
username = strings.TrimSpace(username)
|
||||||
|
if username == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !pamModuleExists("pam_fprintd.so") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if _, err := exec.LookPath("fprintd-list"); err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
out, err := exec.CommandContext(ctx, "fprintd-list", username).CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return hasEnrolledFingerprintOutput(string(out))
|
||||||
|
}
|
||||||
|
|
||||||
|
func runSudoCmd(sudoPassword string, command string, args ...string) error {
|
||||||
|
var cmd *exec.Cmd
|
||||||
|
|
||||||
|
if sudoPassword != "" {
|
||||||
|
fullArgs := append([]string{command}, args...)
|
||||||
|
quotedArgs := make([]string, len(fullArgs))
|
||||||
|
for i, arg := range fullArgs {
|
||||||
|
quotedArgs[i] = "'" + strings.ReplaceAll(arg, "'", "'\\''") + "'"
|
||||||
|
}
|
||||||
|
cmdStr := strings.Join(quotedArgs, " ")
|
||||||
|
|
||||||
|
cmd = distros.ExecSudoCommand(context.Background(), sudoPassword, cmdStr)
|
||||||
|
} else {
|
||||||
|
cmd = exec.Command("sudo", append([]string{command}, args...)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd.Stdout = os.Stdout
|
||||||
|
cmd.Stderr = os.Stderr
|
||||||
|
return cmd.Run()
|
||||||
|
}
|
||||||
671
core/internal/pam/pam_test.go
Normal file
671
core/internal/pam/pam_test.go
Normal file
@@ -0,0 +1,671 @@
|
|||||||
|
package pam
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
// writeTestFile creates path's parent directory as needed and writes content
// there, failing the test immediately on any filesystem error.
func writeTestFile(t *testing.T, path string, content string) {
	t.Helper()
	parent := filepath.Dir(path)
	if err := os.MkdirAll(parent, 0o755); err != nil {
		t.Fatalf("failed to create parent dir for %s: %v", path, err)
	}
	if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
		t.Fatalf("failed to write %s: %v", path, err)
	}
}
|
||||||
|
|
||||||
|
// pamTestEnv is a sandboxed filesystem layout plus fake capability flags used
// to exercise the PAM sync logic without touching the real /etc/pam.d or
// requiring root, fprintd, or installed PAM modules.
type pamTestEnv struct {
	pamDir           string // sandbox stand-in for /etc/pam.d
	greetdPath       string // sandbox stand-in for /etc/pam.d/greetd
	dankshellPath    string // sandbox stand-in for /etc/pam.d/dankshell
	dankshellU2fPath string // sandbox stand-in for /etc/pam.d/dankshell-u2f
	tmpDir           string // scratch dir backing the fake createTemp
	homeDir          string // fake $HOME holding DMS settings.json
	// availableModules is the lookup table backing the stubbed
	// pamModuleExists dependency (module file name -> installed?).
	availableModules map[string]bool
	// fingerprintAvailable stubs "current user has enrolled fingerprints".
	fingerprintAvailable bool
}
|
||||||
|
|
||||||
|
func newPamTestEnv(t *testing.T) *pamTestEnv {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
root := t.TempDir()
|
||||||
|
pamDir := filepath.Join(root, "pam.d")
|
||||||
|
tmpDir := filepath.Join(root, "tmp")
|
||||||
|
homeDir := filepath.Join(root, "home")
|
||||||
|
|
||||||
|
for _, dir := range []string{pamDir, tmpDir, homeDir} {
|
||||||
|
if err := os.MkdirAll(dir, 0o755); err != nil {
|
||||||
|
t.Fatalf("failed to create %s: %v", dir, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &pamTestEnv{
|
||||||
|
pamDir: pamDir,
|
||||||
|
greetdPath: filepath.Join(pamDir, "greetd"),
|
||||||
|
dankshellPath: filepath.Join(pamDir, "dankshell"),
|
||||||
|
dankshellU2fPath: filepath.Join(pamDir, "dankshell-u2f"),
|
||||||
|
tmpDir: tmpDir,
|
||||||
|
homeDir: homeDir,
|
||||||
|
availableModules: map[string]bool{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *pamTestEnv) writePamFile(t *testing.T, name string, content string) {
|
||||||
|
t.Helper()
|
||||||
|
writeTestFile(t, filepath.Join(e.pamDir, name), content)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *pamTestEnv) writeSettings(t *testing.T, content string) {
|
||||||
|
t.Helper()
|
||||||
|
writeTestFile(t, filepath.Join(e.homeDir, ".config", "DankMaterialShell", "settings.json"), content)
|
||||||
|
}
|
||||||
|
|
||||||
|
// deps builds a syncDeps wired entirely to the sandbox: real file I/O is
// redirected into the temp root, sudo is replaced by a local emulation of the
// only commands the production code issues (cp/chmod/rm -f), and module /
// fingerprint availability checks are answered from the env's fake flags.
func (e *pamTestEnv) deps(isNixOS bool) syncDeps {
	return syncDeps{
		pamDir:           e.pamDir,
		greetdPath:       e.greetdPath,
		dankshellPath:    e.dankshellPath,
		dankshellU2fPath: e.dankshellU2fPath,
		isNixOS:          func() bool { return isNixOS },
		readFile:         os.ReadFile,
		stat:             os.Stat,
		// Temp files are forced into the sandbox tmp dir regardless of the
		// directory the caller requested.
		createTemp: func(_ string, pattern string) (*os.File, error) {
			return os.CreateTemp(e.tmpDir, pattern)
		},
		removeFile: os.Remove,
		// Fake sudo runner: validates arguments strictly and performs the
		// equivalent operation with plain os calls. Unknown commands fail so
		// the tests catch any new privileged operation the code starts using.
		runSudoCmd: func(_ string, command string, args ...string) error {
			switch command {
			case "cp":
				if len(args) != 2 {
					return fmt.Errorf("unexpected cp args: %v", args)
				}
				data, err := os.ReadFile(args[0])
				if err != nil {
					return err
				}
				if err := os.MkdirAll(filepath.Dir(args[1]), 0o755); err != nil {
					return err
				}
				return os.WriteFile(args[1], data, 0o644)
			case "chmod":
				// Permissions are irrelevant inside the sandbox; just verify
				// the call shape.
				if len(args) != 2 {
					return fmt.Errorf("unexpected chmod args: %v", args)
				}
				return nil
			case "rm":
				if len(args) != 2 || args[0] != "-f" {
					return fmt.Errorf("unexpected rm args: %v", args)
				}
				// Mirror rm -f: a missing target is not an error.
				if err := os.Remove(args[1]); err != nil && !os.IsNotExist(err) {
					return err
				}
				return nil
			default:
				return fmt.Errorf("unexpected sudo command: %s %v", command, args)
			}
		},
		pamModuleExists: func(module string) bool {
			return e.availableModules[module]
		},
		fingerprintAvailableForCurrentUser: func() bool {
			return e.fingerprintAvailable
		},
	}
}
|
||||||
|
|
||||||
|
// readFileString returns the contents of path as a string, failing the test
// if the file cannot be read.
func readFileString(t *testing.T, path string) string {
	t.Helper()
	raw, err := os.ReadFile(path)
	if err != nil {
		t.Fatalf("failed to read %s: %v", path, err)
	}
	return string(raw)
}
|
||||||
|
|
||||||
|
func TestHasManagedLockscreenPamFile(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
content string
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "both markers present",
|
||||||
|
content: "#%PAM-1.0\n" +
|
||||||
|
LockscreenPamManagedBlockStart + "\n" +
|
||||||
|
"auth sufficient pam_unix.so\n" +
|
||||||
|
LockscreenPamManagedBlockEnd + "\n",
|
||||||
|
want: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "missing end marker is not managed",
|
||||||
|
content: "#%PAM-1.0\n" +
|
||||||
|
LockscreenPamManagedBlockStart + "\n" +
|
||||||
|
"auth sufficient pam_unix.so\n",
|
||||||
|
want: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "custom file is not managed",
|
||||||
|
content: "#%PAM-1.0\nauth sufficient pam_unix.so\n",
|
||||||
|
want: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
tt := tt
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
if got := hasManagedLockscreenPamFile(tt.content); got != tt.want {
|
||||||
|
t.Fatalf("hasManagedLockscreenPamFile() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestBuildManagedLockscreenPamContent exercises the resolver that flattens a
// login PAM stack (following include/substack/@include directives) into the
// managed dankshell content, stripping fingerprint and U2F modules while
// preserving everything else exactly once.
func TestBuildManagedLockscreenPamContent(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name            string
		files           map[string]string // PAM files written into the sandbox pam.d
		wantContains    []string          // substrings that must appear in the output
		wantNotContains []string          // substrings that must be stripped
		wantCounts      map[string]int    // exact occurrence counts (no duplication)
		wantErr         string            // expected error substring; empty means success
	}{
		{
			name: "preserves custom modules and strips direct u2f and fprint directives",
			files: map[string]string{
				"login": "#%PAM-1.0\n" +
					"auth include system-auth\n" +
					"account include system-auth\n" +
					"session include system-auth\n",
				"system-auth": "auth requisite pam_nologin.so\n" +
					"auth sufficient pam_unix.so try_first_pass nullok\n" +
					"auth sufficient pam_u2f.so cue\n" +
					"auth sufficient pam_fprintd.so max-tries=1\n" +
					"auth required pam_radius_auth.so conf=/etc/raddb/server\n" +
					"account required pam_access.so\n" +
					"session optional pam_lastlog.so silent\n",
			},
			wantContains: []string{
				"#%PAM-1.0",
				LockscreenPamManagedBlockStart,
				LockscreenPamManagedBlockEnd,
				"auth requisite pam_nologin.so",
				"auth sufficient pam_unix.so try_first_pass nullok",
				"auth required pam_radius_auth.so conf=/etc/raddb/server",
				"account required pam_access.so",
				"session optional pam_lastlog.so silent",
			},
			wantNotContains: []string{
				"pam_u2f",
				"pam_fprintd",
			},
			wantCounts: map[string]int{
				"auth required pam_radius_auth.so conf=/etc/raddb/server": 1,
				"account required pam_access.so":                          1,
			},
		},
		{
			name: "resolves nested include substack and @include transitively",
			files: map[string]string{
				"login": "#%PAM-1.0\n" +
					"auth include system-auth\n" +
					"account include system-auth\n" +
					"password include system-auth\n" +
					"session include system-auth\n",
				"system-auth": "auth substack custom-auth\n" +
					"account include custom-auth\n" +
					"password include custom-auth\n" +
					"session @include common-session\n",
				"custom-auth": "auth required pam_custom.so one=two\n" +
					"account required pam_custom_account.so\n" +
					"password required pam_custom_password.so\n",
				"common-session": "session optional pam_fprintd.so max-tries=1\n" +
					"session optional pam_lastlog.so silent\n",
			},
			wantContains: []string{
				"auth required pam_custom.so one=two",
				"account required pam_custom_account.so",
				"password required pam_custom_password.so",
				"session optional pam_lastlog.so silent",
			},
			wantNotContains: []string{
				"pam_fprintd",
			},
			wantCounts: map[string]int{
				"auth required pam_custom.so one=two":      1,
				"account required pam_custom_account.so":   1,
				"password required pam_custom_password.so": 1,
				"session optional pam_lastlog.so silent":   1,
			},
		},
		{
			name: "missing include fails",
			files: map[string]string{
				"login": "#%PAM-1.0\nauth include missing-auth\n",
			},
			wantErr: "failed to read PAM file",
		},
		{
			name: "cyclic include fails",
			files: map[string]string{
				"login":       "#%PAM-1.0\nauth include system-auth\n",
				"system-auth": "auth include login\n",
			},
			wantErr: "cyclic PAM include detected",
		},
		{
			name: "no auth directives remain after filtering fails",
			files: map[string]string{
				"login":       "#%PAM-1.0\nauth include system-auth\n",
				"system-auth": "auth sufficient pam_u2f.so cue\n",
			},
			wantErr: "no auth directives remained after filtering",
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			env := newPamTestEnv(t)
			for name, content := range tt.files {
				env.writePamFile(t, name, content)
			}

			content, err := buildManagedLockscreenPamContent(env.pamDir, os.ReadFile)
			if tt.wantErr != "" {
				if err == nil {
					t.Fatalf("expected error containing %q, got nil", tt.wantErr)
				}
				if !strings.Contains(err.Error(), tt.wantErr) {
					t.Fatalf("error = %q, want substring %q", err.Error(), tt.wantErr)
				}
				return
			}
			if err != nil {
				t.Fatalf("buildManagedLockscreenPamContent returned error: %v", err)
			}

			for _, want := range tt.wantContains {
				if !strings.Contains(content, want) {
					t.Errorf("missing expected string %q in output:\n%s", want, content)
				}
			}
			for _, notWant := range tt.wantNotContains {
				if strings.Contains(content, notWant) {
					t.Errorf("unexpected string %q found in output:\n%s", notWant, content)
				}
			}
			for want, wantCount := range tt.wantCounts {
				if gotCount := strings.Count(content, want); gotCount != wantCount {
					t.Errorf("count for %q = %d, want %d\noutput:\n%s", want, gotCount, wantCount, content)
				}
			}
		})
	}
}
|
||||||
|
|
||||||
|
// TestSyncLockscreenPamConfigWithDeps covers the four lockscreen-sync paths:
// a user-customized dankshell file is left alone, a DMS-managed one is rebuilt
// from the resolved login stack, an unconvertible login stack is a hard error
// on mutable systems, and NixOS is informational-only (no writes).
func TestSyncLockscreenPamConfigWithDeps(t *testing.T) {
	t.Parallel()

	t.Run("custom dankshell file is skipped untouched", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		customContent := "#%PAM-1.0\nauth required pam_unix.so\n"
		env.writePamFile(t, "dankshell", customContent)

		var logs []string
		err := syncLockscreenPamConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", env.deps(false))
		if err != nil {
			t.Fatalf("syncLockscreenPamConfigWithDeps returned error: %v", err)
		}

		// The hand-written file must survive byte-for-byte.
		if got := readFileString(t, env.dankshellPath); got != customContent {
			t.Fatalf("custom dankshell content changed\ngot:\n%s\nwant:\n%s", got, customContent)
		}
		if len(logs) == 0 || !strings.Contains(logs[0], "Custom /etc/pam.d/dankshell found") {
			t.Fatalf("expected custom-file skip log, got %v", logs)
		}
	})

	t.Run("managed dankshell file is rewritten from resolved login stack", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.writePamFile(t, "login", "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n")
		env.writePamFile(t, "system-auth", "auth sufficient pam_unix.so try_first_pass nullok\nauth sufficient pam_u2f.so cue\naccount required pam_access.so\n")
		// Existing dankshell carries the managed markers, so it may be rebuilt.
		env.writePamFile(t, "dankshell", "#%PAM-1.0\n"+LockscreenPamManagedBlockStart+"\nauth required pam_env.so\n"+LockscreenPamManagedBlockEnd+"\n")

		var logs []string
		err := syncLockscreenPamConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", env.deps(false))
		if err != nil {
			t.Fatalf("syncLockscreenPamConfigWithDeps returned error: %v", err)
		}

		output := readFileString(t, env.dankshellPath)
		for _, want := range []string{
			LockscreenPamManagedBlockStart,
			"auth sufficient pam_unix.so try_first_pass nullok",
			"account required pam_access.so",
			LockscreenPamManagedBlockEnd,
		} {
			if !strings.Contains(output, want) {
				t.Errorf("missing expected string %q in rewritten dankshell:\n%s", want, output)
			}
		}
		if strings.Contains(output, "pam_u2f") {
			t.Errorf("rewritten dankshell still contains pam_u2f:\n%s", output)
		}
		if len(logs) == 0 || !strings.Contains(logs[len(logs)-1], "Created or updated /etc/pam.d/dankshell") {
			t.Fatalf("expected success log, got %v", logs)
		}
	})

	t.Run("mutable systems fail when login stack cannot be converted safely", func(t *testing.T) {
		t.Parallel()

		// No login file in the sandbox, so the build step must fail.
		env := newPamTestEnv(t)
		err := syncLockscreenPamConfigWithDeps(func(string) {}, "", env.deps(false))
		if err == nil {
			t.Fatal("expected error when login PAM file is missing, got nil")
		}
		if !strings.Contains(err.Error(), "failed to build") {
			t.Fatalf("error = %q, want substring %q", err.Error(), "failed to build")
		}
	})

	t.Run("NixOS remains informational and does not write dankshell", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		var logs []string

		err := syncLockscreenPamConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", env.deps(true))
		if err != nil {
			t.Fatalf("syncLockscreenPamConfigWithDeps returned error on NixOS path: %v", err)
		}
		if len(logs) == 0 || !strings.Contains(logs[0], "NixOS detected") || !strings.Contains(logs[0], "/etc/pam.d/login") {
			t.Fatalf("expected NixOS informational log mentioning /etc/pam.d/login, got %v", logs)
		}
		// NixOS path must be read-only: no dankshell file may appear.
		if _, err := os.Stat(env.dankshellPath); !os.IsNotExist(err) {
			t.Fatalf("expected no dankshell file to be written on NixOS path, stat err = %v", err)
		}
	})
}
|
||||||
|
|
||||||
|
// TestSyncLockscreenU2FPamConfigWithDeps covers the dankshell-u2f lifecycle:
// the toggle creates/rewrites the managed file when enabled, removes only
// DMS-managed files when disabled, and never touches a user-customized file.
func TestSyncLockscreenU2FPamConfigWithDeps(t *testing.T) {
	t.Parallel()

	t.Run("enabled creates managed file", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		var logs []string

		err := syncLockscreenU2FPamConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", true, env.deps(false))
		if err != nil {
			t.Fatalf("syncLockscreenU2FPamConfigWithDeps returned error: %v", err)
		}

		got := readFileString(t, env.dankshellU2fPath)
		if got != buildManagedLockscreenU2FPamContent() {
			t.Fatalf("unexpected managed dankshell-u2f content:\n%s", got)
		}
		if len(logs) == 0 || !strings.Contains(logs[len(logs)-1], "Created or updated /etc/pam.d/dankshell-u2f") {
			t.Fatalf("expected create log, got %v", logs)
		}
	})

	t.Run("enabled rewrites existing managed file", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		// Stale managed file with outdated module options should be replaced.
		env.writePamFile(t, "dankshell-u2f", "#%PAM-1.0\n"+LockscreenU2FPamManagedBlockStart+"\nauth required pam_u2f.so old\n"+LockscreenU2FPamManagedBlockEnd+"\n")

		if err := syncLockscreenU2FPamConfigWithDeps(func(string) {}, "", true, env.deps(false)); err != nil {
			t.Fatalf("syncLockscreenU2FPamConfigWithDeps returned error: %v", err)
		}
		if got := readFileString(t, env.dankshellU2fPath); got != buildManagedLockscreenU2FPamContent() {
			t.Fatalf("managed dankshell-u2f was not rewritten:\n%s", got)
		}
	})

	t.Run("disabled removes DMS-managed file", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.writePamFile(t, "dankshell-u2f", buildManagedLockscreenU2FPamContent())

		var logs []string
		err := syncLockscreenU2FPamConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", false, env.deps(false))
		if err != nil {
			t.Fatalf("syncLockscreenU2FPamConfigWithDeps returned error: %v", err)
		}
		if _, err := os.Stat(env.dankshellU2fPath); !os.IsNotExist(err) {
			t.Fatalf("expected managed dankshell-u2f to be removed, stat err = %v", err)
		}
		if len(logs) == 0 || !strings.Contains(logs[len(logs)-1], "Removed DMS-managed /etc/pam.d/dankshell-u2f") {
			t.Fatalf("expected removal log, got %v", logs)
		}
	})

	t.Run("disabled preserves custom file", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		// No managed markers: this file belongs to the user, not DMS.
		customContent := "#%PAM-1.0\nauth required pam_u2f.so cue\n"
		env.writePamFile(t, "dankshell-u2f", customContent)

		var logs []string
		err := syncLockscreenU2FPamConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", false, env.deps(false))
		if err != nil {
			t.Fatalf("syncLockscreenU2FPamConfigWithDeps returned error: %v", err)
		}
		if got := readFileString(t, env.dankshellU2fPath); got != customContent {
			t.Fatalf("custom dankshell-u2f content changed\ngot:\n%s\nwant:\n%s", got, customContent)
		}
		if len(logs) == 0 || !strings.Contains(logs[0], "Custom /etc/pam.d/dankshell-u2f found") {
			t.Fatalf("expected custom-file log, got %v", logs)
		}
	})
}
|
||||||
|
|
||||||
|
// TestSyncGreeterPamConfigWithDeps verifies greetd PAM editing: the managed
// auth block is inserted before the first existing auth line when the toggles
// are on, and no duplicate block is added when the included stack already
// provides fingerprint auth.
func TestSyncGreeterPamConfigWithDeps(t *testing.T) {
	t.Parallel()

	t.Run("adds managed block for enabled auth modules", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.availableModules["pam_fprintd.so"] = true
		env.availableModules["pam_u2f.so"] = true
		env.writePamFile(t, "greetd", "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n")
		env.writePamFile(t, "system-auth", "auth sufficient pam_unix.so\naccount required pam_unix.so\n")

		settings := AuthSettings{GreeterEnableFprint: true, GreeterEnableU2f: true}
		if err := syncGreeterPamConfigWithDeps(func(string) {}, "", settings, false, env.deps(false)); err != nil {
			t.Fatalf("syncGreeterPamConfigWithDeps returned error: %v", err)
		}

		got := readFileString(t, env.greetdPath)
		for _, want := range []string{
			GreeterPamManagedBlockStart,
			"auth sufficient pam_fprintd.so max-tries=1 timeout=5",
			"auth sufficient pam_u2f.so cue nouserok timeout=10",
			GreeterPamManagedBlockEnd,
		} {
			if !strings.Contains(got, want) {
				t.Errorf("missing expected string %q in greetd PAM:\n%s", want, got)
			}
		}
		// The managed block must take effect before the stock auth stack.
		if strings.Index(got, GreeterPamManagedBlockStart) > strings.Index(got, "auth include system-auth") {
			t.Fatalf("managed block was not inserted before first auth line:\n%s", got)
		}
	})

	t.Run("avoids duplicate fingerprint when included stack already provides it", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.availableModules["pam_fprintd.so"] = true
		env.fingerprintAvailable = true
		original := "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n"
		env.writePamFile(t, "greetd", original)
		// system-auth already contains pam_fprintd, so no block should be added.
		env.writePamFile(t, "system-auth", "auth sufficient pam_fprintd.so max-tries=1\nauth sufficient pam_unix.so\n")

		settings := AuthSettings{GreeterEnableFprint: true}
		if err := syncGreeterPamConfigWithDeps(func(string) {}, "", settings, false, env.deps(false)); err != nil {
			t.Fatalf("syncGreeterPamConfigWithDeps returned error: %v", err)
		}

		got := readFileString(t, env.greetdPath)
		if got != original {
			t.Fatalf("greetd PAM changed despite included pam_fprintd stack\ngot:\n%s\nwant:\n%s", got, original)
		}
		if strings.Contains(got, GreeterPamManagedBlockStart) {
			t.Fatalf("managed block should not be inserted when included stack already has pam_fprintd:\n%s", got)
		}
	})
}
|
||||||
|
|
||||||
|
func TestRemoveManagedGreeterPamBlockWithDeps(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
env := newPamTestEnv(t)
|
||||||
|
env.writePamFile(t, "greetd", "#%PAM-1.0\n"+
|
||||||
|
legacyGreeterPamFprintComment+"\n"+
|
||||||
|
"auth sufficient pam_fprintd.so max-tries=1\n"+
|
||||||
|
GreeterPamManagedBlockStart+"\n"+
|
||||||
|
"auth sufficient pam_u2f.so cue nouserok timeout=10\n"+
|
||||||
|
GreeterPamManagedBlockEnd+"\n"+
|
||||||
|
"auth include system-auth\n")
|
||||||
|
|
||||||
|
if err := removeManagedGreeterPamBlockWithDeps(func(string) {}, "", env.deps(false)); err != nil {
|
||||||
|
t.Fatalf("removeManagedGreeterPamBlockWithDeps returned error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
got := readFileString(t, env.greetdPath)
|
||||||
|
if strings.Contains(got, GreeterPamManagedBlockStart) || strings.Contains(got, legacyGreeterPamFprintComment) {
|
||||||
|
t.Fatalf("managed or legacy DMS auth lines remained in greetd PAM:\n%s", got)
|
||||||
|
}
|
||||||
|
if !strings.Contains(got, "auth include system-auth") {
|
||||||
|
t.Fatalf("expected non-DMS greetd auth lines to remain:\n%s", got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestSyncAuthConfigWithDeps covers the top-level sync entry point: lockscreen
// targets are created from settings.json, greetd editing is skipped when the
// greeter PAM file is absent, the independent greeter/lockscreen toggles are
// honored, and the NixOS path performs no writes at all.
func TestSyncAuthConfigWithDeps(t *testing.T) {
	t.Parallel()

	t.Run("creates lockscreen targets and skips greetd when greeter is not installed", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.writeSettings(t, `{"enableU2f":true}`)
		env.writePamFile(t, "login", "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n")
		env.writePamFile(t, "system-auth", "auth sufficient pam_unix.so try_first_pass nullok\naccount required pam_access.so\n")

		var logs []string
		err := syncAuthConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", SyncAuthOptions{HomeDir: env.homeDir}, env.deps(false))
		if err != nil {
			t.Fatalf("syncAuthConfigWithDeps returned error: %v", err)
		}

		if _, err := os.Stat(env.dankshellPath); err != nil {
			t.Fatalf("expected dankshell to be created: %v", err)
		}
		if got := readFileString(t, env.dankshellU2fPath); got != buildManagedLockscreenU2FPamContent() {
			t.Fatalf("unexpected dankshell-u2f content:\n%s", got)
		}
		// No greetd file in the sandbox, so the greeter step must be skipped.
		if len(logs) == 0 || !strings.Contains(logs[len(logs)-1], "greetd not found") {
			t.Fatalf("expected greetd skip log, got %v", logs)
		}
	})

	t.Run("separate greeter and lockscreen toggles are respected", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.availableModules["pam_fprintd.so"] = true
		// Lockscreen U2F off; greeter fingerprint on, greeter U2F off.
		env.writeSettings(t, `{"enableU2f":false,"greeterEnableFprint":true,"greeterEnableU2f":false}`)
		env.writePamFile(t, "login", "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n")
		env.writePamFile(t, "system-auth", "auth sufficient pam_unix.so try_first_pass nullok\naccount required pam_access.so\n")
		env.writePamFile(t, "greetd", "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n")

		err := syncAuthConfigWithDeps(func(string) {}, "", SyncAuthOptions{HomeDir: env.homeDir}, env.deps(false))
		if err != nil {
			t.Fatalf("syncAuthConfigWithDeps returned error: %v", err)
		}

		dankshell := readFileString(t, env.dankshellPath)
		if strings.Contains(dankshell, "pam_fprintd") || strings.Contains(dankshell, "pam_u2f") {
			t.Fatalf("lockscreen PAM should strip fingerprint and U2F modules:\n%s", dankshell)
		}
		if _, err := os.Stat(env.dankshellU2fPath); !os.IsNotExist(err) {
			t.Fatalf("expected dankshell-u2f to remain absent when enableU2f is false, stat err = %v", err)
		}

		greetd := readFileString(t, env.greetdPath)
		if !strings.Contains(greetd, "auth sufficient pam_fprintd.so max-tries=1 timeout=5") {
			t.Fatalf("expected greetd PAM to receive fingerprint auth block:\n%s", greetd)
		}
		if strings.Contains(greetd, "auth sufficient pam_u2f.so cue nouserok timeout=10") {
			t.Fatalf("did not expect greetd PAM to receive U2F auth block:\n%s", greetd)
		}
	})

	t.Run("NixOS remains informational and non-mutating", func(t *testing.T) {
		t.Parallel()

		env := newPamTestEnv(t)
		env.availableModules["pam_fprintd.so"] = true
		env.availableModules["pam_u2f.so"] = true
		env.writeSettings(t, `{"enableU2f":true,"greeterEnableFprint":true,"greeterEnableU2f":true}`)
		originalGreetd := "#%PAM-1.0\nauth include system-auth\naccount include system-auth\n"
		env.writePamFile(t, "greetd", originalGreetd)

		var logs []string
		err := syncAuthConfigWithDeps(func(msg string) {
			logs = append(logs, msg)
		}, "", SyncAuthOptions{HomeDir: env.homeDir}, env.deps(true))
		if err != nil {
			t.Fatalf("syncAuthConfigWithDeps returned error: %v", err)
		}

		// NixOS: everything stays read-only even with all toggles on.
		if _, err := os.Stat(env.dankshellPath); !os.IsNotExist(err) {
			t.Fatalf("expected dankshell to remain absent on NixOS path, stat err = %v", err)
		}
		if _, err := os.Stat(env.dankshellU2fPath); !os.IsNotExist(err) {
			t.Fatalf("expected dankshell-u2f to remain absent on NixOS path, stat err = %v", err)
		}
		if got := readFileString(t, env.greetdPath); got != originalGreetd {
			t.Fatalf("expected greetd PAM to remain unchanged on NixOS path\ngot:\n%s\nwant:\n%s", got, originalGreetd)
		}
		if len(logs) < 2 || !strings.Contains(strings.Join(logs, "\n"), "NixOS detected") {
			t.Fatalf("expected informational NixOS logs, got %v", logs)
		}
	})
}
|
||||||
@@ -258,7 +258,7 @@ func (i *ExtWorkspaceManagerV1) Dispatch(opcode uint32, fd int, data []byte) {
|
|||||||
l := 0
|
l := 0
|
||||||
objectID := client.Uint32(data[l : l+4])
|
objectID := client.Uint32(data[l : l+4])
|
||||||
proxy := i.Context().GetProxy(objectID)
|
proxy := i.Context().GetProxy(objectID)
|
||||||
if proxy != nil {
|
if proxy != nil && !proxy.IsZombie() {
|
||||||
e.WorkspaceGroup = proxy.(*ExtWorkspaceGroupHandleV1)
|
e.WorkspaceGroup = proxy.(*ExtWorkspaceGroupHandleV1)
|
||||||
} else {
|
} else {
|
||||||
groupHandle := &ExtWorkspaceGroupHandleV1{}
|
groupHandle := &ExtWorkspaceGroupHandleV1{}
|
||||||
@@ -278,7 +278,7 @@ func (i *ExtWorkspaceManagerV1) Dispatch(opcode uint32, fd int, data []byte) {
|
|||||||
l := 0
|
l := 0
|
||||||
objectID := client.Uint32(data[l : l+4])
|
objectID := client.Uint32(data[l : l+4])
|
||||||
proxy := i.Context().GetProxy(objectID)
|
proxy := i.Context().GetProxy(objectID)
|
||||||
if proxy != nil {
|
if proxy != nil && !proxy.IsZombie() {
|
||||||
e.Workspace = proxy.(*ExtWorkspaceHandleV1)
|
e.Workspace = proxy.(*ExtWorkspaceHandleV1)
|
||||||
} else {
|
} else {
|
||||||
wsHandle := &ExtWorkspaceHandleV1{}
|
wsHandle := &ExtWorkspaceHandleV1{}
|
||||||
|
|||||||
@@ -444,20 +444,21 @@ func GetFocusedMonitor() string {
|
|||||||
|
|
||||||
type outputInfo struct {
|
type outputInfo struct {
|
||||||
x, y int32
|
x, y int32
|
||||||
|
scale float64
|
||||||
transform int32
|
transform int32
|
||||||
}
|
}
|
||||||
|
|
||||||
func getOutputInfo(outputName string) (*outputInfo, bool) {
|
func getAllOutputInfos() map[string]*outputInfo {
|
||||||
display, err := client.Connect("")
|
display, err := client.Connect("")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, false
|
return nil
|
||||||
}
|
}
|
||||||
ctx := display.Context()
|
ctx := display.Context()
|
||||||
defer ctx.Close()
|
defer ctx.Close()
|
||||||
|
|
||||||
registry, err := display.GetRegistry()
|
registry, err := display.GetRegistry()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, false
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var outputManager *wlr_output_management.ZwlrOutputManagerV1
|
var outputManager *wlr_output_management.ZwlrOutputManagerV1
|
||||||
@@ -476,16 +477,17 @@ func getOutputInfo(outputName string) (*outputInfo, bool) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
if err := wlhelpers.Roundtrip(display, ctx); err != nil {
|
if err := wlhelpers.Roundtrip(display, ctx); err != nil {
|
||||||
return nil, false
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if outputManager == nil {
|
if outputManager == nil {
|
||||||
return nil, false
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type headState struct {
|
type headState struct {
|
||||||
name string
|
name string
|
||||||
x, y int32
|
x, y int32
|
||||||
|
scale float64
|
||||||
transform int32
|
transform int32
|
||||||
}
|
}
|
||||||
heads := make(map[*wlr_output_management.ZwlrOutputHeadV1]*headState)
|
heads := make(map[*wlr_output_management.ZwlrOutputHeadV1]*headState)
|
||||||
@@ -501,6 +503,9 @@ func getOutputInfo(outputName string) (*outputInfo, bool) {
|
|||||||
state.x = pe.X
|
state.x = pe.X
|
||||||
state.y = pe.Y
|
state.y = pe.Y
|
||||||
})
|
})
|
||||||
|
e.Head.SetScaleHandler(func(se wlr_output_management.ZwlrOutputHeadV1ScaleEvent) {
|
||||||
|
state.scale = se.Scale
|
||||||
|
})
|
||||||
e.Head.SetTransformHandler(func(te wlr_output_management.ZwlrOutputHeadV1TransformEvent) {
|
e.Head.SetTransformHandler(func(te wlr_output_management.ZwlrOutputHeadV1TransformEvent) {
|
||||||
state.transform = te.Transform
|
state.transform = te.Transform
|
||||||
})
|
})
|
||||||
@@ -511,21 +516,32 @@ func getOutputInfo(outputName string) (*outputInfo, bool) {
|
|||||||
|
|
||||||
for !done {
|
for !done {
|
||||||
if err := ctx.Dispatch(); err != nil {
|
if err := ctx.Dispatch(); err != nil {
|
||||||
return nil, false
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
result := make(map[string]*outputInfo, len(heads))
|
||||||
for _, state := range heads {
|
for _, state := range heads {
|
||||||
if state.name == outputName {
|
if state.name == "" {
|
||||||
return &outputInfo{
|
continue
|
||||||
x: state.x,
|
}
|
||||||
y: state.y,
|
result[state.name] = &outputInfo{
|
||||||
transform: state.transform,
|
x: state.x,
|
||||||
}, true
|
y: state.y,
|
||||||
|
scale: state.scale,
|
||||||
|
transform: state.transform,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
return nil, false
|
func getOutputInfo(outputName string) (*outputInfo, bool) {
|
||||||
|
infos := getAllOutputInfos()
|
||||||
|
if infos == nil {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
info, ok := infos[outputName]
|
||||||
|
return info, ok
|
||||||
}
|
}
|
||||||
|
|
||||||
func getDWLActiveWindow() (*WindowGeometry, error) {
|
func getDWLActiveWindow() (*WindowGeometry, error) {
|
||||||
|
|||||||
@@ -108,12 +108,16 @@ func NewRegionSelector(s *Screenshoter) *RegionSelector {
|
|||||||
screenshoter: s,
|
screenshoter: s,
|
||||||
outputs: make(map[uint32]*WaylandOutput),
|
outputs: make(map[uint32]*WaylandOutput),
|
||||||
preCapture: make(map[*WaylandOutput]*PreCapture),
|
preCapture: make(map[*WaylandOutput]*PreCapture),
|
||||||
showCapturedCursor: true,
|
showCapturedCursor: s.config.Cursor == CursorOn,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *RegionSelector) Run() (*CaptureResult, bool, error) {
|
func (r *RegionSelector) Run() (*CaptureResult, bool, error) {
|
||||||
r.preSelect = GetLastRegion()
|
if r.screenshoter != nil && r.screenshoter.config.Reset {
|
||||||
|
r.preSelect = Region{}
|
||||||
|
} else {
|
||||||
|
r.preSelect = GetLastRegion()
|
||||||
|
}
|
||||||
|
|
||||||
if err := r.connect(); err != nil {
|
if err := r.connect(); err != nil {
|
||||||
return nil, false, fmt.Errorf("wayland connect: %w", err)
|
return nil, false, fmt.Errorf("wayland connect: %w", err)
|
||||||
|
|||||||
@@ -114,6 +114,9 @@ func (r *RegionSelector) setupPointerHandlers() {
|
|||||||
for _, os := range r.surfaces {
|
for _, os := range r.surfaces {
|
||||||
r.redrawSurface(os)
|
r.redrawSurface(os)
|
||||||
}
|
}
|
||||||
|
if r.screenshoter != nil && r.screenshoter.config.NoConfirm && r.selection.hasSelection {
|
||||||
|
r.finishSelection()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
r.cancelled = true
|
r.cancelled = true
|
||||||
|
|||||||
@@ -138,9 +138,13 @@ func (r *RegionSelector) drawHUD(data []byte, stride, bufW, bufH int, format uin
|
|||||||
if !r.showCapturedCursor {
|
if !r.showCapturedCursor {
|
||||||
cursorLabel = "show"
|
cursorLabel = "show"
|
||||||
}
|
}
|
||||||
|
captureKey := "Space/Enter"
|
||||||
|
if r.screenshoter != nil && r.screenshoter.config.NoConfirm {
|
||||||
|
captureKey = "Drag+Release"
|
||||||
|
}
|
||||||
|
|
||||||
items := []struct{ key, desc string }{
|
items := []struct{ key, desc string }{
|
||||||
{"Space/Enter", "capture"},
|
{captureKey, "capture"},
|
||||||
{"P", cursorLabel + " cursor"},
|
{"P", cursorLabel + " cursor"},
|
||||||
{"Esc", "cancel"},
|
{"Esc", "cancel"},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package screenshot
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"math"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
@@ -106,6 +107,12 @@ func (s *Screenshoter) captureLastRegion() (*CaptureResult, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (s *Screenshoter) captureRegion() (*CaptureResult, error) {
|
func (s *Screenshoter) captureRegion() (*CaptureResult, error) {
|
||||||
|
if s.config.Reset {
|
||||||
|
if err := SaveLastRegion(Region{}); err != nil {
|
||||||
|
log.Debug("failed to reset last region", "err", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
selector := NewRegionSelector(s)
|
selector := NewRegionSelector(s)
|
||||||
result, cancelled, err := selector.Run()
|
result, cancelled, err := selector.Run()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -298,22 +305,20 @@ func (s *Screenshoter) captureAllScreens() (*CaptureResult, error) {
|
|||||||
if len(outputs) == 0 {
|
if len(outputs) == 0 {
|
||||||
return nil, fmt.Errorf("no outputs available")
|
return nil, fmt.Errorf("no outputs available")
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(outputs) == 1 {
|
if len(outputs) == 1 {
|
||||||
return s.captureWholeOutput(outputs[0])
|
return s.captureWholeOutput(outputs[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
// Capture all outputs first to get actual buffer sizes
|
wlrInfos := getAllOutputInfos()
|
||||||
type capturedOutput struct {
|
|
||||||
output *WaylandOutput
|
|
||||||
result *CaptureResult
|
|
||||||
physX int
|
|
||||||
physY int
|
|
||||||
}
|
|
||||||
captured := make([]capturedOutput, 0, len(outputs))
|
|
||||||
|
|
||||||
var minX, minY, maxX, maxY int
|
type pendingOutput struct {
|
||||||
first := true
|
result *CaptureResult
|
||||||
|
logX float64
|
||||||
|
logY float64
|
||||||
|
scale float64
|
||||||
|
}
|
||||||
|
var pending []pendingOutput
|
||||||
|
maxScale := 1.0
|
||||||
|
|
||||||
for _, output := range outputs {
|
for _, output := range outputs {
|
||||||
result, err := s.captureWholeOutput(output)
|
result, err := s.captureWholeOutput(output)
|
||||||
@@ -322,50 +327,74 @@ func (s *Screenshoter) captureAllScreens() (*CaptureResult, error) {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
outX, outY := output.x, output.y
|
logX, logY := float64(output.x), float64(output.y)
|
||||||
scale := float64(output.scale)
|
scale := float64(output.scale)
|
||||||
|
|
||||||
switch DetectCompositor() {
|
switch DetectCompositor() {
|
||||||
case CompositorHyprland:
|
case CompositorHyprland:
|
||||||
if hx, hy, _, _, ok := GetHyprlandMonitorGeometry(output.name); ok {
|
if hx, hy, _, _, ok := GetHyprlandMonitorGeometry(output.name); ok {
|
||||||
outX, outY = hx, hy
|
logX, logY = float64(hx), float64(hy)
|
||||||
}
|
}
|
||||||
if s := GetHyprlandMonitorScale(output.name); s > 0 {
|
if hs := GetHyprlandMonitorScale(output.name); hs > 0 {
|
||||||
scale = s
|
scale = hs
|
||||||
}
|
}
|
||||||
case CompositorDWL:
|
default:
|
||||||
if info, ok := getOutputInfo(output.name); ok {
|
if wlrInfos != nil {
|
||||||
outX, outY = info.x, info.y
|
if info, ok := wlrInfos[output.name]; ok {
|
||||||
|
logX, logY = float64(info.x), float64(info.y)
|
||||||
|
if info.scale > 0 {
|
||||||
|
scale = info.scale
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if scale <= 0 {
|
if scale <= 0 {
|
||||||
scale = 1.0
|
scale = 1.0
|
||||||
}
|
}
|
||||||
|
|
||||||
physX := int(float64(outX) * scale)
|
pending = append(pending, pendingOutput{result: result, logX: logX, logY: logY, scale: scale})
|
||||||
physY := int(float64(outY) * scale)
|
if scale > maxScale {
|
||||||
|
maxScale = scale
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
captured = append(captured, capturedOutput{
|
if len(pending) == 0 {
|
||||||
output: output,
|
return nil, fmt.Errorf("failed to capture any outputs")
|
||||||
result: result,
|
}
|
||||||
physX: physX,
|
if len(pending) == 1 {
|
||||||
physY: physY,
|
return pending[0].result, nil
|
||||||
})
|
}
|
||||||
|
|
||||||
right := physX + result.Buffer.Width
|
type layoutEntry struct {
|
||||||
bottom := physY + result.Buffer.Height
|
result *CaptureResult
|
||||||
|
canvasX int
|
||||||
|
canvasY int
|
||||||
|
canvasW int
|
||||||
|
canvasH int
|
||||||
|
}
|
||||||
|
entries := make([]layoutEntry, len(pending))
|
||||||
|
var minX, minY, maxX, maxY int
|
||||||
|
|
||||||
if first {
|
for i, p := range pending {
|
||||||
minX, minY = physX, physY
|
cx := int(math.Round(p.logX * maxScale))
|
||||||
maxX, maxY = right, bottom
|
cy := int(math.Round(p.logY * maxScale))
|
||||||
first = false
|
cw := int(math.Round(float64(p.result.Buffer.Width) * maxScale / p.scale))
|
||||||
|
ch := int(math.Round(float64(p.result.Buffer.Height) * maxScale / p.scale))
|
||||||
|
|
||||||
|
entries[i] = layoutEntry{result: p.result, canvasX: cx, canvasY: cy, canvasW: cw, canvasH: ch}
|
||||||
|
|
||||||
|
right := cx + cw
|
||||||
|
bottom := cy + ch
|
||||||
|
if i == 0 {
|
||||||
|
minX, minY, maxX, maxY = cx, cy, right, bottom
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if cx < minX {
|
||||||
if physX < minX {
|
minX = cx
|
||||||
minX = physX
|
|
||||||
}
|
}
|
||||||
if physY < minY {
|
if cy < minY {
|
||||||
minY = physY
|
minY = cy
|
||||||
}
|
}
|
||||||
if right > maxX {
|
if right > maxX {
|
||||||
maxX = right
|
maxX = right
|
||||||
@@ -375,35 +404,26 @@ func (s *Screenshoter) captureAllScreens() (*CaptureResult, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(captured) == 0 {
|
|
||||||
return nil, fmt.Errorf("failed to capture any outputs")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(captured) == 1 {
|
|
||||||
return captured[0].result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
totalW := maxX - minX
|
totalW := maxX - minX
|
||||||
totalH := maxY - minY
|
totalH := maxY - minY
|
||||||
|
composite, err := CreateShmBuffer(totalW, totalH, totalW*4)
|
||||||
compositeStride := totalW * 4
|
|
||||||
composite, err := CreateShmBuffer(totalW, totalH, compositeStride)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
for _, c := range captured {
|
for _, e := range entries {
|
||||||
c.result.Buffer.Close()
|
e.result.Buffer.Close()
|
||||||
}
|
}
|
||||||
return nil, fmt.Errorf("create composite buffer: %w", err)
|
return nil, fmt.Errorf("create composite buffer: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
composite.Clear()
|
composite.Clear()
|
||||||
|
|
||||||
var format uint32
|
var format uint32
|
||||||
for _, c := range captured {
|
for _, e := range entries {
|
||||||
if format == 0 {
|
if format == 0 {
|
||||||
format = c.result.Format
|
format = e.result.Format
|
||||||
}
|
}
|
||||||
s.blitBuffer(composite, c.result.Buffer, c.physX-minX, c.physY-minY, c.result.YInverted)
|
s.blitBufferScaled(composite, e.result.Buffer,
|
||||||
c.result.Buffer.Close()
|
e.canvasX-minX, e.canvasY-minY, e.canvasW, e.canvasH,
|
||||||
|
e.result.YInverted)
|
||||||
|
e.result.Buffer.Close()
|
||||||
}
|
}
|
||||||
|
|
||||||
return &CaptureResult{
|
return &CaptureResult{
|
||||||
@@ -413,32 +433,44 @@ func (s *Screenshoter) captureAllScreens() (*CaptureResult, error) {
|
|||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Screenshoter) blitBuffer(dst, src *ShmBuffer, dstX, dstY int, yInverted bool) {
|
func (s *Screenshoter) blitBufferScaled(dst, src *ShmBuffer, dstX, dstY, dstW, dstH int, yInverted bool) {
|
||||||
|
if dstW <= 0 || dstH <= 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
srcData := src.Data()
|
srcData := src.Data()
|
||||||
dstData := dst.Data()
|
dstData := dst.Data()
|
||||||
|
|
||||||
for srcY := 0; srcY < src.Height; srcY++ {
|
for dy := 0; dy < dstH; dy++ {
|
||||||
actualSrcY := srcY
|
canvasY := dstY + dy
|
||||||
if yInverted {
|
if canvasY < 0 || canvasY >= dst.Height {
|
||||||
actualSrcY = src.Height - 1 - srcY
|
|
||||||
}
|
|
||||||
|
|
||||||
dy := dstY + srcY
|
|
||||||
if dy < 0 || dy >= dst.Height {
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
srcRowOff := actualSrcY * src.Stride
|
srcY := dy * src.Height / dstH
|
||||||
dstRowOff := dy * dst.Stride
|
if yInverted {
|
||||||
|
srcY = src.Height - 1 - srcY
|
||||||
|
}
|
||||||
|
if srcY < 0 || srcY >= src.Height {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
for srcX := 0; srcX < src.Width; srcX++ {
|
srcRowOff := srcY * src.Stride
|
||||||
dx := dstX + srcX
|
dstRowOff := canvasY * dst.Stride
|
||||||
if dx < 0 || dx >= dst.Width {
|
|
||||||
|
for dx := 0; dx < dstW; dx++ {
|
||||||
|
canvasX := dstX + dx
|
||||||
|
if canvasX < 0 || canvasX >= dst.Width {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
srcX := dx * src.Width / dstW
|
||||||
|
if srcX >= src.Width {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
si := srcRowOff + srcX*4
|
si := srcRowOff + srcX*4
|
||||||
di := dstRowOff + dx*4
|
di := dstRowOff + canvasX*4
|
||||||
|
|
||||||
if si+3 >= len(srcData) || di+3 >= len(dstData) {
|
if si+3 >= len(srcData) || di+3 >= len(dstData) {
|
||||||
continue
|
continue
|
||||||
@@ -453,10 +485,7 @@ func (s *Screenshoter) blitBuffer(dst, src *ShmBuffer, dstX, dstY int, yInverted
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (s *Screenshoter) captureWholeOutput(output *WaylandOutput) (*CaptureResult, error) {
|
func (s *Screenshoter) captureWholeOutput(output *WaylandOutput) (*CaptureResult, error) {
|
||||||
cursor := int32(0)
|
cursor := int32(s.config.Cursor)
|
||||||
if s.config.IncludeCursor {
|
|
||||||
cursor = 1
|
|
||||||
}
|
|
||||||
|
|
||||||
frame, err := s.screencopy.CaptureOutput(cursor, output.wlOutput)
|
frame, err := s.screencopy.CaptureOutput(cursor, output.wlOutput)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -624,10 +653,7 @@ func (s *Screenshoter) captureRegionOnOutput(output *WaylandOutput, region Regio
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
cursor := int32(0)
|
cursor := int32(s.config.Cursor)
|
||||||
if s.config.IncludeCursor {
|
|
||||||
cursor = 1
|
|
||||||
}
|
|
||||||
|
|
||||||
frame, err := s.screencopy.CaptureOutputRegion(cursor, output.wlOutput, localX, localY, w, h)
|
frame, err := s.screencopy.CaptureOutputRegion(cursor, output.wlOutput, localX, localY, w, h)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -19,6 +19,13 @@ const (
|
|||||||
FormatPPM
|
FormatPPM
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type CursorMode int
|
||||||
|
|
||||||
|
const (
|
||||||
|
CursorOff CursorMode = iota
|
||||||
|
CursorOn
|
||||||
|
)
|
||||||
|
|
||||||
type Region struct {
|
type Region struct {
|
||||||
X int32 `json:"x"`
|
X int32 `json:"x"`
|
||||||
Y int32 `json:"y"`
|
Y int32 `json:"y"`
|
||||||
@@ -42,29 +49,33 @@ type Output struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
Mode Mode
|
Mode Mode
|
||||||
OutputName string
|
OutputName string
|
||||||
IncludeCursor bool
|
Cursor CursorMode
|
||||||
Format Format
|
NoConfirm bool
|
||||||
Quality int
|
Reset bool
|
||||||
OutputDir string
|
Format Format
|
||||||
Filename string
|
Quality int
|
||||||
Clipboard bool
|
OutputDir string
|
||||||
SaveFile bool
|
Filename string
|
||||||
Notify bool
|
Clipboard bool
|
||||||
Stdout bool
|
SaveFile bool
|
||||||
|
Notify bool
|
||||||
|
Stdout bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func DefaultConfig() Config {
|
func DefaultConfig() Config {
|
||||||
return Config{
|
return Config{
|
||||||
Mode: ModeRegion,
|
Mode: ModeRegion,
|
||||||
IncludeCursor: false,
|
Cursor: CursorOff,
|
||||||
Format: FormatPNG,
|
NoConfirm: false,
|
||||||
Quality: 90,
|
Reset: false,
|
||||||
OutputDir: "",
|
Format: FormatPNG,
|
||||||
Filename: "",
|
Quality: 90,
|
||||||
Clipboard: true,
|
OutputDir: "",
|
||||||
SaveFile: true,
|
Filename: "",
|
||||||
Notify: true,
|
Clipboard: true,
|
||||||
|
SaveFile: true,
|
||||||
|
Notify: true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -311,6 +311,10 @@ func (m *Manager) handleDevicePropertiesChanged(path dbus.ObjectPath, changed ma
|
|||||||
select {
|
select {
|
||||||
case m.eventQueue <- func() {
|
case m.eventQueue <- func() {
|
||||||
time.Sleep(300 * time.Millisecond)
|
time.Sleep(300 * time.Millisecond)
|
||||||
|
log.Infof("[Bluetooth] Auto-trusting newly paired device: %s", devicePath)
|
||||||
|
if err := m.TrustDevice(devicePath, true); err != nil {
|
||||||
|
log.Warnf("[Bluetooth] Auto-trust failed: %v", err)
|
||||||
|
}
|
||||||
log.Infof("[Bluetooth] Auto-connecting newly paired device: %s", devicePath)
|
log.Infof("[Bluetooth] Auto-connecting newly paired device: %s", devicePath)
|
||||||
if err := m.ConnectDevice(devicePath); err != nil {
|
if err := m.ConnectDevice(devicePath); err != nil {
|
||||||
log.Warnf("[Bluetooth] Auto-connect failed: %v", err)
|
log.Warnf("[Bluetooth] Auto-connect failed: %v", err)
|
||||||
|
|||||||
@@ -6,12 +6,20 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
"syscall"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
"github.com/pilebones/go-udev/netlink"
|
"github.com/pilebones/go-udev/netlink"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
udevRecvBufSize = 8 * 1024 * 1024
|
||||||
|
udevMaxRetries = 5
|
||||||
|
udevBaseDelay = 2 * time.Second
|
||||||
|
udevMaxDelay = 60 * time.Second
|
||||||
|
)
|
||||||
|
|
||||||
type UdevMonitor struct {
|
type UdevMonitor struct {
|
||||||
stop chan struct{}
|
stop chan struct{}
|
||||||
rescanMutex sync.Mutex
|
rescanMutex sync.Mutex
|
||||||
@@ -29,13 +37,6 @@ func NewUdevMonitor(manager *Manager) *UdevMonitor {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (m *UdevMonitor) run(manager *Manager) {
|
func (m *UdevMonitor) run(manager *Manager) {
|
||||||
conn := &netlink.UEventConn{}
|
|
||||||
if err := conn.Connect(netlink.UdevEvent); err != nil {
|
|
||||||
log.Errorf("Failed to connect to udev netlink: %v", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
defer conn.Close()
|
|
||||||
|
|
||||||
matcher := &netlink.RuleDefinitions{
|
matcher := &netlink.RuleDefinitions{
|
||||||
Rules: []netlink.RuleDefinition{
|
Rules: []netlink.RuleDefinition{
|
||||||
{Env: map[string]string{"SUBSYSTEM": "backlight"}},
|
{Env: map[string]string{"SUBSYSTEM": "backlight"}},
|
||||||
@@ -48,6 +49,46 @@ func (m *UdevMonitor) run(manager *Manager) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
failures := 0
|
||||||
|
for {
|
||||||
|
if err := m.monitorLoop(manager, matcher); err != nil {
|
||||||
|
log.Errorf("Udev monitor error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-m.stop:
|
||||||
|
return
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
|
||||||
|
failures++
|
||||||
|
if failures > udevMaxRetries {
|
||||||
|
log.Errorf("Udev monitor exceeded %d retries, giving up", udevMaxRetries)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
delay := min(udevBaseDelay*time.Duration(1<<(failures-1)), udevMaxDelay)
|
||||||
|
log.Infof("Udev monitor reconnecting in %v (attempt %d/%d)", delay, failures, udevMaxRetries)
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-m.stop:
|
||||||
|
return
|
||||||
|
case <-time.After(delay):
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *UdevMonitor) monitorLoop(manager *Manager, matcher *netlink.RuleDefinitions) error {
|
||||||
|
conn := &netlink.UEventConn{}
|
||||||
|
if err := conn.Connect(netlink.UdevEvent); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer conn.Close()
|
||||||
|
|
||||||
|
if err := syscall.SetsockoptInt(conn.Fd, syscall.SOL_SOCKET, syscall.SO_RCVBUF, udevRecvBufSize); err != nil {
|
||||||
|
log.Warnf("Failed to set udev socket receive buffer: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
events := make(chan netlink.UEvent)
|
events := make(chan netlink.UEvent)
|
||||||
errs := make(chan error)
|
errs := make(chan error)
|
||||||
conn.Monitor(events, errs, matcher)
|
conn.Monitor(events, errs, matcher)
|
||||||
@@ -57,10 +98,9 @@ func (m *UdevMonitor) run(manager *Manager) {
|
|||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
case <-m.stop:
|
case <-m.stop:
|
||||||
return
|
return nil
|
||||||
case err := <-errs:
|
case err := <-errs:
|
||||||
log.Errorf("Udev monitor error: %v", err)
|
return err
|
||||||
return
|
|
||||||
case event := <-events:
|
case event := <-events:
|
||||||
m.handleEvent(manager, event)
|
m.handleEvent(manager, event)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -232,8 +232,15 @@ func (m *Manager) setupDataDeviceSync() {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
prevOffer := m.currentOffer
|
||||||
m.currentOffer = offer
|
m.currentOffer = offer
|
||||||
|
|
||||||
|
if prevOffer != nil && prevOffer != offer {
|
||||||
|
m.offerMutex.Lock()
|
||||||
|
delete(m.offerMimeTypes, prevOffer)
|
||||||
|
m.offerMutex.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
m.offerMutex.RLock()
|
m.offerMutex.RLock()
|
||||||
mimes := m.offerMimeTypes[offer]
|
mimes := m.offerMimeTypes[offer]
|
||||||
m.offerMutex.RUnlock()
|
m.offerMutex.RUnlock()
|
||||||
@@ -587,20 +594,26 @@ func (m *Manager) uriListPreview(data []byte) (string, bool) {
|
|||||||
uris = strings.Split(text, "\n")
|
uris = strings.Split(text, "\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(uris) > 1 {
|
||||||
|
return fmt.Sprintf("[[ %d files ]]", len(uris)), false
|
||||||
|
}
|
||||||
|
|
||||||
if len(uris) == 1 && strings.HasPrefix(uris[0], "file://") {
|
if len(uris) == 1 && strings.HasPrefix(uris[0], "file://") {
|
||||||
filePath := strings.TrimPrefix(uris[0], "file://")
|
filePath := strings.TrimPrefix(uris[0], "file://")
|
||||||
if info, err := os.Stat(filePath); err == nil && !info.IsDir() {
|
info, err := os.Stat(filePath)
|
||||||
|
if err != nil || info.IsDir() {
|
||||||
|
return m.textPreview(data), false
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg := m.getConfig()
|
||||||
|
if info.Size() <= cfg.MaxEntrySize {
|
||||||
if imgData, err := os.ReadFile(filePath); err == nil {
|
if imgData, err := os.ReadFile(filePath); err == nil {
|
||||||
if config, imgFmt, err := image.DecodeConfig(bytes.NewReader(imgData)); err == nil {
|
if config, imgFmt, err := image.DecodeConfig(bytes.NewReader(imgData)); err == nil {
|
||||||
return fmt.Sprintf("[[ file %s %s %dx%d ]]", filepath.Base(filePath), imgFmt, config.Width, config.Height), true
|
return fmt.Sprintf("[[ file %s %s %dx%d ]]", filepath.Base(filePath), imgFmt, config.Width, config.Height), true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("[[ file %s ]]", filepath.Base(filePath)), false
|
|
||||||
}
|
}
|
||||||
}
|
return fmt.Sprintf("[[ file %s ]]", filepath.Base(filePath)), false
|
||||||
|
|
||||||
if len(uris) > 1 {
|
|
||||||
return fmt.Sprintf("[[ %d files ]]", len(uris)), false
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return m.textPreview(data), false
|
return m.textPreview(data), false
|
||||||
@@ -623,6 +636,11 @@ func (m *Manager) tryReadImageFromURI(data []byte) ([]byte, string, bool) {
|
|||||||
return nil, "", false
|
return nil, "", false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
cfg := m.getConfig()
|
||||||
|
if info.Size() > cfg.MaxEntrySize {
|
||||||
|
return nil, "", false
|
||||||
|
}
|
||||||
|
|
||||||
imgData, err := os.ReadFile(filePath)
|
imgData, err := os.ReadFile(filePath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, "", false
|
return nil, "", false
|
||||||
|
|||||||
@@ -2,8 +2,10 @@ package cups
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"net"
|
"net"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"os/exec"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@@ -275,13 +277,42 @@ func (m *Manager) GetClasses() ([]PrinterClass, error) {
|
|||||||
return classes, nil
|
return classes, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func createPrinterViaLpadmin(name, deviceURI, ppd, information, location string) error {
|
||||||
|
args := []string{"-p", name, "-E", "-v", deviceURI, "-m", ppd}
|
||||||
|
if information != "" {
|
||||||
|
args = append(args, "-D", information)
|
||||||
|
}
|
||||||
|
if location != "" {
|
||||||
|
args = append(args, "-L", location)
|
||||||
|
}
|
||||||
|
out, err := exec.Command("lpadmin", args...).CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("lpadmin failed: %s: %w", strings.TrimSpace(string(out)), err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func deletePrinterViaLpadmin(name string) error {
|
||||||
|
out, err := exec.Command("lpadmin", "-x", name).CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("lpadmin failed: %s: %w", strings.TrimSpace(string(out)), err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (m *Manager) CreatePrinter(name, deviceURI, ppd string, shared bool, errorPolicy, information, location string) error {
|
func (m *Manager) CreatePrinter(name, deviceURI, ppd string, shared bool, errorPolicy, information, location string) error {
|
||||||
usedPkHelper := false
|
usedPkHelper := false
|
||||||
|
|
||||||
err := m.client.CreatePrinter(name, deviceURI, ppd, shared, errorPolicy, information, location)
|
err := m.client.CreatePrinter(name, deviceURI, ppd, shared, errorPolicy, information, location)
|
||||||
if isAuthError(err) && m.pkHelper != nil {
|
if isAuthError(err) && m.pkHelper != nil {
|
||||||
if err = m.pkHelper.PrinterAdd(name, deviceURI, ppd, information, location); err != nil {
|
if err = m.pkHelper.PrinterAdd(name, deviceURI, ppd, information, location); err != nil {
|
||||||
return err
|
// pkHelper failed (e.g., no polkit agent), try lpadmin as last resort.
|
||||||
|
// lpadmin -E enables the printer, so no further setup needed.
|
||||||
|
if lpadminErr := createPrinterViaLpadmin(name, deviceURI, ppd, information, location); lpadminErr != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
m.RefreshState()
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
usedPkHelper = true
|
usedPkHelper = true
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
@@ -308,6 +339,12 @@ func (m *Manager) DeletePrinter(printerName string) error {
|
|||||||
err := m.client.DeletePrinter(printerName)
|
err := m.client.DeletePrinter(printerName)
|
||||||
if isAuthError(err) && m.pkHelper != nil {
|
if isAuthError(err) && m.pkHelper != nil {
|
||||||
err = m.pkHelper.PrinterDelete(printerName)
|
err = m.pkHelper.PrinterDelete(printerName)
|
||||||
|
if err != nil {
|
||||||
|
// pkHelper failed, try lpadmin as last resort
|
||||||
|
if lpadminErr := deletePrinterViaLpadmin(printerName); lpadminErr == nil {
|
||||||
|
err = nil
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if err == nil {
|
if err == nil {
|
||||||
m.RefreshState()
|
m.RefreshState()
|
||||||
|
|||||||
@@ -70,6 +70,8 @@ func HandleRequest(conn net.Conn, req models.Request, manager *Manager) {
|
|||||||
handleRestartJob(conn, req, manager)
|
handleRestartJob(conn, req, manager)
|
||||||
case "cups.holdJob":
|
case "cups.holdJob":
|
||||||
handleHoldJob(conn, req, manager)
|
handleHoldJob(conn, req, manager)
|
||||||
|
case "cups.testConnection":
|
||||||
|
handleTestConnection(conn, req, manager)
|
||||||
default:
|
default:
|
||||||
models.RespondError(conn, req.ID, fmt.Sprintf("unknown method: %s", req.Method))
|
models.RespondError(conn, req.ID, fmt.Sprintf("unknown method: %s", req.Method))
|
||||||
}
|
}
|
||||||
@@ -464,3 +466,22 @@ func handleHoldJob(conn net.Conn, req models.Request, manager *Manager) {
|
|||||||
}
|
}
|
||||||
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "job held"})
|
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "job held"})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func handleTestConnection(conn net.Conn, req models.Request, manager *Manager) {
|
||||||
|
host, err := params.StringNonEmpty(req.Params, "host")
|
||||||
|
if err != nil {
|
||||||
|
models.RespondError(conn, req.ID, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
port := params.IntOpt(req.Params, "port", 631)
|
||||||
|
protocol := params.StringOpt(req.Params, "protocol", "ipp")
|
||||||
|
|
||||||
|
result, err := manager.TestRemotePrinter(host, port, protocol)
|
||||||
|
if err != nil {
|
||||||
|
models.RespondError(conn, req.ID, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
models.Respond(conn, req.ID, result)
|
||||||
|
}
|
||||||
|
|||||||
176
core/internal/server/cups/test_connection.go
Normal file
176
core/internal/server/cups/test_connection.go
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
package cups
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/pkg/ipp"
|
||||||
|
)
|
||||||
|
|
||||||
|
var validProtocols = map[string]bool{
|
||||||
|
"ipp": true,
|
||||||
|
"ipps": true,
|
||||||
|
"lpd": true,
|
||||||
|
"socket": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTestConnectionParams(host string, port int, protocol string) error {
|
||||||
|
if host == "" {
|
||||||
|
return errors.New("host is required")
|
||||||
|
}
|
||||||
|
if strings.ContainsAny(host, " \t\n\r/\\") {
|
||||||
|
return errors.New("host contains invalid characters")
|
||||||
|
}
|
||||||
|
if port < 1 || port > 65535 {
|
||||||
|
return errors.New("port must be between 1 and 65535")
|
||||||
|
}
|
||||||
|
if protocol != "" && !validProtocols[protocol] {
|
||||||
|
return errors.New("protocol must be one of: ipp, ipps, lpd, socket")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
const probeTimeout = 10 * time.Second
|
||||||
|
|
||||||
|
func probeRemotePrinter(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
addr := net.JoinHostPort(host, fmt.Sprintf("%d", port))
|
||||||
|
|
||||||
|
// Fast fail: TCP reachability check
|
||||||
|
conn, err := net.DialTimeout("tcp", addr, probeTimeout)
|
||||||
|
if err != nil {
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: false,
|
||||||
|
Error: fmt.Sprintf("cannot reach %s: %s", addr, err.Error()),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
conn.Close()
|
||||||
|
|
||||||
|
// Create a temporary IPP client pointing at the remote host.
|
||||||
|
// The TCP dial above provides fast-fail for unreachable hosts.
|
||||||
|
// The IPP adapter's ResponseHeaderTimeout (90s) bounds stalling servers.
|
||||||
|
client := ipp.NewIPPClient(host, port, "", "", useTLS)
|
||||||
|
|
||||||
|
// Try /ipp/print first (modern driverless printers), then / (legacy)
|
||||||
|
info, err := probeIPPEndpoint(client, host, port, useTLS, "/ipp/print")
|
||||||
|
if err != nil {
|
||||||
|
// If we got an auth error, the printer exists but requires credentials.
|
||||||
|
// Report it as reachable with the URI that triggered the auth challenge.
|
||||||
|
if isAuthError(err) {
|
||||||
|
proto := "ipp"
|
||||||
|
if useTLS {
|
||||||
|
proto = "ipps"
|
||||||
|
}
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
URI: fmt.Sprintf("%s://%s:%d/ipp/print", proto, host, port),
|
||||||
|
Info: "authentication required",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
info, err = probeIPPEndpoint(client, host, port, useTLS, "/")
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
if isAuthError(err) {
|
||||||
|
proto := "ipp"
|
||||||
|
if useTLS {
|
||||||
|
proto = "ipps"
|
||||||
|
}
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
URI: fmt.Sprintf("%s://%s:%d/", proto, host, port),
|
||||||
|
Info: "authentication required",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
// TCP reachable but not an IPP printer
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
Error: fmt.Sprintf("host is reachable but does not appear to be an IPP printer: %s", err.Error()),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return info, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func probeIPPEndpoint(client *ipp.IPPClient, host string, port int, useTLS bool, resourcePath string) (*RemotePrinterInfo, error) {
|
||||||
|
proto := "ipp"
|
||||||
|
if useTLS {
|
||||||
|
proto = "ipps"
|
||||||
|
}
|
||||||
|
printerURI := fmt.Sprintf("%s://%s:%d%s", proto, host, port, resourcePath)
|
||||||
|
|
||||||
|
httpProto := "http"
|
||||||
|
if useTLS {
|
||||||
|
httpProto = "https"
|
||||||
|
}
|
||||||
|
httpURL := fmt.Sprintf("%s://%s:%d%s", httpProto, host, port, resourcePath)
|
||||||
|
|
||||||
|
req := ipp.NewRequest(ipp.OperationGetPrinterAttributes, 1)
|
||||||
|
req.OperationAttributes[ipp.AttributePrinterURI] = printerURI
|
||||||
|
req.OperationAttributes[ipp.AttributeRequestedAttributes] = []string{
|
||||||
|
ipp.AttributePrinterName,
|
||||||
|
ipp.AttributePrinterMakeAndModel,
|
||||||
|
ipp.AttributePrinterState,
|
||||||
|
ipp.AttributePrinterInfo,
|
||||||
|
ipp.AttributePrinterUriSupported,
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.SendRequest(httpURL, req, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(resp.PrinterAttributes) == 0 {
|
||||||
|
return nil, errors.New("no printer attributes returned")
|
||||||
|
}
|
||||||
|
|
||||||
|
attrs := resp.PrinterAttributes[0]
|
||||||
|
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
MakeModel: getStringAttr(attrs, ipp.AttributePrinterMakeAndModel),
|
||||||
|
Name: getStringAttr(attrs, ipp.AttributePrinterName),
|
||||||
|
Info: getStringAttr(attrs, ipp.AttributePrinterInfo),
|
||||||
|
State: parsePrinterState(attrs),
|
||||||
|
URI: printerURI,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestRemotePrinter validates inputs and probes a remote printer via IPP.
|
||||||
|
// For lpd/socket protocols, only TCP reachability is tested.
|
||||||
|
func (m *Manager) TestRemotePrinter(host string, port int, protocol string) (*RemotePrinterInfo, error) {
|
||||||
|
if protocol == "" {
|
||||||
|
protocol = "ipp"
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := validateTestConnectionParams(host, port, protocol); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// For non-IPP protocols, only check TCP reachability
|
||||||
|
if protocol == "lpd" || protocol == "socket" {
|
||||||
|
addr := net.JoinHostPort(host, fmt.Sprintf("%d", port))
|
||||||
|
conn, err := net.DialTimeout("tcp", addr, probeTimeout)
|
||||||
|
if err != nil {
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: false,
|
||||||
|
Error: fmt.Sprintf("cannot reach %s: %s", addr, err.Error()),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
conn.Close()
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
URI: fmt.Sprintf("%s://%s:%d", protocol, host, port),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
useTLS := protocol == "ipps"
|
||||||
|
|
||||||
|
probeFn := m.probeRemoteFn
|
||||||
|
if probeFn == nil {
|
||||||
|
probeFn = probeRemotePrinter
|
||||||
|
}
|
||||||
|
|
||||||
|
return probeFn(host, port, useTLS)
|
||||||
|
}
|
||||||
397
core/internal/server/cups/test_connection_test.go
Normal file
397
core/internal/server/cups/test_connection_test.go
Normal file
@@ -0,0 +1,397 @@
|
|||||||
|
package cups
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/pkg/ipp"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestValidateTestConnectionParams(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
host string
|
||||||
|
port int
|
||||||
|
protocol string
|
||||||
|
wantErr string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid ipp",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid ipps",
|
||||||
|
host: "printer.local",
|
||||||
|
port: 443,
|
||||||
|
protocol: "ipps",
|
||||||
|
wantErr: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid lpd",
|
||||||
|
host: "10.0.0.1",
|
||||||
|
port: 515,
|
||||||
|
protocol: "lpd",
|
||||||
|
wantErr: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid socket",
|
||||||
|
host: "10.0.0.1",
|
||||||
|
port: 9100,
|
||||||
|
protocol: "socket",
|
||||||
|
wantErr: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty host",
|
||||||
|
host: "",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "host is required",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "port too low",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 0,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "port must be between 1 and 65535",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "port too high",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 70000,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "port must be between 1 and 65535",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid protocol",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ftp",
|
||||||
|
wantErr: "protocol must be one of: ipp, ipps, lpd, socket",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty protocol treated as ipp",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 631,
|
||||||
|
protocol: "",
|
||||||
|
wantErr: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "host with slash",
|
||||||
|
host: "192.168.0.5/admin",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "host contains invalid characters",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "host with space",
|
||||||
|
host: "192.168.0.5 ",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "host contains invalid characters",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "host with newline",
|
||||||
|
host: "192.168.0.5\n",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "host contains invalid characters",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := validateTestConnectionParams(tt.host, tt.port, tt.protocol)
|
||||||
|
if tt.wantErr == "" {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
} else {
|
||||||
|
assert.EqualError(t, err, tt.wantErr)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestManager_TestRemotePrinter_Validation(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
host string
|
||||||
|
port int
|
||||||
|
protocol string
|
||||||
|
wantErr string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "empty host returns error",
|
||||||
|
host: "",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "host is required",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid port returns error",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 0,
|
||||||
|
protocol: "ipp",
|
||||||
|
wantErr: "port must be between 1 and 65535",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid protocol returns error",
|
||||||
|
host: "192.168.0.5",
|
||||||
|
port: 631,
|
||||||
|
protocol: "ftp",
|
||||||
|
wantErr: "protocol must be one of: ipp, ipps, lpd, socket",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
_, err := m.TestRemotePrinter(tt.host, tt.port, tt.protocol)
|
||||||
|
assert.EqualError(t, err, tt.wantErr)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestManager_TestRemotePrinter_IPP(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
protocol string
|
||||||
|
probeRet *RemotePrinterInfo
|
||||||
|
probeErr error
|
||||||
|
wantTLS bool
|
||||||
|
wantReach bool
|
||||||
|
wantModel string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "successful ipp probe",
|
||||||
|
protocol: "ipp",
|
||||||
|
probeRet: &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
MakeModel: "HP OfficeJet 8010",
|
||||||
|
Name: "OfficeJet",
|
||||||
|
State: "idle",
|
||||||
|
URI: "ipp://192.168.0.5:631/ipp/print",
|
||||||
|
},
|
||||||
|
wantTLS: false,
|
||||||
|
wantReach: true,
|
||||||
|
wantModel: "HP OfficeJet 8010",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "successful ipps probe",
|
||||||
|
protocol: "ipps",
|
||||||
|
probeRet: &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
MakeModel: "HP OfficeJet 8010",
|
||||||
|
URI: "ipps://192.168.0.5:631/ipp/print",
|
||||||
|
},
|
||||||
|
wantTLS: true,
|
||||||
|
wantReach: true,
|
||||||
|
wantModel: "HP OfficeJet 8010",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "unreachable host",
|
||||||
|
protocol: "ipp",
|
||||||
|
probeRet: &RemotePrinterInfo{
|
||||||
|
Reachable: false,
|
||||||
|
Error: "cannot reach 192.168.0.5:631: connection refused",
|
||||||
|
},
|
||||||
|
wantReach: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty protocol defaults to ipp",
|
||||||
|
protocol: "",
|
||||||
|
probeRet: &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
MakeModel: "Test Printer",
|
||||||
|
},
|
||||||
|
wantTLS: false,
|
||||||
|
wantReach: true,
|
||||||
|
wantModel: "Test Printer",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
var capturedTLS bool
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
m.probeRemoteFn = func(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
capturedTLS = useTLS
|
||||||
|
return tt.probeRet, tt.probeErr
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := m.TestRemotePrinter("192.168.0.5", 631, tt.protocol)
|
||||||
|
if tt.probeErr != nil {
|
||||||
|
assert.Error(t, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, tt.wantReach, result.Reachable)
|
||||||
|
assert.Equal(t, tt.wantModel, result.MakeModel)
|
||||||
|
assert.Equal(t, tt.wantTLS, capturedTLS)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestManager_TestRemotePrinter_AuthRequired(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
m.probeRemoteFn = func(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
// Simulate what happens when the printer returns HTTP 401
|
||||||
|
return probeRemotePrinterWithAuthError(host, port, useTLS)
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := m.TestRemotePrinter("192.168.0.107", 631, "ipp")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, result.Reachable)
|
||||||
|
assert.Equal(t, "authentication required", result.Info)
|
||||||
|
assert.Contains(t, result.URI, "ipp://192.168.0.107:631")
|
||||||
|
}
|
||||||
|
|
||||||
|
// probeRemotePrinterWithAuthError simulates a probe where the printer
|
||||||
|
// returns HTTP 401 on both endpoints.
|
||||||
|
func probeRemotePrinterWithAuthError(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
// This simulates what probeRemotePrinter does when both endpoints
|
||||||
|
// return auth errors. We test the auth detection logic directly.
|
||||||
|
err := ipp.HTTPError{Code: 401}
|
||||||
|
if isAuthError(err) {
|
||||||
|
proto := "ipp"
|
||||||
|
if useTLS {
|
||||||
|
proto = "ipps"
|
||||||
|
}
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
URI: fmt.Sprintf("%s://%s:%d/ipp/print", proto, host, port),
|
||||||
|
Info: "authentication required",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestManager_TestRemotePrinter_NonIPPProtocol(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
probeCalled := false
|
||||||
|
m.probeRemoteFn = func(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
probeCalled = true
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// These will fail at TCP dial (no real server), but the important
|
||||||
|
// thing is that probeRemoteFn is NOT called for lpd/socket.
|
||||||
|
m.TestRemotePrinter("192.168.0.5", 9100, "socket")
|
||||||
|
assert.False(t, probeCalled, "probe function should not be called for socket protocol")
|
||||||
|
|
||||||
|
m.TestRemotePrinter("192.168.0.5", 515, "lpd")
|
||||||
|
assert.False(t, probeCalled, "probe function should not be called for lpd protocol")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleTestConnection_Success(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
m.probeRemoteFn = func(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
return &RemotePrinterInfo{
|
||||||
|
Reachable: true,
|
||||||
|
MakeModel: "HP OfficeJet 8010",
|
||||||
|
Name: "OfficeJet",
|
||||||
|
State: "idle",
|
||||||
|
URI: "ipp://192.168.0.5:631/ipp/print",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
conn := &mockConn{Buffer: buf}
|
||||||
|
|
||||||
|
req := models.Request{
|
||||||
|
ID: 1,
|
||||||
|
Method: "cups.testConnection",
|
||||||
|
Params: map[string]any{
|
||||||
|
"host": "192.168.0.5",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
handleTestConnection(conn, req, m)
|
||||||
|
|
||||||
|
var resp models.Response[RemotePrinterInfo]
|
||||||
|
err := json.NewDecoder(buf).Decode(&resp)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.NotNil(t, resp.Result)
|
||||||
|
assert.True(t, resp.Result.Reachable)
|
||||||
|
assert.Equal(t, "HP OfficeJet 8010", resp.Result.MakeModel)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleTestConnection_MissingHost(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
conn := &mockConn{Buffer: buf}
|
||||||
|
|
||||||
|
req := models.Request{
|
||||||
|
ID: 1,
|
||||||
|
Method: "cups.testConnection",
|
||||||
|
Params: map[string]any{},
|
||||||
|
}
|
||||||
|
|
||||||
|
handleTestConnection(conn, req, m)
|
||||||
|
|
||||||
|
var resp models.Response[any]
|
||||||
|
err := json.NewDecoder(buf).Decode(&resp)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Nil(t, resp.Result)
|
||||||
|
assert.NotNil(t, resp.Error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleTestConnection_CustomPortAndProtocol(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
m.probeRemoteFn = func(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
assert.Equal(t, 9631, port)
|
||||||
|
assert.True(t, useTLS)
|
||||||
|
return &RemotePrinterInfo{Reachable: true, URI: "ipps://192.168.0.5:9631/ipp/print"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
conn := &mockConn{Buffer: buf}
|
||||||
|
|
||||||
|
req := models.Request{
|
||||||
|
ID: 1,
|
||||||
|
Method: "cups.testConnection",
|
||||||
|
Params: map[string]any{
|
||||||
|
"host": "192.168.0.5",
|
||||||
|
"port": float64(9631),
|
||||||
|
"protocol": "ipps",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
handleTestConnection(conn, req, m)
|
||||||
|
|
||||||
|
var resp models.Response[RemotePrinterInfo]
|
||||||
|
err := json.NewDecoder(buf).Decode(&resp)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.NotNil(t, resp.Result)
|
||||||
|
assert.True(t, resp.Result.Reachable)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestHandleRequest_TestConnection(t *testing.T) {
|
||||||
|
m := NewTestManager(nil, nil)
|
||||||
|
m.probeRemoteFn = func(host string, port int, useTLS bool) (*RemotePrinterInfo, error) {
|
||||||
|
return &RemotePrinterInfo{Reachable: true}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
conn := &mockConn{Buffer: buf}
|
||||||
|
|
||||||
|
req := models.Request{
|
||||||
|
ID: 1,
|
||||||
|
Method: "cups.testConnection",
|
||||||
|
Params: map[string]any{"host": "192.168.0.5"},
|
||||||
|
}
|
||||||
|
|
||||||
|
HandleRequest(conn, req, m)
|
||||||
|
|
||||||
|
var resp models.Response[RemotePrinterInfo]
|
||||||
|
err := json.NewDecoder(buf).Decode(&resp)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.NotNil(t, resp.Result)
|
||||||
|
assert.True(t, resp.Result.Reachable)
|
||||||
|
}
|
||||||
@@ -55,6 +55,16 @@ type PPD struct {
|
|||||||
Type string `json:"type"`
|
Type string `json:"type"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type RemotePrinterInfo struct {
|
||||||
|
Reachable bool `json:"reachable"`
|
||||||
|
MakeModel string `json:"makeModel"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Info string `json:"info"`
|
||||||
|
State string `json:"state"`
|
||||||
|
URI string `json:"uri"`
|
||||||
|
Error string `json:"error,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
type PrinterClass struct {
|
type PrinterClass struct {
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
URI string `json:"uri"`
|
URI string `json:"uri"`
|
||||||
@@ -77,6 +87,7 @@ type Manager struct {
|
|||||||
notifierWg sync.WaitGroup
|
notifierWg sync.WaitGroup
|
||||||
lastNotifiedState *CUPSState
|
lastNotifiedState *CUPSState
|
||||||
baseURL string
|
baseURL string
|
||||||
|
probeRemoteFn func(host string, port int, useTLS bool) (*RemotePrinterInfo, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type SubscriptionManagerInterface interface {
|
type SubscriptionManagerInterface interface {
|
||||||
|
|||||||
@@ -16,4 +16,8 @@ const (
|
|||||||
dbusScreensaverPath = "/ScreenSaver"
|
dbusScreensaverPath = "/ScreenSaver"
|
||||||
dbusScreensaverPath2 = "/org/freedesktop/ScreenSaver"
|
dbusScreensaverPath2 = "/org/freedesktop/ScreenSaver"
|
||||||
dbusScreensaverInterface = "org.freedesktop.ScreenSaver"
|
dbusScreensaverInterface = "org.freedesktop.ScreenSaver"
|
||||||
|
|
||||||
|
dbusGnomeScreensaverName = "org.gnome.ScreenSaver"
|
||||||
|
dbusGnomeScreensaverPath = "/org/gnome/ScreenSaver"
|
||||||
|
dbusGnomeScreensaverInterface = "org.gnome.ScreenSaver"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -191,6 +191,12 @@ func (m *Manager) Close() {
|
|||||||
return true
|
return true
|
||||||
})
|
})
|
||||||
|
|
||||||
|
m.screensaverSubscribers.Range(func(key string, ch chan ScreensaverState) bool {
|
||||||
|
close(ch)
|
||||||
|
m.screensaverSubscribers.Delete(key)
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
if m.systemConn != nil {
|
if m.systemConn != nil {
|
||||||
m.systemConn.Close()
|
m.systemConn.Close()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package freedesktop
|
package freedesktop
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
@@ -15,45 +16,9 @@ type screensaverHandler struct {
|
|||||||
manager *Manager
|
manager *Manager
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *Manager) initializeScreensaver() error {
|
func screensaverIntrospectIface(ifaceName string) introspect.Interface {
|
||||||
if m.sessionConn == nil {
|
return introspect.Interface{
|
||||||
m.stateMutex.Lock()
|
Name: ifaceName,
|
||||||
m.state.Screensaver.Available = false
|
|
||||||
m.stateMutex.Unlock()
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
reply, err := m.sessionConn.RequestName(dbusScreensaverName, dbus.NameFlagDoNotQueue)
|
|
||||||
if err != nil {
|
|
||||||
log.Warnf("Failed to request screensaver name: %v", err)
|
|
||||||
m.stateMutex.Lock()
|
|
||||||
m.state.Screensaver.Available = false
|
|
||||||
m.stateMutex.Unlock()
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if reply != dbus.RequestNameReplyPrimaryOwner {
|
|
||||||
log.Warnf("Screensaver name already owned by another process")
|
|
||||||
m.stateMutex.Lock()
|
|
||||||
m.state.Screensaver.Available = false
|
|
||||||
m.stateMutex.Unlock()
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
handler := &screensaverHandler{manager: m}
|
|
||||||
|
|
||||||
if err := m.sessionConn.Export(handler, dbusScreensaverPath, dbusScreensaverInterface); err != nil {
|
|
||||||
log.Warnf("Failed to export screensaver on %s: %v", dbusScreensaverPath, err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := m.sessionConn.Export(handler, dbusScreensaverPath2, dbusScreensaverInterface); err != nil {
|
|
||||||
log.Warnf("Failed to export screensaver on %s: %v", dbusScreensaverPath2, err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
screensaverIface := introspect.Interface{
|
|
||||||
Name: dbusScreensaverInterface,
|
|
||||||
Methods: []introspect.Method{
|
Methods: []introspect.Method{
|
||||||
{
|
{
|
||||||
Name: "Inhibit",
|
Name: "Inhibit",
|
||||||
@@ -69,40 +34,106 @@ func (m *Manager) initializeScreensaver() error {
|
|||||||
{Name: "cookie", Type: "u", Direction: "in"},
|
{Name: "cookie", Type: "u", Direction: "in"},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
Name: "GetActive",
|
||||||
|
Args: []introspect.Arg{
|
||||||
|
{Name: "active", Type: "b", Direction: "out"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "SetActive",
|
||||||
|
Args: []introspect.Arg{
|
||||||
|
{Name: "active", Type: "b", Direction: "in"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Lock",
|
||||||
|
},
|
||||||
},
|
},
|
||||||
|
Signals: []introspect.Signal{
|
||||||
|
{
|
||||||
|
Name: "ActiveChanged",
|
||||||
|
Args: []introspect.Arg{
|
||||||
|
{Name: "new_value", Type: "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) initializeScreensaver() error {
|
||||||
|
if m.sessionConn == nil {
|
||||||
|
m.stateMutex.Lock()
|
||||||
|
m.state.Screensaver.Available = false
|
||||||
|
m.stateMutex.Unlock()
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
introNode := &introspect.Node{
|
handler := &screensaverHandler{manager: m}
|
||||||
Name: dbusScreensaverPath,
|
|
||||||
Interfaces: []introspect.Interface{
|
|
||||||
introspect.IntrospectData,
|
|
||||||
screensaverIface,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
if err := m.sessionConn.Export(introspect.NewIntrospectable(introNode), dbusScreensaverPath, "org.freedesktop.DBus.Introspectable"); err != nil {
|
|
||||||
log.Warnf("Failed to export introspectable on %s: %v", dbusScreensaverPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
introNode2 := &introspect.Node{
|
m.screensaverFreedesktopClaimed = m.claimScreensaverName(handler,
|
||||||
Name: dbusScreensaverPath2,
|
dbusScreensaverName, dbusScreensaverInterface, dbusScreensaverPath, dbusScreensaverPath2)
|
||||||
Interfaces: []introspect.Interface{
|
m.screensaverGnomeClaimed = m.claimScreensaverName(handler,
|
||||||
introspect.IntrospectData,
|
dbusGnomeScreensaverName, dbusGnomeScreensaverInterface, dbusGnomeScreensaverPath)
|
||||||
screensaverIface,
|
|
||||||
},
|
if !m.screensaverFreedesktopClaimed && !m.screensaverGnomeClaimed {
|
||||||
}
|
log.Warn("No screensaver interface could be claimed")
|
||||||
if err := m.sessionConn.Export(introspect.NewIntrospectable(introNode2), dbusScreensaverPath2, "org.freedesktop.DBus.Introspectable"); err != nil {
|
m.stateMutex.Lock()
|
||||||
log.Warnf("Failed to export introspectable on %s: %v", dbusScreensaverPath2, err)
|
m.state.Screensaver.Available = false
|
||||||
|
m.stateMutex.Unlock()
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
go m.watchPeerDisconnects()
|
go m.watchPeerDisconnects()
|
||||||
|
|
||||||
m.stateMutex.Lock()
|
m.stateMutex.Lock()
|
||||||
m.state.Screensaver.Available = true
|
m.state.Screensaver.Available = true
|
||||||
|
m.state.Screensaver.Active = false
|
||||||
m.state.Screensaver.Inhibited = false
|
m.state.Screensaver.Inhibited = false
|
||||||
m.state.Screensaver.Inhibitors = []ScreensaverInhibitor{}
|
m.state.Screensaver.Inhibitors = []ScreensaverInhibitor{}
|
||||||
m.stateMutex.Unlock()
|
m.stateMutex.Unlock()
|
||||||
|
|
||||||
log.Info("Screensaver inhibit listener initialized")
|
log.Info("Screensaver listener initialized")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) claimScreensaverName(handler *screensaverHandler, name, iface string, paths ...dbus.ObjectPath) bool {
|
||||||
|
reply, err := m.sessionConn.RequestName(name, dbus.NameFlagDoNotQueue)
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("Failed to request screensaver name %s: %v", name, err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if reply != dbus.RequestNameReplyPrimaryOwner {
|
||||||
|
log.Warnf("Screensaver name %s already owned by another process", name)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if err := m.exportScreensaverOnPaths(handler, iface, paths...); err != nil {
|
||||||
|
log.Warnf("Failed to export screensaver on %s: %v", name, err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
log.Infof("Claimed %s on session bus", name)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// exportScreensaverOnPaths exports the handler and introspection on the given
|
||||||
|
// paths under the specified interface name.
|
||||||
|
func (m *Manager) exportScreensaverOnPaths(handler *screensaverHandler, ifaceName string, paths ...dbus.ObjectPath) error {
|
||||||
|
iface := screensaverIntrospectIface(ifaceName)
|
||||||
|
for _, path := range paths {
|
||||||
|
if err := m.sessionConn.Export(handler, path, ifaceName); err != nil {
|
||||||
|
return fmt.Errorf("export handler on %s: %w", path, err)
|
||||||
|
}
|
||||||
|
node := &introspect.Node{
|
||||||
|
Name: string(path),
|
||||||
|
Interfaces: []introspect.Interface{
|
||||||
|
introspect.IntrospectData,
|
||||||
|
iface,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
if err := m.sessionConn.Export(introspect.NewIntrospectable(node), path, "org.freedesktop.DBus.Introspectable"); err != nil {
|
||||||
|
log.Warnf("Failed to export introspectable on %s: %v", path, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -268,3 +299,51 @@ func (m *Manager) NotifyScreensaverSubscribers() {
|
|||||||
return true
|
return true
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (h *screensaverHandler) GetActive() (bool, *dbus.Error) {
|
||||||
|
h.manager.stateMutex.RLock()
|
||||||
|
active := h.manager.state.Screensaver.Active
|
||||||
|
h.manager.stateMutex.RUnlock()
|
||||||
|
return active, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *screensaverHandler) SetActive(active bool) *dbus.Error {
|
||||||
|
h.manager.SetScreenLockActive(active)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *screensaverHandler) Lock() *dbus.Error {
|
||||||
|
h.manager.SetScreenLockActive(true)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) SetScreenLockActive(active bool) {
|
||||||
|
m.stateMutex.Lock()
|
||||||
|
changed := m.state.Screensaver.Active != active
|
||||||
|
m.state.Screensaver.Active = active
|
||||||
|
m.stateMutex.Unlock()
|
||||||
|
|
||||||
|
if !changed {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Infof("Screen lock active changed: %v", active)
|
||||||
|
defer m.NotifyScreensaverSubscribers()
|
||||||
|
|
||||||
|
if m.sessionConn == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if m.screensaverFreedesktopClaimed {
|
||||||
|
if err := m.sessionConn.Emit(dbusScreensaverPath, dbusScreensaverInterface+".ActiveChanged", active); err != nil {
|
||||||
|
log.Warnf("Failed to emit ActiveChanged on %s: %v", dbusScreensaverPath, err)
|
||||||
|
}
|
||||||
|
if err := m.sessionConn.Emit(dbusScreensaverPath2, dbusScreensaverInterface+".ActiveChanged", active); err != nil {
|
||||||
|
log.Warnf("Failed to emit ActiveChanged on %s: %v", dbusScreensaverPath2, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if m.screensaverGnomeClaimed {
|
||||||
|
if err := m.sessionConn.Emit(dbusGnomeScreensaverPath, dbusGnomeScreensaverInterface+".ActiveChanged", active); err != nil {
|
||||||
|
log.Warnf("Failed to emit ActiveChanged on %s: %v", dbusGnomeScreensaverPath, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
102
core/internal/server/freedesktop/screensaver_test.go
Normal file
102
core/internal/server/freedesktop/screensaver_test.go
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
package freedesktop
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sync"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSetScreenLockActive_ChangesState(t *testing.T) {
|
||||||
|
manager := &Manager{
|
||||||
|
state: &FreedeskState{
|
||||||
|
Screensaver: ScreensaverState{Available: true},
|
||||||
|
},
|
||||||
|
stateMutex: sync.RWMutex{},
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.False(t, manager.GetScreensaverState().Active)
|
||||||
|
|
||||||
|
manager.SetScreenLockActive(true)
|
||||||
|
assert.True(t, manager.GetScreensaverState().Active)
|
||||||
|
|
||||||
|
manager.SetScreenLockActive(false)
|
||||||
|
assert.False(t, manager.GetScreensaverState().Active)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSetScreenLockActive_NoChangeNoDuplicate(t *testing.T) {
|
||||||
|
ch := make(chan ScreensaverState, 64)
|
||||||
|
manager := &Manager{
|
||||||
|
state: &FreedeskState{
|
||||||
|
Screensaver: ScreensaverState{Available: true, Active: false},
|
||||||
|
},
|
||||||
|
stateMutex: sync.RWMutex{},
|
||||||
|
}
|
||||||
|
manager.screensaverSubscribers.Store("test", ch)
|
||||||
|
defer manager.screensaverSubscribers.Delete("test")
|
||||||
|
|
||||||
|
// Setting to same value should not notify
|
||||||
|
manager.SetScreenLockActive(false)
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-ch:
|
||||||
|
t.Fatal("should not have received notification for no-change")
|
||||||
|
case <-time.After(50 * time.Millisecond):
|
||||||
|
// Expected: no notification
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSetScreenLockActive_NotifiesSubscribers(t *testing.T) {
|
||||||
|
ch := make(chan ScreensaverState, 64)
|
||||||
|
manager := &Manager{
|
||||||
|
state: &FreedeskState{
|
||||||
|
Screensaver: ScreensaverState{Available: true, Active: false},
|
||||||
|
},
|
||||||
|
stateMutex: sync.RWMutex{},
|
||||||
|
}
|
||||||
|
manager.screensaverSubscribers.Store("test", ch)
|
||||||
|
defer manager.screensaverSubscribers.Delete("test")
|
||||||
|
|
||||||
|
manager.SetScreenLockActive(true)
|
||||||
|
|
||||||
|
select {
|
||||||
|
case state := <-ch:
|
||||||
|
assert.True(t, state.Active)
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
t.Fatal("timeout waiting for subscriber notification")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSetScreenLockActive_NilSessionConn(t *testing.T) {
|
||||||
|
manager := &Manager{
|
||||||
|
state: &FreedeskState{
|
||||||
|
Screensaver: ScreensaverState{Available: true},
|
||||||
|
},
|
||||||
|
stateMutex: sync.RWMutex{},
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.NotPanics(t, func() {
|
||||||
|
manager.SetScreenLockActive(true)
|
||||||
|
})
|
||||||
|
assert.True(t, manager.GetScreensaverState().Active)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetActive_ReturnsCurrentState(t *testing.T) {
|
||||||
|
manager := &Manager{
|
||||||
|
state: &FreedeskState{
|
||||||
|
Screensaver: ScreensaverState{Available: true, Active: true},
|
||||||
|
},
|
||||||
|
stateMutex: sync.RWMutex{},
|
||||||
|
}
|
||||||
|
|
||||||
|
handler := &screensaverHandler{manager: manager}
|
||||||
|
active, dbusErr := handler.GetActive()
|
||||||
|
assert.Nil(t, dbusErr)
|
||||||
|
assert.True(t, active)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestScreensaverState_ActiveDefaultsFalse(t *testing.T) {
|
||||||
|
state := ScreensaverState{}
|
||||||
|
assert.False(t, state.Active)
|
||||||
|
}
|
||||||
@@ -39,6 +39,7 @@ type ScreensaverInhibitor struct {
|
|||||||
|
|
||||||
type ScreensaverState struct {
|
type ScreensaverState struct {
|
||||||
Available bool `json:"available"`
|
Available bool `json:"available"`
|
||||||
|
Active bool `json:"active"`
|
||||||
Inhibited bool `json:"inhibited"`
|
Inhibited bool `json:"inhibited"`
|
||||||
Inhibitors []ScreensaverInhibitor `json:"inhibitors"`
|
Inhibitors []ScreensaverInhibitor `json:"inhibitors"`
|
||||||
}
|
}
|
||||||
@@ -50,14 +51,16 @@ type FreedeskState struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type Manager struct {
|
type Manager struct {
|
||||||
state *FreedeskState
|
state *FreedeskState
|
||||||
stateMutex sync.RWMutex
|
stateMutex sync.RWMutex
|
||||||
systemConn *dbus.Conn
|
systemConn *dbus.Conn
|
||||||
sessionConn *dbus.Conn
|
sessionConn *dbus.Conn
|
||||||
accountsObj dbus.BusObject
|
accountsObj dbus.BusObject
|
||||||
settingsObj dbus.BusObject
|
settingsObj dbus.BusObject
|
||||||
currentUID uint64
|
currentUID uint64
|
||||||
subscribers syncmap.Map[string, chan FreedeskState]
|
subscribers syncmap.Map[string, chan FreedeskState]
|
||||||
screensaverSubscribers syncmap.Map[string, chan ScreensaverState]
|
screensaverSubscribers syncmap.Map[string, chan ScreensaverState]
|
||||||
screensaverCookieCounter uint32
|
screensaverCookieCounter uint32
|
||||||
|
screensaverFreedesktopClaimed bool
|
||||||
|
screensaverGnomeClaimed bool
|
||||||
}
|
}
|
||||||
|
|||||||
61
core/internal/server/location/handlers.go
Normal file
61
core/internal/server/location/handlers.go
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
package location
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
type LocationEvent struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Data State `json:"data"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func HandleRequest(conn net.Conn, req models.Request, manager *Manager) {
|
||||||
|
switch req.Method {
|
||||||
|
case "location.getState":
|
||||||
|
handleGetState(conn, req, manager)
|
||||||
|
case "location.subscribe":
|
||||||
|
handleSubscribe(conn, req, manager)
|
||||||
|
|
||||||
|
default:
|
||||||
|
models.RespondError(conn, req.ID, fmt.Sprintf("unknown method: %s", req.Method))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func handleGetState(conn net.Conn, req models.Request, manager *Manager) {
|
||||||
|
models.Respond(conn, req.ID, manager.GetState())
|
||||||
|
}
|
||||||
|
|
||||||
|
func handleSubscribe(conn net.Conn, req models.Request, manager *Manager) {
|
||||||
|
clientID := fmt.Sprintf("client-%p", conn)
|
||||||
|
stateChan := manager.Subscribe(clientID)
|
||||||
|
defer manager.Unsubscribe(clientID)
|
||||||
|
|
||||||
|
initialState := manager.GetState()
|
||||||
|
event := LocationEvent{
|
||||||
|
Type: "state_changed",
|
||||||
|
Data: initialState,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := json.NewEncoder(conn).Encode(models.Response[LocationEvent]{
|
||||||
|
ID: req.ID,
|
||||||
|
Result: &event,
|
||||||
|
}); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for state := range stateChan {
|
||||||
|
event := LocationEvent{
|
||||||
|
Type: "state_changed",
|
||||||
|
Data: state,
|
||||||
|
}
|
||||||
|
if err := json.NewEncoder(conn).Encode(models.Response[LocationEvent]{
|
||||||
|
Result: &event,
|
||||||
|
}); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
175
core/internal/server/location/manager.go
Normal file
175
core/internal/server/location/manager.go
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
package location
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/geolocation"
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
func NewManager(client geolocation.Client) (*Manager, error) {
|
||||||
|
currLocation, err := client.GetLocation()
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("Failed to get initial location: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
m := &Manager{
|
||||||
|
client: client,
|
||||||
|
dirty: make(chan struct{}),
|
||||||
|
stopChan: make(chan struct{}),
|
||||||
|
|
||||||
|
state: &State{
|
||||||
|
Latitude: currLocation.Latitude,
|
||||||
|
Longitude: currLocation.Longitude,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := m.startSignalPump(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
m.notifierWg.Add(1)
|
||||||
|
go m.notifier()
|
||||||
|
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) Close() {
|
||||||
|
close(m.stopChan)
|
||||||
|
m.notifierWg.Wait()
|
||||||
|
|
||||||
|
m.sigWG.Wait()
|
||||||
|
|
||||||
|
m.subscribers.Range(func(key string, ch chan State) bool {
|
||||||
|
close(ch)
|
||||||
|
m.subscribers.Delete(key)
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) Subscribe(id string) chan State {
|
||||||
|
ch := make(chan State, 64)
|
||||||
|
m.subscribers.Store(id, ch)
|
||||||
|
return ch
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) Unsubscribe(id string) {
|
||||||
|
if ch, ok := m.subscribers.LoadAndDelete(id); ok {
|
||||||
|
close(ch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) startSignalPump() error {
|
||||||
|
m.sigWG.Add(1)
|
||||||
|
go func() {
|
||||||
|
defer m.sigWG.Done()
|
||||||
|
|
||||||
|
subscription := m.client.Subscribe("locationManager")
|
||||||
|
defer m.client.Unsubscribe("locationManager")
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-m.stopChan:
|
||||||
|
return
|
||||||
|
case location, ok := <-subscription:
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
m.handleLocationChange(location)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) handleLocationChange(location geolocation.Location) {
|
||||||
|
m.stateMutex.Lock()
|
||||||
|
defer m.stateMutex.Unlock()
|
||||||
|
|
||||||
|
m.state.Latitude = location.Latitude
|
||||||
|
m.state.Longitude = location.Longitude
|
||||||
|
|
||||||
|
m.notifySubscribers()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) notifySubscribers() {
|
||||||
|
select {
|
||||||
|
case m.dirty <- struct{}{}:
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) GetState() State {
|
||||||
|
m.stateMutex.RLock()
|
||||||
|
defer m.stateMutex.RUnlock()
|
||||||
|
if m.state == nil {
|
||||||
|
return State{
|
||||||
|
Latitude: 0.0,
|
||||||
|
Longitude: 0.0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
stateCopy := *m.state
|
||||||
|
return stateCopy
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) notifier() {
|
||||||
|
defer m.notifierWg.Done()
|
||||||
|
const minGap = 200 * time.Millisecond
|
||||||
|
timer := time.NewTimer(minGap)
|
||||||
|
timer.Stop()
|
||||||
|
var pending bool
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-m.stopChan:
|
||||||
|
timer.Stop()
|
||||||
|
return
|
||||||
|
case <-m.dirty:
|
||||||
|
if pending {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pending = true
|
||||||
|
timer.Reset(minGap)
|
||||||
|
case <-timer.C:
|
||||||
|
if !pending {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
currentState := m.GetState()
|
||||||
|
|
||||||
|
if m.lastNotified != nil && !stateChanged(m.lastNotified, ¤tState) {
|
||||||
|
pending = false
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
m.subscribers.Range(func(key string, ch chan State) bool {
|
||||||
|
select {
|
||||||
|
case ch <- currentState:
|
||||||
|
default:
|
||||||
|
log.Warn("Location: subscriber channel full, dropping update")
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
stateCopy := currentState
|
||||||
|
m.lastNotified = &stateCopy
|
||||||
|
pending = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func stateChanged(old, new *State) bool {
|
||||||
|
if old == nil || new == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if old.Latitude != new.Latitude {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if old.Longitude != new.Longitude {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
28
core/internal/server/location/types.go
Normal file
28
core/internal/server/location/types.go
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
package location
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/internal/geolocation"
|
||||||
|
"github.com/AvengeMedia/DankMaterialShell/core/pkg/syncmap"
|
||||||
|
)
|
||||||
|
|
||||||
|
type State struct {
|
||||||
|
Latitude float64 `json:"latitude"`
|
||||||
|
Longitude float64 `json:"longitude"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Manager struct {
|
||||||
|
state *State
|
||||||
|
stateMutex sync.RWMutex
|
||||||
|
|
||||||
|
client geolocation.Client
|
||||||
|
|
||||||
|
stopChan chan struct{}
|
||||||
|
sigWG sync.WaitGroup
|
||||||
|
|
||||||
|
subscribers syncmap.Map[string, chan State]
|
||||||
|
dirty chan struct{}
|
||||||
|
notifierWg sync.WaitGroup
|
||||||
|
lastNotified *State
|
||||||
|
}
|
||||||
@@ -5,5 +5,6 @@ const (
|
|||||||
dbusPath = "/org/freedesktop/login1"
|
dbusPath = "/org/freedesktop/login1"
|
||||||
dbusManagerInterface = "org.freedesktop.login1.Manager"
|
dbusManagerInterface = "org.freedesktop.login1.Manager"
|
||||||
dbusSessionInterface = "org.freedesktop.login1.Session"
|
dbusSessionInterface = "org.freedesktop.login1.Session"
|
||||||
|
dbusUserInterface = "org.freedesktop.login1.User"
|
||||||
dbusPropsInterface = "org.freedesktop.DBus.Properties"
|
dbusPropsInterface = "org.freedesktop.DBus.Properties"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -17,15 +17,8 @@ func NewManager() (*Manager, error) {
|
|||||||
return nil, fmt.Errorf("failed to connect to system bus: %w", err)
|
return nil, fmt.Errorf("failed to connect to system bus: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
sessionID := os.Getenv("XDG_SESSION_ID")
|
|
||||||
if sessionID == "" {
|
|
||||||
sessionID = "self"
|
|
||||||
}
|
|
||||||
|
|
||||||
m := &Manager{
|
m := &Manager{
|
||||||
state: &SessionState{
|
state: &SessionState{},
|
||||||
SessionID: sessionID,
|
|
||||||
},
|
|
||||||
stateMutex: sync.RWMutex{},
|
stateMutex: sync.RWMutex{},
|
||||||
|
|
||||||
stopChan: make(chan struct{}),
|
stopChan: make(chan struct{}),
|
||||||
@@ -60,12 +53,13 @@ func (m *Manager) initialize() error {
|
|||||||
|
|
||||||
m.initializeFallbackDelay()
|
m.initializeFallbackDelay()
|
||||||
|
|
||||||
sessionPath, err := m.getSession(m.state.SessionID)
|
sessionID, sessionPath, err := m.discoverSession()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to get session path: %w", err)
|
return fmt.Errorf("failed to get session path: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
m.stateMutex.Lock()
|
m.stateMutex.Lock()
|
||||||
|
m.state.SessionID = sessionID
|
||||||
m.state.SessionPath = string(sessionPath)
|
m.state.SessionPath = string(sessionPath)
|
||||||
m.sessionPath = sessionPath
|
m.sessionPath = sessionPath
|
||||||
m.stateMutex.Unlock()
|
m.stateMutex.Unlock()
|
||||||
@@ -79,6 +73,41 @@ func (m *Manager) initialize() error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *Manager) discoverSession() (string, dbus.ObjectPath, error) {
|
||||||
|
// 1. Explicit XDG_SESSION_ID
|
||||||
|
if id := os.Getenv("XDG_SESSION_ID"); id != "" {
|
||||||
|
if path, err := m.getSession(id); err == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "loginctl: using XDG_SESSION_ID=%s\n", id)
|
||||||
|
return id, path, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. PID-based lookup (works when caller is inside a session cgroup)
|
||||||
|
if id, path, err := m.getSessionByPID(uint32(os.Getpid())); err == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "loginctl: found session %s via PID\n", id)
|
||||||
|
return id, path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. User's primary display session (handles UWSM and similar)
|
||||||
|
if id, path, err := m.getUserDisplaySession(); err == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "loginctl: found session %s via User.Display\n", id)
|
||||||
|
return id, path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Score all sessions for current UID
|
||||||
|
if id, path, err := m.findBestSession(); err == nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "loginctl: found session %s via ListSessions scoring\n", id)
|
||||||
|
return id, path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Last resort: "self"
|
||||||
|
path, err := m.getSession("self")
|
||||||
|
if err != nil {
|
||||||
|
return "", "", fmt.Errorf("%w", err)
|
||||||
|
}
|
||||||
|
return "self", path, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (m *Manager) getSession(id string) (dbus.ObjectPath, error) {
|
func (m *Manager) getSession(id string) (dbus.ObjectPath, error) {
|
||||||
var out dbus.ObjectPath
|
var out dbus.ObjectPath
|
||||||
err := m.managerObj.Call(dbusManagerInterface+".GetSession", 0, id).Store(&out)
|
err := m.managerObj.Call(dbusManagerInterface+".GetSession", 0, id).Store(&out)
|
||||||
@@ -88,6 +117,166 @@ func (m *Manager) getSession(id string) (dbus.ObjectPath, error) {
|
|||||||
return out, nil
|
return out, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *Manager) getSessionByPID(pid uint32) (string, dbus.ObjectPath, error) {
|
||||||
|
var path dbus.ObjectPath
|
||||||
|
if err := m.managerObj.Call(dbusManagerInterface+".GetSessionByPID", 0, pid).Store(&path); err != nil {
|
||||||
|
return "", "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
sessionObj := m.conn.Object(dbusDest, path)
|
||||||
|
var id dbus.Variant
|
||||||
|
if err := sessionObj.Call(dbusPropsInterface+".Get", 0, dbusSessionInterface, "Id").Store(&id); err != nil {
|
||||||
|
return "", "", err
|
||||||
|
}
|
||||||
|
return id.Value().(string), path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) getUserDisplaySession() (string, dbus.ObjectPath, error) {
|
||||||
|
uid := uint32(os.Getuid())
|
||||||
|
|
||||||
|
var userPath dbus.ObjectPath
|
||||||
|
if err := m.managerObj.Call(dbusManagerInterface+".GetUser", 0, uid).Store(&userPath); err != nil {
|
||||||
|
return "", "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
userObj := m.conn.Object(dbusDest, userPath)
|
||||||
|
var display dbus.Variant
|
||||||
|
if err := userObj.Call(dbusPropsInterface+".Get", 0, dbusUserInterface, "Display").Store(&display); err != nil {
|
||||||
|
return "", "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
pair, ok := display.Value().([]any)
|
||||||
|
if !ok || len(pair) < 2 {
|
||||||
|
return "", "", fmt.Errorf("unexpected Display format")
|
||||||
|
}
|
||||||
|
|
||||||
|
sessionID, _ := pair[0].(string)
|
||||||
|
sessionPath, _ := pair[1].(dbus.ObjectPath)
|
||||||
|
if sessionID == "" || sessionPath == "" {
|
||||||
|
return "", "", fmt.Errorf("empty Display session")
|
||||||
|
}
|
||||||
|
|
||||||
|
return sessionID, sessionPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type sessionCandidate struct {
|
||||||
|
id string
|
||||||
|
path dbus.ObjectPath
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) findBestSession() (string, dbus.ObjectPath, error) {
|
||||||
|
// ListSessions returns a(susso): [][]any where each entry is [id, uid, name, seat, path]
|
||||||
|
var raw [][]any
|
||||||
|
if err := m.managerObj.Call(dbusManagerInterface+".ListSessions", 0).Store(&raw); err != nil {
|
||||||
|
return "", "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
uid := uint32(os.Getuid())
|
||||||
|
var candidates []sessionCandidate
|
||||||
|
for _, entry := range raw {
|
||||||
|
if len(entry) < 5 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
entryUID, _ := entry[1].(uint32)
|
||||||
|
if entryUID != uid {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
id, _ := entry[0].(string)
|
||||||
|
path, _ := entry[4].(dbus.ObjectPath)
|
||||||
|
if id != "" && path != "" {
|
||||||
|
candidates = append(candidates, sessionCandidate{id: id, path: path})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(candidates) == 0 {
|
||||||
|
return "", "", fmt.Errorf("no sessions for uid %d", uid)
|
||||||
|
}
|
||||||
|
|
||||||
|
bestScore := -1
|
||||||
|
var best sessionCandidate
|
||||||
|
for _, c := range candidates {
|
||||||
|
score := m.scoreSession(c.path)
|
||||||
|
if score > bestScore {
|
||||||
|
bestScore = score
|
||||||
|
best = c
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if bestScore < 0 {
|
||||||
|
return "", "", fmt.Errorf("no viable session found")
|
||||||
|
}
|
||||||
|
return best.id, best.path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Manager) scoreSession(path dbus.ObjectPath) int {
|
||||||
|
obj := m.conn.Object(dbusDest, path)
|
||||||
|
var props map[string]dbus.Variant
|
||||||
|
if err := obj.Call(dbusPropsInterface+".GetAll", 0, dbusSessionInterface).Store(&props); err != nil {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
getStr := func(key string) string {
|
||||||
|
if v, ok := props[key]; ok {
|
||||||
|
if s, ok := v.Value().(string); ok {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
getBool := func(key string) bool {
|
||||||
|
if v, ok := props[key]; ok {
|
||||||
|
if b, ok := v.Value().(bool); ok {
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
getUint32 := func(key string) uint32 {
|
||||||
|
if v, ok := props[key]; ok {
|
||||||
|
if u, ok := v.Value().(uint32); ok {
|
||||||
|
return u
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
class := getStr("Class")
|
||||||
|
if class != "user" {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
if getBool("Remote") {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
score := 0
|
||||||
|
|
||||||
|
if getBool("Active") {
|
||||||
|
score += 100
|
||||||
|
}
|
||||||
|
|
||||||
|
switch getStr("Type") {
|
||||||
|
case "wayland", "x11":
|
||||||
|
score += 80
|
||||||
|
case "tty":
|
||||||
|
score += 10
|
||||||
|
}
|
||||||
|
|
||||||
|
if v, ok := props["Seat"]; ok {
|
||||||
|
if seatArr, ok := v.Value().([]any); ok && len(seatArr) >= 1 {
|
||||||
|
if seat, ok := seatArr[0].(string); ok && seat != "" {
|
||||||
|
score += 40
|
||||||
|
if seat == "seat0" {
|
||||||
|
score += 10
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if getUint32("VTNr") > 0 {
|
||||||
|
score += 20
|
||||||
|
}
|
||||||
|
|
||||||
|
return score
|
||||||
|
}
|
||||||
|
|
||||||
func (m *Manager) refreshSessionBinding() error {
|
func (m *Manager) refreshSessionBinding() error {
|
||||||
if m.managerObj == nil || m.conn == nil {
|
if m.managerObj == nil || m.conn == nil {
|
||||||
return fmt.Errorf("manager not fully initialized")
|
return fmt.Errorf("manager not fully initialized")
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ func handleMatugenQueue(conn net.Conn, req models.Request) {
|
|||||||
SyncModeWithPortal: models.GetOr(req, "syncModeWithPortal", false),
|
SyncModeWithPortal: models.GetOr(req, "syncModeWithPortal", false),
|
||||||
TerminalsAlwaysDark: models.GetOr(req, "terminalsAlwaysDark", false),
|
TerminalsAlwaysDark: models.GetOr(req, "terminalsAlwaysDark", false),
|
||||||
SkipTemplates: models.GetOr(req, "skipTemplates", ""),
|
SkipTemplates: models.GetOr(req, "skipTemplates", ""),
|
||||||
|
Contrast: models.GetOr(req, "contrast", 0.0),
|
||||||
}
|
}
|
||||||
|
|
||||||
wait := models.GetOr(req, "wait", true)
|
wait := models.GetOr(req, "wait", true)
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ type Backend interface {
|
|||||||
ScanWiFi() error
|
ScanWiFi() error
|
||||||
ScanWiFiDevice(device string) error
|
ScanWiFiDevice(device string) error
|
||||||
GetWiFiNetworkDetails(ssid string) (*NetworkInfoResponse, error)
|
GetWiFiNetworkDetails(ssid string) (*NetworkInfoResponse, error)
|
||||||
|
GetWiFiQRCodeContent(ssid string) (string, error)
|
||||||
GetWiFiDevices() []WiFiDevice
|
GetWiFiDevices() []WiFiDevice
|
||||||
|
|
||||||
ConnectWiFi(req ConnectionRequest) error
|
ConnectWiFi(req ConnectionRequest) error
|
||||||
|
|||||||
@@ -111,6 +111,10 @@ func (b *HybridIwdNetworkdBackend) GetWiFiNetworkDetails(ssid string) (*NetworkI
|
|||||||
return b.wifi.GetWiFiNetworkDetails(ssid)
|
return b.wifi.GetWiFiNetworkDetails(ssid)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b *HybridIwdNetworkdBackend) GetWiFiQRCodeContent(ssid string) (string, error) {
|
||||||
|
return b.wifi.GetWiFiQRCodeContent(ssid)
|
||||||
|
}
|
||||||
|
|
||||||
func (b *HybridIwdNetworkdBackend) ConnectWiFi(req ConnectionRequest) error {
|
func (b *HybridIwdNetworkdBackend) ConnectWiFi(req ConnectionRequest) error {
|
||||||
if err := b.wifi.ConnectWiFi(req); err != nil {
|
if err := b.wifi.ConnectWiFi(req); err != nil {
|
||||||
return err
|
return err
|
||||||
|
|||||||
@@ -1,6 +1,9 @@
|
|||||||
package network
|
package network
|
||||||
|
|
||||||
import "fmt"
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
func (b *IWDBackend) GetWiredConnections() ([]WiredConnection, error) {
|
func (b *IWDBackend) GetWiredConnections() ([]WiredConnection, error) {
|
||||||
return nil, fmt.Errorf("wired connections not supported by iwd")
|
return nil, fmt.Errorf("wired connections not supported by iwd")
|
||||||
@@ -112,3 +115,19 @@ func (b *IWDBackend) getWiFiDevicesLocked() []WiFiDevice {
|
|||||||
Networks: b.state.WiFiNetworks,
|
Networks: b.state.WiFiNetworks,
|
||||||
}}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b *IWDBackend) GetWiFiQRCodeContent(ssid string) (string, error) {
|
||||||
|
path := iwdConfigPath(ssid)
|
||||||
|
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("no saved iwd config for `%s`: %w", ssid, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
passphrase, err := parseIWDPassphrase(string(data))
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to read passphrase for `%s`: %w", ssid, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return FormatWiFiQRString("WPA", ssid, passphrase), nil
|
||||||
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user