Mirror of https://github.com/AvengeMedia/DankMaterialShell.git (synced 2025-12-05 21:15:38 -05:00)

Compare commits: wip/bar-ma...1bec20ecef (99 commits)
Commits in this compare (99):

1bec20ecef, 08c9bf570d, 5e77a10a81, 3bc6461e2a, d3194e15e2, 2db79ef202,
b3c07edef6, b773fdca34, 2e9f9f7b7e, 30cbfe729d, b036da2446, c8a9fb1674,
43bea80cad, 23538c0323, 2ae911230d, 5ce1cb87ea, 2a37028b6a, 8130feb2a0,
c49a875ec2, 2a002304b9, d9522818ae, 800588e121, 991c31ebdb, 48f77e1691,
42de6fd074, 62845b470c, fd20986cf8, 61369cde9e, 644384ce8b, 97c11a2482,
1e7e1c2d78, 1c7201fb04, 61ec0c697a, 4b5fce1bfc, 6cc6e7c8e9, 89298fce30,
a3a27e07fa, 4f32376f22, 58bf189941, bcfa508da5, c0ae3ef58b, 1e70d7b4c3,
f8dc6ad2bc, e22482988f, 4eb896629d, b310e66275, b39da1bea7, fa575d0574,
dfe2f3771b, 46caeb0445, 59cc9c7006, 12e91534eb, d9da88ceb5, 2dbfec0307,
09cf8c9641, f1bed4d6a3, 2ed6c33c83, 7ad532ed17, 92fe8c5b14, 8e95572589,
62da862a66, 993e34f548, e39465aece, 8fd616b680, cc054b27de, dfdaa82245,
99a307e0ad, 5ddea836a1, 208d92aa06, 6ef9ddd4f3, 1c92d39185, c0f072217c,
542562f988, 4e6f0d5e87, 10639a5ead, 06d668e710, d1472dfcba, ccb4da3cd8,
46e96b49f0, 984cfe7f98, d769300137, d175d66828, c1a314332e, 046ac59d21,
00c06f07d0, 3e2ab40c6a, 350ffd0052, ecd1a622d2, f13968aa61, 4d1ffde54c,
d69017a706, f2deaeccdb, ea9b0d2a79, 2e6dbedb8b, 6f359df8f9, f6db20cd06,
6287fae065, e441607ce3, b5379a95fa
.github/workflows/release.yml (vendored): 62 changed lines

@@ -386,6 +386,68 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

+  trigger-obs-update:
+    runs-on: ubuntu-latest
+    needs: release
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Install OSC
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y osc
+
+          mkdir -p ~/.config/osc
+          cat > ~/.config/osc/oscrc << EOF
+          [general]
+          apiurl = https://api.opensuse.org
+
+          [https://api.opensuse.org]
+          user = ${{ secrets.OBS_USERNAME }}
+          pass = ${{ secrets.OBS_PASSWORD }}
+          EOF
+          chmod 600 ~/.config/osc/oscrc
+
+      - name: Update OBS packages
+        run: |
+          VERSION="${{ github.ref_name }}"
+          cd distro
+          bash scripts/obs-upload.sh dms "Update to $VERSION"
+
+  trigger-ppa-update:
+    runs-on: ubuntu-latest
+    needs: release
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Install build dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y \
+            debhelper \
+            devscripts \
+            dput \
+            lftp \
+            build-essential \
+            fakeroot \
+            dpkg-dev
+
+      - name: Configure GPG
+        env:
+          GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
+        run: |
+          echo "$GPG_KEY" | gpg --import
+          GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
+          echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
+
+      - name: Upload to PPA
+        run: |
+          VERSION="${{ github.ref_name }}"
+          cd distro/ubuntu/ppa
+          bash create-and-upload.sh ../dms dms questing
+
   copr-build:
     runs-on: ubuntu-latest
     needs: release
@@ -1,4 +1,4 @@
-name: DMS Copr Stable Release (Manual)
+name: DMS Copr Stable Release

 on:
   workflow_dispatch:
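The release workflow hands the actual OBS upload off to `scripts/obs-upload.sh`, which is not part of this compare. As a hedged sketch only, an osc-based upload helper of this shape typically checks out the package, refreshes the packaging files, and commits; the project name `home:AvengeMedia` comes from the summary step of the new workflow below, while the internal steps and paths here are assumptions, not the real script.

```bash
#!/usr/bin/env bash
# Hypothetical sketch of an OBS upload helper (the real scripts/obs-upload.sh is not shown in this diff).
# Usage: obs-upload.sh <package> <commit message>
set -euo pipefail
pkg="$1"
msg="$2"

# Check out the package from the OBS project (project name assumed from the workflow summary).
osc checkout "home:AvengeMedia" "$pkg"
cd "home:AvengeMedia/$pkg"

# Copy refreshed packaging files into the working copy and record adds/removes
# (which files get copied is an assumption).
cp ../../opensuse/"$pkg".spec . || true
osc addremove

# Commit; OBS then rebuilds the package on build.opensuse.org.
osc commit -m "$msg"
```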
.github/workflows/run-obs.yml (vendored, new file): 114 lines

@@ -0,0 +1,114 @@
name: Update OBS Packages

on:
  workflow_dispatch:
    inputs:
      package:
        description: 'Package to update (dms, dms-git, or all)'
        required: false
        default: 'all'
      rebuild_release:
        description: 'Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)'
        required: false
        default: ''
  push:
    tags:
      - 'v*'
  schedule:
    - cron: '0 */3 * * *' # Every 3 hours for dms-git builds

jobs:
  update-obs:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Determine packages to update
        id: packages
        run: |
          if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
            echo "packages=dms" >> $GITHUB_OUTPUT
            VERSION="${GITHUB_REF#refs/tags/}"
            echo "version=$VERSION" >> $GITHUB_OUTPUT
            echo "Triggered by tag: $VERSION"
          elif [[ "${{ github.event_name }}" == "schedule" ]]; then
            echo "packages=dms-git" >> $GITHUB_OUTPUT
            echo "Triggered by schedule: updating git package"
          elif [[ -n "${{ github.event.inputs.package }}" ]]; then
            echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
            echo "Manual trigger: ${{ github.event.inputs.package }}"
          else
            echo "packages=all" >> $GITHUB_OUTPUT
          fi

      - name: Update version in packaging files
        if: steps.packages.outputs.version != ''
        run: |
          VERSION="${{ steps.packages.outputs.version }}"
          VERSION_NO_V="${VERSION#v}"
          echo "Updating packaging to version $VERSION_NO_V"

          # Update openSUSE spec files
          sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/*.spec

          # Update Debian _service files
          for service in distro/debian/*/_service; do
            if [[ -f "$service" ]]; then
              sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
            fi
          done

          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add distro/
          git commit -m "chore: update packaging to $VERSION" || echo "No changes to commit"

      - name: Install OSC
        run: |
          sudo apt-get update
          sudo apt-get install -y osc

          mkdir -p ~/.config/osc
          cat > ~/.config/osc/oscrc << EOF
          [general]
          apiurl = https://api.opensuse.org

          [https://api.opensuse.org]
          user = ${{ secrets.OBS_USERNAME }}
          pass = ${{ secrets.OBS_PASSWORD }}
          EOF
          chmod 600 ~/.config/osc/oscrc

      - name: Upload to OBS
        env:
          REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
        run: |
          PACKAGES="${{ steps.packages.outputs.packages }}"
          MESSAGE="Automated update from GitHub Actions"

          if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
            MESSAGE="Update to ${{ steps.packages.outputs.version }}"
          fi

          cd distro

          if [[ "$PACKAGES" == "all" ]]; then
            bash scripts/obs-upload.sh dms "$MESSAGE"
            bash scripts/obs-upload.sh dms-git "Automated git update"
          else
            bash scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
          fi

      - name: Summary
        run: |
          echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
          if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
            echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          fi
          echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY
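Because the workflow exposes `workflow_dispatch` inputs, it can also be started from the GitHub CLI. The input names below match the workflow definition above; the values are only examples.

```bash
# Trigger the OBS update workflow manually for the git package.
gh workflow run run-obs.yml -f package=dms-git

# Rebuild an already-published version by bumping the spec Release field.
gh workflow run run-obs.yml -f package=dms -f rebuild_release=2
```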
.github/workflows/run-ppa.yml (vendored, new file): 110 lines

@@ -0,0 +1,110 @@
name: Update PPA Packages

on:
  workflow_dispatch:
    inputs:
      package:
        description: 'Package to upload (dms, dms-git, or all)'
        required: false
        default: 'dms-git'
      rebuild_release:
        description: 'Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)'
        required: false
        default: ''
  schedule:
    - cron: '0 */3 * * *' # Every 3 hours for dms-git builds

jobs:
  upload-ppa:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y \
            debhelper \
            devscripts \
            dput \
            lftp \
            build-essential \
            fakeroot \
            dpkg-dev

      - name: Configure GPG
        env:
          GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
        run: |
          echo "$GPG_KEY" | gpg --import
          GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
          echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV

      - name: Determine packages to upload
        id: packages
        run: |
          if [[ "${{ github.event_name }}" == "schedule" ]]; then
            echo "packages=dms-git" >> $GITHUB_OUTPUT
            echo "Triggered by schedule: uploading git package"
          elif [[ -n "${{ github.event.inputs.package }}" ]]; then
            echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
            echo "Manual trigger: ${{ github.event.inputs.package }}"
          else
            echo "packages=dms-git" >> $GITHUB_OUTPUT
          fi

      - name: Upload to PPA
        env:
          REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
        run: |
          PACKAGES="${{ steps.packages.outputs.packages }}"

          cd distro/ubuntu/ppa

          if [[ "$PACKAGES" == "all" ]]; then
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "Uploading dms to PPA..."
            if [ -n "$REBUILD_RELEASE" ]; then
              echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
            fi
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            bash create-and-upload.sh "../dms" dms questing

            echo ""
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "Uploading dms-git to PPA..."
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            bash create-and-upload.sh "../dms-git" dms-git questing
          else
            PPA_NAME="$PACKAGES"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "Uploading $PACKAGES to PPA..."
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            bash create-and-upload.sh "../$PACKAGES" "$PPA_NAME" questing
          fi

      - name: Summary
        run: |
          echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY

          PACKAGES="${{ steps.packages.outputs.packages }}"
          if [[ "$PACKAGES" == "all" ]]; then
            echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
            echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
          elif [[ "$PACKAGES" == "dms" ]]; then
            echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
          elif [[ "$PACKAGES" == "dms-git" ]]; then
            echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
          fi

          if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
            echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          fi
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY
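The `create-and-upload.sh` helper is referenced but not included in this compare. As a hedged outline only, a Launchpad source upload built on the tools installed above (devscripts, dput) usually looks like the sketch below; the PPA name is inferred from the Launchpad URLs in the Summary step, and the changelog handling and version string are assumptions rather than the real script.

```bash
# Hypothetical outline of a PPA source upload (not the actual create-and-upload.sh).
# Usage: create-and-upload.sh <source dir> <package> <series>
set -euo pipefail
src="$1"; pkg="$2"; series="$3"

cd "$src"
# Record the target Ubuntu series in a new changelog entry (version string is illustrative).
dch --distribution "$series" --newversion "1.0.0~ppa1~${series}1" "Automated upload"

# Build a signed source-only package; DEBSIGN_KEYID was exported by the Configure GPG step.
debuild -S -sa -k"$DEBSIGN_KEYID"

# Push the resulting .changes file to the Launchpad PPA (name taken from the summary links).
dput "ppa:avengemedia/$pkg" ../*_source.changes
```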
.gitignore (vendored): 6 changed lines

@@ -136,3 +136,9 @@ go.work.sum
 # .vscode/

 bin/
+
+# Extracted source trees in Ubuntu package directories
+distro/ubuntu/*/dms-git-repo/
+distro/ubuntu/*/DankMaterialShell-*/
+distro/ubuntu/danklinux/*/dsearch-*/
+distro/ubuntu/danklinux/*/dgop-*/
@@ -2,28 +2,42 @@

Contributions are welcome and encouraged.

## Formatting
To contribute fork this repository, make your changes, and open a pull request.

The preferred tool for formatting files is [qmlfmt](https://github.com/jesperhh/qmlfmt) (also available on aur as qmlfmt-git). It actually kinda sucks, but `qmlformat` doesn't work with null safe operators and ternarys and pragma statements and a bunch of other things that are supported.
## VSCode Setup

We need some consistent style, so this at least gives the same formatter that Qt Creator uses.
This is a monorepo, the easiest thing to do is to open an editor in either `quickshell`, `core`, or both depending on which part of the project you are working on.

You can configure it to format on save in vscode by configuring the "custom local formatters" extension then adding this to settings json.
### QML (`quickshell` directory)

1. Install the [QML Extension](https://doc.qt.io/vscodeext/)
2. Configure `ctrl+shift+p` -> user preferences (json) with qmlls path

```json
"customLocalFormatters.formatters": [
    {
        "command": "sh -c \"qmlfmt -t 4 -i 4 -b 250 | sed 's/pragma ComponentBehavior$/pragma ComponentBehavior: Bound/g'\"",
        "languages": ["qml"]
    }
],
"[qml]": {
    "editor.defaultFormatter": "jkillian.custom-local-formatters",
    "editor.formatOnSave": true
},
{
    "qt-qml.doNotAskForQmllsDownload": true,
    "qt-qml.qmlls.customExePath": "/usr/lib/qt6/bin/qmlls"
}
```

Sometimes it just breaks code though. Like turning `"_\""` into `"_""`, so you may not want to do formatOnSave.
3. Create empty `.qmlls.ini` file in `quickshell/` directory

```bash
cd quickshell
touch .qmlls.ini
```

4. Restart dms to generate the `.qmlls.ini` file

5. Make your changes, test, and open a pull request.

### GO (`core` directory)

1. Install the [Go Extension](https://code.visualstudio.com/docs/languages/go)
2. Ensure code is formatted with `make fmt`
3. Add appropriate test coverage and ensure tests pass with `make test`
4. Run `go mod tidy`
5. Open pull request

## Pull request
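For a one-off run outside the editor, the same formatter invocation from the settings above can be used directly as a filter; the options match the settings JSON, while the file path here is just an example.

```bash
# Format one QML file with the same options the editor uses. qmlfmt reads stdin and
# writes to stdout; the sed step restores the "pragma ComponentBehavior: Bound" suffix
# that qmlfmt drops.
qmlfmt -t 4 -i 4 -b 250 < Modules/Example.qml \
  | sed 's/pragma ComponentBehavior$/pragma ComponentBehavior: Bound/g' \
  > /tmp/Example.qml && mv /tmp/Example.qml Modules/Example.qml
```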
@@ -19,7 +19,7 @@

 </div>

-DankMaterialShell is a complete desktop shell for [niri](https://github.com/YaLTeR/niri), [Hyprland](https://hyprland.org/), [MangoWC](https://github.com/DreamMaoMao/mangowc), [Sway](https://swaywm.org), and other Wayland compositors. It replaces waybar, swaylock, swayidle, mako, fuzzel, polkit, and everything else you'd normally stitch together to make a desktop.
+DankMaterialShell is a complete desktop shell for [niri](https://github.com/YaLTeR/niri), [Hyprland](https://hyprland.org/), [MangoWC](https://github.com/DreamMaoMao/mangowc), [Sway](https://swaywm.org), [labwc](https://labwc.github.io/), and other Wayland compositors. It replaces waybar, swaylock, swayidle, mako, fuzzel, polkit, and everything else you'd normally stitch together to make a desktop.

 ## Repository Structure

@@ -105,7 +105,7 @@ Extend functionality with the [plugin registry](https://plugins.danklinux.com).

 ## Supported Compositors

-Works best with [niri](https://github.com/YaLTeR/niri), [Hyprland](https://hyprland.org/), [Sway](https://swaywm.org/), and [MangoWC](https://github.com/DreamMaoMao/mangowc) with full workspace switching, overview integration, and monitor management. Other Wayland compositors work with reduced features.
+Works best with [niri](https://github.com/YaLTeR/niri), [Hyprland](https://hyprland.org/), [Sway](https://swaywm.org/), [MangoWC](https://github.com/DreamMaoMao/mangowc), and [labwc](https://labwc.github.io/) with full workspace switching, overview integration, and monitor management. Other Wayland compositors work with reduced features.

 [Compositor configuration guide](https://danklinux.com/docs/dankmaterialshell/compositors)

@@ -183,6 +183,10 @@ For documentation contributions, see [DankLinux-Docs](https://github.com/AvengeM

 - [soramanew](https://github.com/soramanew) - [Caelestia](https://github.com/caelestia-dots/shell) inspiration
 - [end-4](https://github.com/end-4) - [dots-hyprland](https://github.com/end-4/dots-hyprland) inspiration

+## Star History
+
+[](https://www.star-history.com/#AvengeMedia/DankMaterialShell&type=date&legend=top-left)
+
 ## License

 MIT License - See [LICENSE](LICENSE) for details.

@@ -368,6 +368,7 @@ func getCommonCommands() []*cobra.Command {
 		pluginsCmd,
 		dank16Cmd,
 		brightnessCmd,
+		dpmsCmd,
 		keybindsCmd,
 		greeterCmd,
 		setupCmd,
core/cmd/dms/commands_dpms.go (new file): 84 lines

@@ -0,0 +1,84 @@
package main

import (
	"fmt"

	"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
	"github.com/spf13/cobra"
)

var dpmsCmd = &cobra.Command{
	Use:   "dpms",
	Short: "Control display power management",
}

var dpmsOnCmd = &cobra.Command{
	Use:   "on [output]",
	Short: "Turn display(s) on",
	Args:  cobra.MaximumNArgs(1),
	Run:   runDPMSOn,
}

var dpmsOffCmd = &cobra.Command{
	Use:   "off [output]",
	Short: "Turn display(s) off",
	Args:  cobra.MaximumNArgs(1),
	Run:   runDPMSOff,
}

var dpmsListCmd = &cobra.Command{
	Use:   "list",
	Short: "List outputs",
	Args:  cobra.NoArgs,
	Run:   runDPMSList,
}

func init() {
	dpmsCmd.AddCommand(dpmsOnCmd, dpmsOffCmd, dpmsListCmd)
}

func runDPMSOn(cmd *cobra.Command, args []string) {
	outputName := ""
	if len(args) > 0 {
		outputName = args[0]
	}

	client, err := newDPMSClient()
	if err != nil {
		log.Fatalf("%v", err)
	}
	defer client.Close()

	if err := client.SetDPMS(outputName, true); err != nil {
		log.Fatalf("%v", err)
	}
}

func runDPMSOff(cmd *cobra.Command, args []string) {
	outputName := ""
	if len(args) > 0 {
		outputName = args[0]
	}

	client, err := newDPMSClient()
	if err != nil {
		log.Fatalf("%v", err)
	}
	defer client.Close()

	if err := client.SetDPMS(outputName, false); err != nil {
		log.Fatalf("%v", err)
	}
}

func runDPMSList(cmd *cobra.Command, args []string) {
	client, err := newDPMSClient()
	if err != nil {
		log.Fatalf("%v", err)
	}
	defer client.Close()

	for _, output := range client.ListOutputs() {
		fmt.Println(output)
	}
}
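Going by the cobra definitions above, typical invocations of the new subcommand look like this; the output name is only an example, `dms dpms list` prints the real names reported by the compositor.

```bash
# List connected outputs as seen over the Wayland connection.
dms dpms list

# Turn every output off, then a single one back on (eDP-1 is an example name).
dms dpms off
dms dpms on eDP-1
```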
core/cmd/dms/dpms_client.go (new file): 345 lines

@@ -0,0 +1,345 @@
package main

import (
	"fmt"
	"sync"
	"time"

	"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/wlr_output_power"
	wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
)

type cmd struct {
	fn   func()
	done chan error
}

type dpmsClient struct {
	display   *wlclient.Display
	ctx       *wlclient.Context
	powerMgr  *wlr_output_power.ZwlrOutputPowerManagerV1
	outputs   map[string]*outputState
	mu        sync.Mutex
	syncRound int
	done      bool
	err       error
	cmdq      chan cmd
	stopChan  chan struct{}
	wg        sync.WaitGroup
}

type outputState struct {
	wlOutput  *wlclient.Output
	powerCtrl *wlr_output_power.ZwlrOutputPowerV1
	name      string
	mode      uint32
	failed    bool
	waitCh    chan struct{}
	wantMode  *uint32
}

func (c *dpmsClient) post(fn func()) {
	done := make(chan error, 1)
	select {
	case c.cmdq <- cmd{fn: fn, done: done}:
		<-done
	case <-c.stopChan:
	}
}

func (c *dpmsClient) waylandActor() {
	defer c.wg.Done()
	for {
		select {
		case <-c.stopChan:
			return
		case cmd := <-c.cmdq:
			cmd.fn()
			close(cmd.done)
		}
	}
}

func newDPMSClient() (*dpmsClient, error) {
	display, err := wlclient.Connect("")
	if err != nil {
		return nil, fmt.Errorf("failed to connect to Wayland: %w", err)
	}

	c := &dpmsClient{
		display:  display,
		ctx:      display.Context(),
		outputs:  make(map[string]*outputState),
		cmdq:     make(chan cmd, 128),
		stopChan: make(chan struct{}),
	}

	c.wg.Add(1)
	go c.waylandActor()

	registry, err := display.GetRegistry()
	if err != nil {
		display.Context().Close()
		return nil, fmt.Errorf("failed to get registry: %w", err)
	}

	registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
		switch e.Interface {
		case wlr_output_power.ZwlrOutputPowerManagerV1InterfaceName:
			powerMgr := wlr_output_power.NewZwlrOutputPowerManagerV1(c.ctx)
			version := e.Version
			if version > 1 {
				version = 1
			}
			if err := registry.Bind(e.Name, e.Interface, version, powerMgr); err == nil {
				c.powerMgr = powerMgr
			}

		case "wl_output":
			output := wlclient.NewOutput(c.ctx)
			version := e.Version
			if version > 4 {
				version = 4
			}
			if err := registry.Bind(e.Name, e.Interface, version, output); err == nil {
				outputID := fmt.Sprintf("output-%d", output.ID())
				state := &outputState{
					wlOutput: output,
					name:     outputID,
				}

				c.mu.Lock()
				c.outputs[outputID] = state
				c.mu.Unlock()

				output.SetNameHandler(func(ev wlclient.OutputNameEvent) {
					c.mu.Lock()
					delete(c.outputs, state.name)
					state.name = ev.Name
					c.outputs[ev.Name] = state
					c.mu.Unlock()
				})
			}
		}
	})

	syncCallback, err := display.Sync()
	if err != nil {
		c.Close()
		return nil, fmt.Errorf("failed to sync display: %w", err)
	}
	syncCallback.SetDoneHandler(func(e wlclient.CallbackDoneEvent) {
		c.handleSync()
	})

	for !c.done {
		if err := c.ctx.Dispatch(); err != nil {
			c.Close()
			return nil, fmt.Errorf("dispatch error: %w", err)
		}
	}

	if c.err != nil {
		c.Close()
		return nil, c.err
	}

	return c, nil
}

func (c *dpmsClient) handleSync() {
	c.syncRound++

	switch c.syncRound {
	case 1:
		if c.powerMgr == nil {
			c.err = fmt.Errorf("wlr-output-power-management protocol not supported by compositor")
			c.done = true
			return
		}

		c.mu.Lock()
		for _, state := range c.outputs {
			powerCtrl, err := c.powerMgr.GetOutputPower(state.wlOutput)
			if err != nil {
				continue
			}
			state.powerCtrl = powerCtrl

			powerCtrl.SetModeHandler(func(e wlr_output_power.ZwlrOutputPowerV1ModeEvent) {
				c.mu.Lock()
				defer c.mu.Unlock()
				if state.powerCtrl == nil {
					return
				}
				state.mode = e.Mode
				if state.wantMode != nil && e.Mode == *state.wantMode && state.waitCh != nil {
					close(state.waitCh)
					state.wantMode = nil
				}
			})

			powerCtrl.SetFailedHandler(func(e wlr_output_power.ZwlrOutputPowerV1FailedEvent) {
				c.mu.Lock()
				defer c.mu.Unlock()
				if state.powerCtrl == nil {
					return
				}
				state.failed = true
				if state.waitCh != nil {
					close(state.waitCh)
					state.wantMode = nil
				}
			})
		}
		c.mu.Unlock()

		syncCallback, err := c.display.Sync()
		if err != nil {
			c.err = fmt.Errorf("failed to sync display: %w", err)
			c.done = true
			return
		}
		syncCallback.SetDoneHandler(func(e wlclient.CallbackDoneEvent) {
			c.handleSync()
		})

	default:
		c.done = true
	}
}

func (c *dpmsClient) ListOutputs() []string {
	c.mu.Lock()
	defer c.mu.Unlock()

	names := make([]string, 0, len(c.outputs))
	for name := range c.outputs {
		names = append(names, name)
	}
	return names
}

func (c *dpmsClient) SetDPMS(outputName string, on bool) error {
	var mode uint32
	if on {
		mode = uint32(wlr_output_power.ZwlrOutputPowerV1ModeOn)
	} else {
		mode = uint32(wlr_output_power.ZwlrOutputPowerV1ModeOff)
	}

	var setErr error
	c.post(func() {
		c.mu.Lock()
		var waitStates []*outputState

		if outputName == "" || outputName == "all" {
			if len(c.outputs) == 0 {
				c.mu.Unlock()
				setErr = fmt.Errorf("no outputs found")
				return
			}

			for _, state := range c.outputs {
				if state.powerCtrl == nil {
					continue
				}
				state.wantMode = &mode
				state.waitCh = make(chan struct{})
				state.failed = false
				waitStates = append(waitStates, state)
				state.powerCtrl.SetMode(mode)
			}
		} else {
			state, ok := c.outputs[outputName]
			if !ok {
				c.mu.Unlock()
				setErr = fmt.Errorf("output not found: %s", outputName)
				return
			}
			if state.powerCtrl == nil {
				c.mu.Unlock()
				setErr = fmt.Errorf("output %s has nil powerCtrl", outputName)
				return
			}
			state.wantMode = &mode
			state.waitCh = make(chan struct{})
			state.failed = false
			waitStates = append(waitStates, state)
			state.powerCtrl.SetMode(mode)
		}
		c.mu.Unlock()

		deadline := time.Now().Add(10 * time.Second)

		for _, state := range waitStates {
			c.mu.Lock()
			ch := state.waitCh
			c.mu.Unlock()

			done := false
			for !done {
				if err := c.ctx.Dispatch(); err != nil {
					setErr = fmt.Errorf("dispatch error: %w", err)
					return
				}

				select {
				case <-ch:
					c.mu.Lock()
					if state.failed {
						setErr = fmt.Errorf("compositor reported failed for %s", state.name)
						c.mu.Unlock()
						return
					}
					c.mu.Unlock()
					done = true
				default:
					if time.Now().After(deadline) {
						setErr = fmt.Errorf("timeout waiting for mode change on %s", state.name)
						return
					}
					time.Sleep(10 * time.Millisecond)
				}
			}
		}

		c.mu.Lock()
		for _, state := range waitStates {
			if state.powerCtrl != nil {
				state.powerCtrl.Destroy()
				state.powerCtrl = nil
			}
		}
		c.mu.Unlock()

		c.display.Roundtrip()
	})

	return setErr
}

func (c *dpmsClient) Close() {
	close(c.stopChan)
	c.wg.Wait()

	c.mu.Lock()
	defer c.mu.Unlock()

	for _, state := range c.outputs {
		if state.powerCtrl != nil {
			state.powerCtrl.Destroy()
		}
	}
	c.outputs = nil

	if c.powerMgr != nil {
		c.powerMgr.Destroy()
		c.powerMgr = nil
	}

	if c.display != nil {
		c.ctx.Close()
		c.display = nil
	}
}
@@ -57,6 +57,11 @@ func getRuntimeDir() string {
 	return os.TempDir()
 }

+func hasSystemdRun() bool {
+	_, err := exec.LookPath("systemd-run")
+	return err == nil
+}
+
 func getPIDFilePath() string {
 	return filepath.Join(getRuntimeDir(), fmt.Sprintf("danklinux-%d.pid", os.Getpid()))
 }

@@ -165,6 +170,10 @@ func runShellInteractive(session bool) {
 		cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
 	}

+	if isSessionManaged && hasSystemdRun() {
+		cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
+	}
+
 	homeDir, err := os.UserHomeDir()
 	if err == nil && os.Getenv("DMS_DISABLE_HOT_RELOAD") == "" {
 		if !strings.HasPrefix(configPath, homeDir) {

@@ -387,6 +396,10 @@ func runShellDaemon(session bool) {
 		cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
 	}

+	if isSessionManaged && hasSystemdRun() {
+		cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
+	}
+
 	homeDir, err := os.UserHomeDir()
 	if err == nil && os.Getenv("DMS_DISABLE_HOT_RELOAD") == "" {
 		if !strings.HasPrefix(configPath, homeDir) {
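DMS_DEFAULT_LAUNCH_PREFIX is only set here; how the shell consumes it is not part of this compare. The value itself is an ordinary systemd-run invocation, so as an illustration only, prepending it to an app launch amounts to the following (the application name is an arbitrary example).

```bash
# What the configured launch prefix boils down to when placed in front of a launched app.
systemd-run --user --scope foot

# The transient scope then shows up in the user manager's cgroup tree.
systemctl --user status
```

Running launched apps in their own transient scope keeps them out of the shell's cgroup, so killing or restarting the shell does not take the apps down with it.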
@@ -125,6 +125,8 @@ windowrulev2 = noborder, class:^(kitty)$
 windowrulev2 = float, class:^(firefox)$, title:^(Picture-in-Picture)$
 windowrulev2 = float, class:^(zoom)$

+# DMS windows floating by default
+windowrulev2 = float, class:^(org.quickshell)$
 windowrulev2 = opacity 0.9 0.9, floating:0, focus:0

 layerrule = noanim, ^(quickshell)$

@@ -218,6 +218,11 @@ window-rule {
     geometry-corner-radius 12
     clip-to-geometry true
 }
+// Open dms windows as floating by default
+window-rule {
+    match app-id=r#"org.quickshell$"#
+    open-floating true
+}
 binds {
     // === System & Overview ===
     Mod+D { spawn "niri" "msg" "action" "toggle-overview"; }
@@ -19,10 +19,12 @@ func init() {
	Register("fedora-asahi-remix", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
		return NewFedoraDistribution(config, logChan)
	})

	Register("bluefin", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
		return NewFedoraDistribution(config, logChan)
	})
	Register("ultramarine", "#00078b", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
		return NewFedoraDistribution(config, logChan)
	})
}

type FedoraDistribution struct {

@@ -506,6 +508,14 @@ func (f *FedoraDistribution) installDNFPackages(ctx context.Context, packages []
	f.log(fmt.Sprintf("Installing DNF packages: %s", strings.Join(packages, ", ")))

	args := []string{"dnf", "install", "-y"}

	for _, pkg := range packages {
		if pkg == "niri" || pkg == "niri-git" {
			args = append(args, "--setopt=install_weak_deps=False")
			break
		}
	}

	args = append(args, packages...)

	progressChan <- InstallProgressMsg{
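For reference, when niri (or niri-git) is in the requested set, the argument slice built above expands to a dnf command line of the following shape; the trailing package names are illustrative, not the installer's real package list.

```bash
# Equivalent command line produced by installDNFPackages when niri is requested.
# --setopt=install_weak_deps=False keeps dnf from pulling in niri's weak dependencies.
dnf install -y --setopt=install_weak_deps=False niri quickshell dgop
```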
@@ -157,6 +157,16 @@ type ZdwlIpcOutputV2 struct {
	appidHandler        ZdwlIpcOutputV2AppidHandlerFunc
	layoutSymbolHandler ZdwlIpcOutputV2LayoutSymbolHandlerFunc
	frameHandler        ZdwlIpcOutputV2FrameHandlerFunc
	fullscreenHandler   ZdwlIpcOutputV2FullscreenHandlerFunc
	floatingHandler     ZdwlIpcOutputV2FloatingHandlerFunc
	xHandler            ZdwlIpcOutputV2XHandlerFunc
	yHandler            ZdwlIpcOutputV2YHandlerFunc
	widthHandler        ZdwlIpcOutputV2WidthHandlerFunc
	heightHandler       ZdwlIpcOutputV2HeightHandlerFunc
	lastLayerHandler    ZdwlIpcOutputV2LastLayerHandlerFunc
	kbLayoutHandler     ZdwlIpcOutputV2KbLayoutHandlerFunc
	keymodeHandler      ZdwlIpcOutputV2KeymodeHandlerFunc
	scalefactorHandler  ZdwlIpcOutputV2ScalefactorHandlerFunc
}

// NewZdwlIpcOutputV2 : control dwl output

@@ -251,6 +261,60 @@ func (i *ZdwlIpcOutputV2) SetLayout(index uint32) error {
	return err
}

// Quit : Quit mango
// This request allows clients to instruct the compositor to quit mango.
func (i *ZdwlIpcOutputV2) Quit() error {
	const opcode = 4
	const _reqBufLen = 8
	var _reqBuf [_reqBufLen]byte
	l := 0
	client.PutUint32(_reqBuf[l:4], i.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
	l += 4
	err := i.Context().WriteMsg(_reqBuf[:], nil)
	return err
}

// SendDispatch : Set the active tags of this output
//
// dispatch: dispatch name.
// arg1: arg1.
// arg2: arg2.
// arg3: arg3.
// arg4: arg4.
// arg5: arg5.
func (i *ZdwlIpcOutputV2) SendDispatch(dispatch, arg1, arg2, arg3, arg4, arg5 string) error {
	const opcode = 5
	dispatchLen := client.PaddedLen(len(dispatch) + 1)
	arg1Len := client.PaddedLen(len(arg1) + 1)
	arg2Len := client.PaddedLen(len(arg2) + 1)
	arg3Len := client.PaddedLen(len(arg3) + 1)
	arg4Len := client.PaddedLen(len(arg4) + 1)
	arg5Len := client.PaddedLen(len(arg5) + 1)
	_reqBufLen := 8 + (4 + dispatchLen) + (4 + arg1Len) + (4 + arg2Len) + (4 + arg3Len) + (4 + arg4Len) + (4 + arg5Len)
	_reqBuf := make([]byte, _reqBufLen)
	l := 0
	client.PutUint32(_reqBuf[l:4], i.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
	l += 4
	client.PutString(_reqBuf[l:l+(4+dispatchLen)], dispatch)
	l += (4 + dispatchLen)
	client.PutString(_reqBuf[l:l+(4+arg1Len)], arg1)
	l += (4 + arg1Len)
	client.PutString(_reqBuf[l:l+(4+arg2Len)], arg2)
	l += (4 + arg2Len)
	client.PutString(_reqBuf[l:l+(4+arg3Len)], arg3)
	l += (4 + arg3Len)
	client.PutString(_reqBuf[l:l+(4+arg4Len)], arg4)
	l += (4 + arg4Len)
	client.PutString(_reqBuf[l:l+(4+arg5Len)], arg5)
	l += (4 + arg5Len)
	err := i.Context().WriteMsg(_reqBuf, nil)
	return err
}

type ZdwlIpcOutputV2TagState uint32

// ZdwlIpcOutputV2TagState :

@@ -399,6 +463,136 @@ func (i *ZdwlIpcOutputV2) SetFrameHandler(f ZdwlIpcOutputV2FrameHandlerFunc) {
	i.frameHandler = f
}

// ZdwlIpcOutputV2FullscreenEvent : Update fullscreen status
//
// Indicates if the selected client on this output is fullscreen.
type ZdwlIpcOutputV2FullscreenEvent struct {
	IsFullscreen uint32
}
type ZdwlIpcOutputV2FullscreenHandlerFunc func(ZdwlIpcOutputV2FullscreenEvent)

// SetFullscreenHandler : sets handler for ZdwlIpcOutputV2FullscreenEvent
func (i *ZdwlIpcOutputV2) SetFullscreenHandler(f ZdwlIpcOutputV2FullscreenHandlerFunc) {
	i.fullscreenHandler = f
}

// ZdwlIpcOutputV2FloatingEvent : Update the floating status
//
// Indicates if the selected client on this output is floating.
type ZdwlIpcOutputV2FloatingEvent struct {
	IsFloating uint32
}
type ZdwlIpcOutputV2FloatingHandlerFunc func(ZdwlIpcOutputV2FloatingEvent)

// SetFloatingHandler : sets handler for ZdwlIpcOutputV2FloatingEvent
func (i *ZdwlIpcOutputV2) SetFloatingHandler(f ZdwlIpcOutputV2FloatingHandlerFunc) {
	i.floatingHandler = f
}

// ZdwlIpcOutputV2XEvent : Update the x coordinates
//
// Indicates if x coordinates of the selected client.
type ZdwlIpcOutputV2XEvent struct {
	X int32
}
type ZdwlIpcOutputV2XHandlerFunc func(ZdwlIpcOutputV2XEvent)

// SetXHandler : sets handler for ZdwlIpcOutputV2XEvent
func (i *ZdwlIpcOutputV2) SetXHandler(f ZdwlIpcOutputV2XHandlerFunc) {
	i.xHandler = f
}

// ZdwlIpcOutputV2YEvent : Update the y coordinates
//
// Indicates if y coordinates of the selected client.
type ZdwlIpcOutputV2YEvent struct {
	Y int32
}
type ZdwlIpcOutputV2YHandlerFunc func(ZdwlIpcOutputV2YEvent)

// SetYHandler : sets handler for ZdwlIpcOutputV2YEvent
func (i *ZdwlIpcOutputV2) SetYHandler(f ZdwlIpcOutputV2YHandlerFunc) {
	i.yHandler = f
}

// ZdwlIpcOutputV2WidthEvent : Update the width
//
// Indicates if width of the selected client.
type ZdwlIpcOutputV2WidthEvent struct {
	Width int32
}
type ZdwlIpcOutputV2WidthHandlerFunc func(ZdwlIpcOutputV2WidthEvent)

// SetWidthHandler : sets handler for ZdwlIpcOutputV2WidthEvent
func (i *ZdwlIpcOutputV2) SetWidthHandler(f ZdwlIpcOutputV2WidthHandlerFunc) {
	i.widthHandler = f
}

// ZdwlIpcOutputV2HeightEvent : Update the height
//
// Indicates if height of the selected client.
type ZdwlIpcOutputV2HeightEvent struct {
	Height int32
}
type ZdwlIpcOutputV2HeightHandlerFunc func(ZdwlIpcOutputV2HeightEvent)

// SetHeightHandler : sets handler for ZdwlIpcOutputV2HeightEvent
func (i *ZdwlIpcOutputV2) SetHeightHandler(f ZdwlIpcOutputV2HeightHandlerFunc) {
	i.heightHandler = f
}

// ZdwlIpcOutputV2LastLayerEvent : last map layer.
//
// last map layer.
type ZdwlIpcOutputV2LastLayerEvent struct {
	LastLayer string
}
type ZdwlIpcOutputV2LastLayerHandlerFunc func(ZdwlIpcOutputV2LastLayerEvent)

// SetLastLayerHandler : sets handler for ZdwlIpcOutputV2LastLayerEvent
func (i *ZdwlIpcOutputV2) SetLastLayerHandler(f ZdwlIpcOutputV2LastLayerHandlerFunc) {
	i.lastLayerHandler = f
}

// ZdwlIpcOutputV2KbLayoutEvent : current keyboard layout.
//
// current keyboard layout.
type ZdwlIpcOutputV2KbLayoutEvent struct {
	KbLayout string
}
type ZdwlIpcOutputV2KbLayoutHandlerFunc func(ZdwlIpcOutputV2KbLayoutEvent)

// SetKbLayoutHandler : sets handler for ZdwlIpcOutputV2KbLayoutEvent
func (i *ZdwlIpcOutputV2) SetKbLayoutHandler(f ZdwlIpcOutputV2KbLayoutHandlerFunc) {
	i.kbLayoutHandler = f
}

// ZdwlIpcOutputV2KeymodeEvent : current keybind mode.
//
// current keybind mode.
type ZdwlIpcOutputV2KeymodeEvent struct {
	Keymode string
}
type ZdwlIpcOutputV2KeymodeHandlerFunc func(ZdwlIpcOutputV2KeymodeEvent)

// SetKeymodeHandler : sets handler for ZdwlIpcOutputV2KeymodeEvent
func (i *ZdwlIpcOutputV2) SetKeymodeHandler(f ZdwlIpcOutputV2KeymodeHandlerFunc) {
	i.keymodeHandler = f
}

// ZdwlIpcOutputV2ScalefactorEvent : scale factor of monitor.
//
// scale factor of monitor.
type ZdwlIpcOutputV2ScalefactorEvent struct {
	Scalefactor uint32
}
type ZdwlIpcOutputV2ScalefactorHandlerFunc func(ZdwlIpcOutputV2ScalefactorEvent)

// SetScalefactorHandler : sets handler for ZdwlIpcOutputV2ScalefactorEvent
func (i *ZdwlIpcOutputV2) SetScalefactorHandler(f ZdwlIpcOutputV2ScalefactorHandlerFunc) {
	i.scalefactorHandler = f
}

func (i *ZdwlIpcOutputV2) Dispatch(opcode uint32, fd int, data []byte) {
	switch opcode {
	case 0:

@@ -487,5 +681,111 @@ func (i *ZdwlIpcOutputV2) Dispatch(opcode uint32, fd int, data []byte) {
		var e ZdwlIpcOutputV2FrameEvent

		i.frameHandler(e)
	case 8:
		if i.fullscreenHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2FullscreenEvent
		l := 0
		e.IsFullscreen = client.Uint32(data[l : l+4])
		l += 4

		i.fullscreenHandler(e)
	case 9:
		if i.floatingHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2FloatingEvent
		l := 0
		e.IsFloating = client.Uint32(data[l : l+4])
		l += 4

		i.floatingHandler(e)
	case 10:
		if i.xHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2XEvent
		l := 0
		e.X = int32(client.Uint32(data[l : l+4]))
		l += 4

		i.xHandler(e)
	case 11:
		if i.yHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2YEvent
		l := 0
		e.Y = int32(client.Uint32(data[l : l+4]))
		l += 4

		i.yHandler(e)
	case 12:
		if i.widthHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2WidthEvent
		l := 0
		e.Width = int32(client.Uint32(data[l : l+4]))
		l += 4

		i.widthHandler(e)
	case 13:
		if i.heightHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2HeightEvent
		l := 0
		e.Height = int32(client.Uint32(data[l : l+4]))
		l += 4

		i.heightHandler(e)
	case 14:
		if i.lastLayerHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2LastLayerEvent
		l := 0
		lastLayerLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
		l += 4
		e.LastLayer = client.String(data[l : l+lastLayerLen])
		l += lastLayerLen

		i.lastLayerHandler(e)
	case 15:
		if i.kbLayoutHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2KbLayoutEvent
		l := 0
		kbLayoutLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
		l += 4
		e.KbLayout = client.String(data[l : l+kbLayoutLen])
		l += kbLayoutLen

		i.kbLayoutHandler(e)
	case 16:
		if i.keymodeHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2KeymodeEvent
		l := 0
		keymodeLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
		l += 4
		e.Keymode = client.String(data[l : l+keymodeLen])
		l += keymodeLen

		i.keymodeHandler(e)
	case 17:
		if i.scalefactorHandler == nil {
			return
		}
		var e ZdwlIpcOutputV2ScalefactorEvent
		l := 0
		e.Scalefactor = client.Uint32(data[l : l+4])
		l += 4

		i.scalefactorHandler(e)
	}
}
core/internal/proto/wlr_output_power/output_power.go (new file): 283 lines

@@ -0,0 +1,283 @@
// Generated by go-wayland-scanner
// https://github.com/yaslama/go-wayland/cmd/go-wayland-scanner
// XML file : internal/proto/xml/wlr-output-power-management-unstable-v1.xml
//
// wlr_output_power_management_unstable_v1 Protocol Copyright:
//
// Copyright © 2019 Purism SPC
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice (including the next
// paragraph) shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

package wlr_output_power

import "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"

// ZwlrOutputPowerManagerV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ZwlrOutputPowerManagerV1InterfaceName = "zwlr_output_power_manager_v1"

// ZwlrOutputPowerManagerV1 : manager to create per-output power management
//
// This interface is a manager that allows creating per-output power
// management mode controls.
type ZwlrOutputPowerManagerV1 struct {
	client.BaseProxy
}

// NewZwlrOutputPowerManagerV1 : manager to create per-output power management
//
// This interface is a manager that allows creating per-output power
// management mode controls.
func NewZwlrOutputPowerManagerV1(ctx *client.Context) *ZwlrOutputPowerManagerV1 {
	zwlrOutputPowerManagerV1 := &ZwlrOutputPowerManagerV1{}
	ctx.Register(zwlrOutputPowerManagerV1)
	return zwlrOutputPowerManagerV1
}

// GetOutputPower : get a power management for an output
//
// Create an output power management mode control that can be used to
// adjust the power management mode for a given output.
func (i *ZwlrOutputPowerManagerV1) GetOutputPower(output *client.Output) (*ZwlrOutputPowerV1, error) {
	id := NewZwlrOutputPowerV1(i.Context())
	const opcode = 0
	const _reqBufLen = 8 + 4 + 4
	var _reqBuf [_reqBufLen]byte
	l := 0
	client.PutUint32(_reqBuf[l:4], i.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
	l += 4
	client.PutUint32(_reqBuf[l:l+4], id.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], output.ID())
	l += 4
	err := i.Context().WriteMsg(_reqBuf[:], nil)
	return id, err
}

// Destroy : destroy the manager
//
// All objects created by the manager will still remain valid, until their
// appropriate destroy request has been called.
func (i *ZwlrOutputPowerManagerV1) Destroy() error {
	defer i.Context().Unregister(i)
	const opcode = 1
	const _reqBufLen = 8
	var _reqBuf [_reqBufLen]byte
	l := 0
	client.PutUint32(_reqBuf[l:4], i.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
	l += 4
	err := i.Context().WriteMsg(_reqBuf[:], nil)
	return err
}

// ZwlrOutputPowerV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ZwlrOutputPowerV1InterfaceName = "zwlr_output_power_v1"

// ZwlrOutputPowerV1 : adjust power management mode for an output
//
// This object offers requests to set the power management mode of
// an output.
type ZwlrOutputPowerV1 struct {
	client.BaseProxy
	modeHandler   ZwlrOutputPowerV1ModeHandlerFunc
	failedHandler ZwlrOutputPowerV1FailedHandlerFunc
}

// NewZwlrOutputPowerV1 : adjust power management mode for an output
//
// This object offers requests to set the power management mode of
// an output.
func NewZwlrOutputPowerV1(ctx *client.Context) *ZwlrOutputPowerV1 {
	zwlrOutputPowerV1 := &ZwlrOutputPowerV1{}
	ctx.Register(zwlrOutputPowerV1)
	return zwlrOutputPowerV1
}

// SetMode : Set an outputs power save mode
//
// Set an output's power save mode to the given mode. The mode change
// is effective immediately. If the output does not support the given
// mode a failed event is sent.
//
// mode: the power save mode to set
func (i *ZwlrOutputPowerV1) SetMode(mode uint32) error {
	const opcode = 0
	const _reqBufLen = 8 + 4
	var _reqBuf [_reqBufLen]byte
	l := 0
	client.PutUint32(_reqBuf[l:4], i.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(mode))
	l += 4
	err := i.Context().WriteMsg(_reqBuf[:], nil)
	return err
}

// Destroy : destroy this power management
//
// Destroys the output power management mode control object.
func (i *ZwlrOutputPowerV1) Destroy() error {
	defer i.Context().Unregister(i)
	const opcode = 1
	const _reqBufLen = 8
	var _reqBuf [_reqBufLen]byte
	l := 0
	client.PutUint32(_reqBuf[l:4], i.ID())
	l += 4
	client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
	l += 4
	err := i.Context().WriteMsg(_reqBuf[:], nil)
	return err
}

type ZwlrOutputPowerV1Mode uint32

// ZwlrOutputPowerV1Mode :
const (
	// ZwlrOutputPowerV1ModeOff : Output is turned off.
	ZwlrOutputPowerV1ModeOff ZwlrOutputPowerV1Mode = 0
	// ZwlrOutputPowerV1ModeOn : Output is turned on, no power saving
	ZwlrOutputPowerV1ModeOn ZwlrOutputPowerV1Mode = 1
)

func (e ZwlrOutputPowerV1Mode) Name() string {
	switch e {
	case ZwlrOutputPowerV1ModeOff:
		return "off"
	case ZwlrOutputPowerV1ModeOn:
		return "on"
	default:
		return ""
	}
}

func (e ZwlrOutputPowerV1Mode) Value() string {
	switch e {
	case ZwlrOutputPowerV1ModeOff:
		return "0"
	case ZwlrOutputPowerV1ModeOn:
		return "1"
	default:
		return ""
	}
}

func (e ZwlrOutputPowerV1Mode) String() string {
	return e.Name() + "=" + e.Value()
}

type ZwlrOutputPowerV1Error uint32

// ZwlrOutputPowerV1Error :
const (
	// ZwlrOutputPowerV1ErrorInvalidMode : nonexistent power save mode
	ZwlrOutputPowerV1ErrorInvalidMode ZwlrOutputPowerV1Error = 1
)

func (e ZwlrOutputPowerV1Error) Name() string {
	switch e {
	case ZwlrOutputPowerV1ErrorInvalidMode:
		return "invalid_mode"
	default:
		return ""
	}
}

func (e ZwlrOutputPowerV1Error) Value() string {
	switch e {
	case ZwlrOutputPowerV1ErrorInvalidMode:
		return "1"
	default:
		return ""
	}
}

func (e ZwlrOutputPowerV1Error) String() string {
	return e.Name() + "=" + e.Value()
}

// ZwlrOutputPowerV1ModeEvent : Report a power management mode change
//
// Report the power management mode change of an output.
//
// The mode event is sent after an output changed its power
// management mode. The reason can be a client using set_mode or the
// compositor deciding to change an output's mode.
// This event is also sent immediately when the object is created
// so the client is informed about the current power management mode.
type ZwlrOutputPowerV1ModeEvent struct {
	Mode uint32
}
type ZwlrOutputPowerV1ModeHandlerFunc func(ZwlrOutputPowerV1ModeEvent)

// SetModeHandler : sets handler for ZwlrOutputPowerV1ModeEvent
func (i *ZwlrOutputPowerV1) SetModeHandler(f ZwlrOutputPowerV1ModeHandlerFunc) {
	i.modeHandler = f
}

// ZwlrOutputPowerV1FailedEvent : object no longer valid
//
// This event indicates that the output power management mode control
// is no longer valid. This can happen for a number of reasons,
// including:
// - The output doesn't support power management
// - Another client already has exclusive power management mode control
//   for this output
// - The output disappeared
//
// Upon receiving this event, the client should destroy this object.
type ZwlrOutputPowerV1FailedEvent struct{}
type ZwlrOutputPowerV1FailedHandlerFunc func(ZwlrOutputPowerV1FailedEvent)

// SetFailedHandler : sets handler for ZwlrOutputPowerV1FailedEvent
func (i *ZwlrOutputPowerV1) SetFailedHandler(f ZwlrOutputPowerV1FailedHandlerFunc) {
	i.failedHandler = f
}

func (i *ZwlrOutputPowerV1) Dispatch(opcode uint32, fd int, data []byte) {
	switch opcode {
	case 0:
		if i.modeHandler == nil {
			return
		}
		var e ZwlrOutputPowerV1ModeEvent
		l := 0
		e.Mode = client.Uint32(data[l : l+4])
		l += 4

		i.modeHandler(e)
	case 1:
		if i.failedHandler == nil {
			return
		}
		var e ZwlrOutputPowerV1FailedEvent

		i.failedHandler(e)
	}
}
@@ -19,7 +19,7 @@ I would probably just submit raphi's patchset but I don't think that would be po
|
||||
reset.
|
||||
</description>
|
||||
|
||||
<interface name="zdwl_ipc_manager_v2" version="1">
|
||||
<interface name="zdwl_ipc_manager_v2" version="2">
|
||||
<description summary="manage dwl state">
|
||||
This interface is exposed as a global in wl_registry.
|
||||
|
||||
@@ -60,7 +60,7 @@ I would probably just submit raphi's patchset but I don't think that would be po
|
||||
</event>
|
||||
</interface>
|
||||
|
||||
<interface name="zdwl_ipc_output_v2" version="1">
|
||||
<interface name="zdwl_ipc_output_v2" version="2">
|
||||
<description summary="control dwl output">
|
||||
Observe and control a dwl output.
|
||||
|
||||
@@ -162,5 +162,91 @@ I would probably just submit raphi's patchset but I don't think that would be po
|
||||
<description summary="Set the layout of this output"/>
|
||||
<arg name="index" type="uint" summary="index of a layout recieved by dwl_ipc_manager.layout"/>
|
||||
</request>
|
||||
|
||||
<request name="quit" since="2">
|
||||
<description summary="Quit mango">This request allows clients to instruct the compositor to quit mango.</description>
|
||||
</request>
|
||||
|
||||
<request name="dispatch" since="2">
|
||||
<description summary="Set the active tags of this output"/>
|
||||
<arg name="dispatch" type="string" summary="dispatch name."/>
|
||||
<arg name="arg1" type="string" summary="arg1."/>
|
||||
<arg name="arg2" type="string" summary="arg2."/>
|
||||
<arg name="arg3" type="string" summary="arg3."/>
|
||||
<arg name="arg4" type="string" summary="arg4."/>
|
||||
<arg name="arg5" type="string" summary="arg5."/>
|
||||
</request>
|
||||
|
||||
<!-- Version 2 -->
|
||||
<event name="fullscreen" since="2">
|
||||
<description summary="Update fullscreen status">
|
||||
Indicates if the selected client on this output is fullscreen.
|
||||
</description>
|
||||
<arg name="is_fullscreen" type="uint" summary="If the selected client is fullscreen. Nonzero is valid, zero invalid"/>
|
||||
</event>
|
||||
|
||||
<event name="floating" since="2">
|
||||
<description summary="Update the floating status">
|
||||
Indicates if the selected client on this output is floating.
|
||||
</description>
|
||||
<arg name="is_floating" type="uint" summary="If the selected client is floating. Nonzero is valid, zero invalid"/>
|
||||
</event>
|
||||
|
||||
<event name="x" since="2">
|
||||
<description summary="Update the x coordinates">
|
||||
Indicates if x coordinates of the selected client.
|
||||
</description>
|
||||
<arg name="x" type="int" summary="x coordinate of the selected client"/>
|
||||
</event>
|
||||
|
||||
<event name="y" since="2">
|
||||
<description summary="Update the y coordinates">
|
||||
Indicates the y coordinate of the selected client.
|
||||
</description>
|
||||
<arg name="y" type="int" summary="y coordinate of the selected client"/>
|
||||
</event>
|
||||
|
||||
<event name="width" since="2">
|
||||
<description summary="Update the width">
|
||||
Indicates the width of the selected client.
|
||||
</description>
|
||||
<arg name="width" type="int" summary="width of the selected client"/>
|
||||
</event>
|
||||
|
||||
<event name="height" since="2">
|
||||
<description summary="Update the height">
|
||||
Indicates the height of the selected client.
|
||||
</description>
|
||||
<arg name="height" type="int" summary="height of the selected client"/>
|
||||
</event>
|
||||
|
||||
<event name="last_layer" since="2">
|
||||
<description summary="last map layer.">
|
||||
last map layer.
|
||||
</description>
|
||||
<arg name="last_layer" type="string" summary="last map layer."/>
|
||||
</event>
|
||||
|
||||
<event name="kb_layout" since="2">
|
||||
<description summary="current keyboard layout.">
|
||||
current keyboard layout.
|
||||
</description>
|
||||
<arg name="kb_layout" type="string" summary="current keyboard layout."/>
|
||||
</event>
|
||||
|
||||
<event name="keymode" since="2">
|
||||
<description summary="current keybind mode.">
|
||||
current keybind mode.
|
||||
</description>
|
||||
<arg name="keymode" type="string" summary="current keybind mode."/>
|
||||
</event>
|
||||
|
||||
<event name="scalefactor" since="2">
|
||||
<description summary="scale factor of monitor.">
|
||||
scale factor of monitor.
|
||||
</description>
|
||||
<arg name="scalefactor" type="uint" summary="scale factor of monitor."/>
|
||||
</event>
|
||||
|
||||
</interface>
|
||||
</protocol>
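The version-2 events above are consumed through the generated dwl_ipc Go bindings in the same Set<Event>Handler style used later in this changeset for kb_layout and keymode. A hedged sketch for the fullscreen and floating events follows; the event type and field names are assumed from the generator's naming convention, and the outState fields are purely illustrative, not part of this diff.

// Hypothetical sketch, not part of this change; names follow the assumed
// generator convention for zdwl_ipc_output_v2 version-2 events.
ipcOutput.SetFullscreenHandler(func(e dwl_ipc.ZdwlIpcOutputV2FullscreenEvent) {
	// Per the protocol, nonzero means the selected client is fullscreen.
	outState.fullscreen = e.IsFullscreen != 0 // illustrative field
})
ipcOutput.SetFloatingHandler(func(e dwl_ipc.ZdwlIpcOutputV2FloatingEvent) {
	outState.floating = e.IsFloating != 0 // illustrative field
})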
@@ -0,0 +1,128 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<protocol name="wlr_output_power_management_unstable_v1">
|
||||
<copyright>
|
||||
Copyright © 2019 Purism SPC
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the "Software"),
|
||||
to deal in the Software without restriction, including without limitation
|
||||
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice (including the next
|
||||
paragraph) shall be included in all copies or substantial portions of the
|
||||
Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
</copyright>
|
||||
|
||||
<description summary="Control power management modes of outputs">
|
||||
This protocol allows clients to control power management modes
|
||||
of outputs that are currently part of the compositor space. The
|
||||
intent is to allow special clients like desktop shells to power
|
||||
down outputs when the system is idle.
|
||||
|
||||
To modify outputs not currently part of the compositor space see
|
||||
wlr-output-management.
|
||||
|
||||
Warning! The protocol described in this file is experimental and
|
||||
backward incompatible changes may be made. Backward compatible changes
|
||||
may be added together with the corresponding interface version bump.
|
||||
Backward incompatible changes are done by bumping the version number in
|
||||
the protocol and interface names and resetting the interface version.
|
||||
Once the protocol is to be declared stable, the 'z' prefix and the
|
||||
version number in the protocol and interface names are removed and the
|
||||
interface version number is reset.
|
||||
</description>
|
||||
|
||||
<interface name="zwlr_output_power_manager_v1" version="1">
|
||||
<description summary="manager to create per-output power management">
|
||||
This interface is a manager that allows creating per-output power
|
||||
management mode controls.
|
||||
</description>
|
||||
|
||||
<request name="get_output_power">
|
||||
<description summary="get a power management for an output">
|
||||
Create an output power management mode control that can be used to
|
||||
adjust the power management mode for a given output.
|
||||
</description>
|
||||
<arg name="id" type="new_id" interface="zwlr_output_power_v1"/>
|
||||
<arg name="output" type="object" interface="wl_output"/>
|
||||
</request>
|
||||
|
||||
<request name="destroy" type="destructor">
|
||||
<description summary="destroy the manager">
|
||||
All objects created by the manager will still remain valid, until their
|
||||
appropriate destroy request has been called.
|
||||
</description>
|
||||
</request>
|
||||
</interface>
|
||||
|
||||
<interface name="zwlr_output_power_v1" version="1">
|
||||
<description summary="adjust power management mode for an output">
|
||||
This object offers requests to set the power management mode of
|
||||
an output.
|
||||
</description>
|
||||
|
||||
<enum name="mode">
|
||||
<entry name="off" value="0"
|
||||
summary="Output is turned off."/>
|
||||
<entry name="on" value="1"
|
||||
summary="Output is turned on, no power saving"/>
|
||||
</enum>
|
||||
|
||||
<enum name="error">
|
||||
<entry name="invalid_mode" value="1" summary="nonexistent power save mode"/>
|
||||
</enum>
|
||||
|
||||
<request name="set_mode">
|
||||
<description summary="Set an outputs power save mode">
|
||||
Set an output's power save mode to the given mode. The mode change
|
||||
is effective immediately. If the output does not support the given
|
||||
mode a failed event is sent.
|
||||
</description>
|
||||
<arg name="mode" type="uint" enum="mode" summary="the power save mode to set"/>
|
||||
</request>
|
||||
|
||||
<event name="mode">
|
||||
<description summary="Report a power management mode change">
|
||||
Report the power management mode change of an output.
|
||||
|
||||
The mode event is sent after an output changed its power
|
||||
management mode. The reason can be a client using set_mode or the
|
||||
compositor deciding to change an output's mode.
|
||||
This event is also sent immediately when the object is created
|
||||
so the client is informed about the current power management mode.
|
||||
</description>
|
||||
<arg name="mode" type="uint" enum="mode"
|
||||
summary="the output's new power management mode"/>
|
||||
</event>
|
||||
|
||||
<event name="failed">
|
||||
<description summary="object no longer valid">
|
||||
This event indicates that the output power management mode control
|
||||
is no longer valid. This can happen for a number of reasons,
|
||||
including:
|
||||
- The output doesn't support power management
|
||||
- Another client already has exclusive power management mode control
|
||||
for this output
|
||||
- The output disappeared
|
||||
|
||||
Upon receiving this event, the client should destroy this object.
|
||||
</description>
|
||||
</event>
|
||||
|
||||
<request name="destroy" type="destructor">
|
||||
<description summary="destroy this power management">
|
||||
Destroys the output power management mode control object.
|
||||
</description>
|
||||
</request>
|
||||
</interface>
|
||||
</protocol>
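Tying the requests and events together, here is a rough client-side sketch in the style of the generated bindings earlier in this changeset. The GetOutputPower, SetMode, and Destroy wrappers are assumed from the protocol requests and the generator's usual naming; they are not verified against the real package.

// Hypothetical sketch only, not code from this diff.
func turnOutputOff(manager *ZwlrOutputPowerManagerV1, output *wlclient.Output) error {
	// get_output_power: obtain a per-output power control object.
	power, err := manager.GetOutputPower(output)
	if err != nil {
		return err
	}
	power.SetModeHandler(func(e ZwlrOutputPowerV1ModeEvent) {
		log.Printf("compositor reports power mode %d (0=off, 1=on)", e.Mode)
	})
	power.SetFailedHandler(func(e ZwlrOutputPowerV1FailedEvent) {
		power.Destroy() // control is invalid once "failed" is received
	})
	// set_mode: request the output be turned off; the compositor answers
	// with a mode event on success, or failed if the mode is unsupported.
	return power.SetMode(0)
}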
@@ -165,12 +165,11 @@ func (a *BluezAgent) DisplayPasskey(device dbus.ObjectPath, passkey uint32, ente
|
||||
log.Infof("[BluezAgent] DisplayPasskey: device=%s, passkey=%06d, entered=%d", device, passkey, entered)
|
||||
|
||||
if entered == 0 {
|
||||
pk := passkey
|
||||
_, err := a.promptFor(device, "display-passkey", []string{}, nil)
|
||||
passkeyStr := strconv.FormatUint(uint64(passkey), 10)
|
||||
_, err := a.promptFor(device, "display-passkey", []string{}, &passkeyStr)
|
||||
if err != nil {
|
||||
log.Warnf("[BluezAgent] DisplayPasskey acknowledgment failed: %v", err)
|
||||
}
|
||||
_ = pk
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -179,7 +178,8 @@ func (a *BluezAgent) DisplayPasskey(device dbus.ObjectPath, passkey uint32, ente
|
||||
func (a *BluezAgent) RequestConfirmation(device dbus.ObjectPath, passkey uint32) *dbus.Error {
|
||||
log.Infof("[BluezAgent] RequestConfirmation: device=%s, passkey=%06d", device, passkey)
|
||||
|
||||
secrets, err := a.promptFor(device, "confirm", []string{"decision"}, nil)
|
||||
passkeyStr := strconv.FormatUint(uint64(passkey), 10)
|
||||
secrets, err := a.promptFor(device, "confirm", []string{"decision"}, &passkeyStr)
|
||||
if err != nil {
|
||||
log.Warnf("[BluezAgent] RequestConfirmation failed: %v", err)
|
||||
return a.errorFrom(err)
|
||||
|
||||
@@ -354,21 +354,25 @@ func (m *Manager) handleDevicePropertiesChanged(path dbus.ObjectPath, changed ma
|
||||
_, hasTrusted := changed["Trusted"]
|
||||
|
||||
if hasPaired {
|
||||
if paired, ok := pairedVar.Value().(bool); ok && paired {
|
||||
devicePath := string(path)
|
||||
_, wasPending := m.pendingPairings.LoadAndDelete(devicePath)
|
||||
devicePath := string(path)
|
||||
if paired, ok := pairedVar.Value().(bool); ok {
|
||||
if paired {
|
||||
_, wasPending := m.pendingPairings.LoadAndDelete(devicePath)
|
||||
|
||||
if wasPending {
|
||||
select {
|
||||
case m.eventQueue <- func() {
|
||||
time.Sleep(300 * time.Millisecond)
|
||||
log.Infof("[Bluetooth] Auto-connecting newly paired device: %s", devicePath)
|
||||
if err := m.ConnectDevice(devicePath); err != nil {
|
||||
log.Warnf("[Bluetooth] Auto-connect failed: %v", err)
|
||||
if wasPending {
|
||||
select {
|
||||
case m.eventQueue <- func() {
|
||||
time.Sleep(300 * time.Millisecond)
|
||||
log.Infof("[Bluetooth] Auto-connecting newly paired device: %s", devicePath)
|
||||
if err := m.ConnectDevice(devicePath); err != nil {
|
||||
log.Warnf("[Bluetooth] Auto-connect failed: %v", err)
|
||||
}
|
||||
}:
|
||||
default:
|
||||
}
|
||||
}:
|
||||
default:
|
||||
}
|
||||
} else {
|
||||
m.pendingPairings.Delete(devicePath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -100,8 +100,8 @@ func (m *Manager) setupRegistry() error {
|
||||
log.Infof("DWL: found %s", dwl_ipc.ZdwlIpcManagerV2InterfaceName)
|
||||
manager := dwl_ipc.NewZdwlIpcManagerV2(m.ctx)
|
||||
version := e.Version
|
||||
if version > 1 {
|
||||
version = 1
|
||||
if version > 2 {
|
||||
version = 2
|
||||
}
|
||||
if err := registry.Bind(e.Name, e.Interface, version, manager); err == nil {
|
||||
dwlMgr = manager
|
||||
@@ -282,6 +282,14 @@ func (m *Manager) setupOutput(manager *dwl_ipc.ZdwlIpcManagerV2, output *wlclien
|
||||
outState.layoutSymbol = e.Layout
|
||||
})
|
||||
|
||||
ipcOutput.SetKbLayoutHandler(func(e dwl_ipc.ZdwlIpcOutputV2KbLayoutEvent) {
|
||||
outState.kbLayout = e.KbLayout
|
||||
})
|
||||
|
||||
ipcOutput.SetKeymodeHandler(func(e dwl_ipc.ZdwlIpcOutputV2KeymodeEvent) {
|
||||
outState.keymode = e.Keymode
|
||||
})
|
||||
|
||||
ipcOutput.SetFrameHandler(func(e dwl_ipc.ZdwlIpcOutputV2FrameEvent) {
|
||||
m.updateState()
|
||||
})
|
||||
@@ -310,6 +318,8 @@ func (m *Manager) updateState() {
|
||||
LayoutSymbol: out.layoutSymbol,
|
||||
Title: out.title,
|
||||
AppID: out.appID,
|
||||
KbLayout: out.kbLayout,
|
||||
Keymode: out.keymode,
|
||||
}
|
||||
|
||||
if out.active != 0 {
|
||||
|
||||
@@ -22,6 +22,8 @@ type OutputState struct {
|
||||
LayoutSymbol string `json:"layoutSymbol"`
|
||||
Title string `json:"title"`
|
||||
AppID string `json:"appId"`
|
||||
KbLayout string `json:"kbLayout"`
|
||||
Keymode string `json:"keymode"`
|
||||
}
|
||||
|
||||
type State struct {
|
||||
@@ -73,6 +75,8 @@ type outputState struct {
|
||||
layoutSymbol string
|
||||
title string
|
||||
appID string
|
||||
kbLayout string
|
||||
keymode string
|
||||
}
|
||||
|
||||
func (m *Manager) GetState() State {
|
||||
@@ -147,6 +151,12 @@ func stateChanged(old, new *State) bool {
|
||||
if oldOut.AppID != newOut.AppID {
|
||||
return true
|
||||
}
|
||||
if oldOut.KbLayout != newOut.KbLayout {
|
||||
return true
|
||||
}
|
||||
if oldOut.Keymode != newOut.Keymode {
|
||||
return true
|
||||
}
|
||||
if len(oldOut.Tags) != len(newOut.Tags) {
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -9,6 +9,35 @@ import (
|
||||
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
|
||||
)
|
||||
|
||||
func CheckCapability() bool {
|
||||
display, err := wlclient.Connect("")
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
defer display.Destroy()
|
||||
|
||||
registry, err := display.GetRegistry()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
defer registry.Destroy()
|
||||
|
||||
found := false
|
||||
|
||||
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
|
||||
if e.Interface == ext_workspace.ExtWorkspaceManagerV1InterfaceName {
|
||||
found = true
|
||||
}
|
||||
})
|
||||
|
||||
// Roundtrip to ensure all registry events are processed
|
||||
if err := display.Roundtrip(); err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return found
|
||||
}
|
||||
|
||||
func NewManager(display *wlclient.Display) (*Manager, error) {
|
||||
m := &Manager{
|
||||
display: display,
|
||||
@@ -75,6 +104,9 @@ func (m *Manager) setupRegistry() error {
|
||||
output.SetNameHandler(func(ev wlclient.OutputNameEvent) {
|
||||
m.outputNames.Store(outputID, ev.Name)
|
||||
log.Debugf("ExtWorkspace: Output %d (%s) name received", outputID, ev.Name)
|
||||
m.post(func() {
|
||||
m.updateState()
|
||||
})
|
||||
})
|
||||
}
|
||||
return
|
||||
@@ -295,14 +327,8 @@ func (m *Manager) updateState() {
|
||||
|
||||
outputs := make([]string, 0)
|
||||
for outputID := range group.outputIDs {
|
||||
if name, ok := m.outputNames.Load(outputID); ok {
|
||||
if name != "" {
|
||||
outputs = append(outputs, name)
|
||||
} else {
|
||||
outputs = append(outputs, fmt.Sprintf("output-%d", outputID))
|
||||
}
|
||||
} else {
|
||||
outputs = append(outputs, fmt.Sprintf("output-%d", outputID))
|
||||
if name, ok := m.outputNames.Load(outputID); ok && name != "" {
|
||||
outputs = append(outputs, name)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/errdefs"
|
||||
@@ -125,8 +126,9 @@ func (a *SecretAgent) GetSecrets(
|
||||
connType, displayName, vpnSvc := readConnTypeAndName(conn)
|
||||
ssid := readSSID(conn)
|
||||
fields := fieldsNeeded(settingName, hints)
|
||||
vpnPasswordFlags := readVPNPasswordFlags(conn, settingName)
|
||||
|
||||
log.Infof("[SecretAgent] connType=%s, name=%s, vpnSvc=%s, fields=%v, flags=%d", connType, displayName, vpnSvc, fields, flags)
|
||||
log.Infof("[SecretAgent] connType=%s, name=%s, vpnSvc=%s, fields=%v, flags=%d, vpnPasswordFlags=%d", connType, displayName, vpnSvc, fields, flags, vpnPasswordFlags)
|
||||
|
||||
if a.backend != nil {
|
||||
a.backend.stateMutex.RLock()
|
||||
@@ -163,57 +165,70 @@ func (a *SecretAgent) GetSecrets(
|
||||
}
|
||||
|
||||
if len(fields) == 0 {
|
||||
// For VPN connections with no hints, we can't provide a proper UI.
|
||||
// Defer to other agents (like nm-applet or VPN-specific auth dialogs)
|
||||
// that can handle the VPN type properly (e.g., OpenConnect with SAML, etc.)
|
||||
if settingName == "vpn" {
|
||||
log.Infof("[SecretAgent] VPN with empty hints - deferring to other agents for %s", vpnSvc)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
}
|
||||
if a.backend != nil {
|
||||
a.backend.stateMutex.RLock()
|
||||
isConnectingVPN := a.backend.state.IsConnectingVPN
|
||||
a.backend.stateMutex.RUnlock()
|
||||
|
||||
const (
|
||||
NM_SETTING_SECRET_FLAG_NONE = 0
|
||||
NM_SETTING_SECRET_FLAG_AGENT_OWNED = 1
|
||||
NM_SETTING_SECRET_FLAG_NOT_SAVED = 2
|
||||
NM_SETTING_SECRET_FLAG_NOT_REQUIRED = 4
|
||||
)
|
||||
if !isConnectingVPN {
|
||||
log.Infof("[SecretAgent] VPN with empty hints - deferring to other agents for %s", vpnSvc)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
}
|
||||
|
||||
var passwordFlags uint32 = 0xFFFF
|
||||
switch settingName {
|
||||
case "802-11-wireless-security":
|
||||
if wifiSecSettings, ok := conn["802-11-wireless-security"]; ok {
|
||||
if flagsVariant, ok := wifiSecSettings["psk-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
}
|
||||
case "802-1x":
|
||||
if dot1xSettings, ok := conn["802-1x"]; ok {
|
||||
if flagsVariant, ok := dot1xSettings["password-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
log.Infof("[SecretAgent] VPN with empty hints but we're connecting - prompting for password")
|
||||
fields = []string{"password"}
|
||||
} else {
|
||||
log.Infof("[SecretAgent] VPN with empty hints - deferring to other agents for %s", vpnSvc)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
}
|
||||
}
|
||||
|
||||
if passwordFlags == 0xFFFF {
|
||||
log.Warnf("[SecretAgent] Could not determine password-flags for empty hints - returning NoSecrets error")
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_NOT_REQUIRED != 0 {
|
||||
log.Infof("[SecretAgent] Secrets not required (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_AGENT_OWNED != 0 {
|
||||
log.Warnf("[SecretAgent] Secrets are agent-owned but we don't store secrets (flags=%d) - returning NoSecrets error", passwordFlags)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else {
|
||||
log.Infof("[SecretAgent] No secrets needed, using system stored secrets (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
if len(fields) == 0 {
|
||||
const (
|
||||
NM_SETTING_SECRET_FLAG_NONE = 0
|
||||
NM_SETTING_SECRET_FLAG_AGENT_OWNED = 1
|
||||
NM_SETTING_SECRET_FLAG_NOT_SAVED = 2
|
||||
NM_SETTING_SECRET_FLAG_NOT_REQUIRED = 4
|
||||
)
|
||||
|
||||
var passwordFlags uint32 = 0xFFFF
|
||||
switch settingName {
|
||||
case "802-11-wireless-security":
|
||||
if wifiSecSettings, ok := conn["802-11-wireless-security"]; ok {
|
||||
if flagsVariant, ok := wifiSecSettings["psk-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
}
|
||||
case "802-1x":
|
||||
if dot1xSettings, ok := conn["802-1x"]; ok {
|
||||
if flagsVariant, ok := dot1xSettings["password-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if passwordFlags == 0xFFFF {
|
||||
log.Warnf("[SecretAgent] Could not determine password-flags for empty hints - returning NoSecrets error")
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_NOT_REQUIRED != 0 {
|
||||
log.Infof("[SecretAgent] Secrets not required (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_AGENT_OWNED != 0 {
|
||||
log.Warnf("[SecretAgent] Secrets are agent-owned but we don't store secrets (flags=%d) - returning NoSecrets error", passwordFlags)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else {
|
||||
log.Infof("[SecretAgent] No secrets needed, using system stored secrets (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -343,13 +358,11 @@ func (a *SecretAgent) GetSecrets(
|
||||
// Update settings based on type
|
||||
switch settingName {
|
||||
case "vpn":
|
||||
// Set password-flags=0 and add secrets to vpn section
|
||||
vpn, ok := existingSettings["vpn"]
|
||||
if !ok {
|
||||
vpn = make(map[string]dbus.Variant)
|
||||
}
|
||||
|
||||
// Get existing data map (vpn.data is string->string)
|
||||
var data map[string]string
|
||||
if dataVariant, ok := vpn["data"]; ok {
|
||||
if dm, ok := dataVariant.Value().(map[string]string); ok {
|
||||
@@ -364,11 +377,9 @@ func (a *SecretAgent) GetSecrets(
|
||||
data = make(map[string]string)
|
||||
}
|
||||
|
||||
// Update password-flags to 0 (system-stored)
|
||||
data["password-flags"] = "0"
|
||||
vpn["data"] = dbus.MakeVariant(data)
|
||||
|
||||
// Add secrets (vpn.secrets is string->string)
|
||||
secs := make(map[string]string)
|
||||
for k, v := range reply.Secrets {
|
||||
secs[k] = v
|
||||
@@ -379,14 +390,12 @@ func (a *SecretAgent) GetSecrets(
|
||||
log.Infof("[SecretAgent] Updated VPN settings: password-flags=0, secrets with %d fields", len(secs))
|
||||
|
||||
case "802-11-wireless-security":
|
||||
// Set psk-flags=0 for WiFi
|
||||
wifiSec, ok := existingSettings["802-11-wireless-security"]
|
||||
if !ok {
|
||||
wifiSec = make(map[string]dbus.Variant)
|
||||
}
|
||||
wifiSec["psk-flags"] = dbus.MakeVariant(uint32(0))
|
||||
|
||||
// Add PSK secret
|
||||
if psk, ok := reply.Secrets["psk"]; ok {
|
||||
wifiSec["psk"] = dbus.MakeVariant(psk)
|
||||
log.Infof("[SecretAgent] Updated WiFi settings: psk-flags=0")
|
||||
@@ -394,14 +403,12 @@ func (a *SecretAgent) GetSecrets(
|
||||
settings["802-11-wireless-security"] = wifiSec
|
||||
|
||||
case "802-1x":
|
||||
// Set password-flags=0 for 802.1x
|
||||
dot1x, ok := existingSettings["802-1x"]
|
||||
if !ok {
|
||||
dot1x = make(map[string]dbus.Variant)
|
||||
}
|
||||
dot1x["password-flags"] = dbus.MakeVariant(uint32(0))
|
||||
|
||||
// Add password secret
|
||||
if password, ok := reply.Secrets["password"]; ok {
|
||||
dot1x["password"] = dbus.MakeVariant(password)
|
||||
log.Infof("[SecretAgent] Updated 802.1x settings: password-flags=0")
|
||||
@@ -507,6 +514,39 @@ func fieldsNeeded(setting string, hints []string) []string {
|
||||
}
|
||||
}
|
||||
|
||||
func readVPNPasswordFlags(conn map[string]nmVariantMap, settingName string) uint32 {
|
||||
if settingName != "vpn" {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
vpnSettings, ok := conn["vpn"]
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
dataVariant, ok := vpnSettings["data"]
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
dataMap, ok := dataVariant.Value().(map[string]string)
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
flagsStr, ok := dataMap["password-flags"]
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
flags64, err := strconv.ParseUint(flagsStr, 10, 32)
|
||||
if err != nil {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
return uint32(flags64)
|
||||
}
|
||||
|
||||
func reasonFromFlags(flags uint32) string {
|
||||
const (
|
||||
NM_SECRET_AGENT_GET_SECRETS_FLAG_NONE = 0x0
|
||||
|
||||
@@ -235,7 +235,7 @@ func (b *NetworkManagerBackend) ConnectVPN(uuidOrName string, singleActive bool)
|
||||
}
|
||||
|
||||
nm := b.nmConn.(gonetworkmanager.NetworkManager)
|
||||
activeConn, err := nm.ActivateConnection(targetConn, nil, nil)
|
||||
_, err = nm.ActivateConnection(targetConn, nil, nil)
|
||||
if err != nil {
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnectingVPN = false
|
||||
@@ -249,20 +249,6 @@ func (b *NetworkManagerBackend) ConnectVPN(uuidOrName string, singleActive bool)
|
||||
return fmt.Errorf("failed to activate VPN: %w", err)
|
||||
}
|
||||
|
||||
if activeConn != nil {
|
||||
state, _ := activeConn.GetPropertyState()
|
||||
if state == 2 {
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnectingVPN = false
|
||||
b.state.ConnectingVPNUUID = ""
|
||||
b.stateMutex.Unlock()
|
||||
b.ListActiveVPN()
|
||||
if b.onStateChange != nil {
|
||||
b.onStateChange()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -140,8 +140,20 @@ func RouteRequest(conn net.Conn, req models.Request) {
|
||||
|
||||
if strings.HasPrefix(req.Method, "extworkspace.") {
|
||||
if extWorkspaceManager == nil {
|
||||
models.RespondError(conn, req.ID, "extworkspace manager not initialized")
|
||||
return
|
||||
if extWorkspaceAvailable.Load() {
|
||||
extWorkspaceInitMutex.Lock()
|
||||
if extWorkspaceManager == nil {
|
||||
if err := InitializeExtWorkspaceManager(); err != nil {
|
||||
extWorkspaceInitMutex.Unlock()
|
||||
models.RespondError(conn, req.ID, "extworkspace manager not available")
|
||||
return
|
||||
}
|
||||
}
|
||||
extWorkspaceInitMutex.Unlock()
|
||||
} else {
|
||||
models.RespondError(conn, req.ID, "extworkspace manager not initialized")
|
||||
return
|
||||
}
|
||||
}
|
||||
extWorkspaceReq := extworkspace.Request{
|
||||
ID: req.ID,
|
||||
|
||||
@@ -31,7 +31,7 @@ import (
|
||||
"github.com/AvengeMedia/DankMaterialShell/core/pkg/syncmap"
|
||||
)
|
||||
|
||||
const APIVersion = 18
|
||||
const APIVersion = 19
|
||||
|
||||
type Capabilities struct {
|
||||
Capabilities []string `json:"capabilities"`
|
||||
@@ -63,6 +63,8 @@ var wlContext *wlcontext.SharedContext
|
||||
var capabilitySubscribers syncmap.Map[string, chan ServerInfo]
|
||||
var cupsSubscribers syncmap.Map[string, bool]
|
||||
var cupsSubscriberCount atomic.Int32
|
||||
var extWorkspaceAvailable atomic.Bool
|
||||
var extWorkspaceInitMutex sync.Mutex
|
||||
|
||||
func getSocketDir() string {
|
||||
if runtime := os.Getenv("XDG_RUNTIME_DIR"); runtime != "" {
|
||||
@@ -361,7 +363,7 @@ func getCapabilities() Capabilities {
|
||||
caps = append(caps, "dwl")
|
||||
}
|
||||
|
||||
if extWorkspaceManager != nil {
|
||||
if extWorkspaceAvailable.Load() {
|
||||
caps = append(caps, "extworkspace")
|
||||
}
|
||||
|
||||
@@ -411,7 +413,7 @@ func getServerInfo() ServerInfo {
|
||||
caps = append(caps, "dwl")
|
||||
}
|
||||
|
||||
if extWorkspaceManager != nil {
|
||||
if extWorkspaceAvailable.Load() {
|
||||
caps = append(caps, "extworkspace")
|
||||
}
|
||||
|
||||
@@ -810,12 +812,14 @@ func handleSubscribe(conn net.Conn, req models.Request) {
|
||||
}
|
||||
|
||||
if shouldSubscribe("extworkspace") {
|
||||
if extWorkspaceManager == nil {
|
||||
if err := InitializeExtWorkspaceManager(); err != nil {
|
||||
log.Warnf("Failed to initialize ExtWorkspace manager for subscription: %v", err)
|
||||
} else {
|
||||
notifyCapabilityChange()
|
||||
if extWorkspaceManager == nil && extWorkspaceAvailable.Load() {
|
||||
extWorkspaceInitMutex.Lock()
|
||||
if extWorkspaceManager == nil {
|
||||
if err := InitializeExtWorkspaceManager(); err != nil {
|
||||
log.Warnf("Failed to initialize ExtWorkspace manager for subscription: %v", err)
|
||||
}
|
||||
}
|
||||
extWorkspaceInitMutex.Unlock()
|
||||
}
|
||||
|
||||
if extWorkspaceManager != nil {
|
||||
@@ -1141,11 +1145,18 @@ func Start(printDocs bool) error {
|
||||
log.Info(" cups.cancelJob - Cancel job (params: printerName, jobID)")
|
||||
log.Info(" cups.purgeJobs - Cancel all jobs (params: printerName)")
|
||||
log.Info("DWL:")
|
||||
log.Info(" dwl.getState - Get current dwl state (tags, windows, layouts)")
|
||||
log.Info(" dwl.getState - Get current dwl state (tags, windows, layouts, keyboard)")
|
||||
log.Info(" dwl.setTags - Set active tags (params: output, tagmask, toggleTagset)")
|
||||
log.Info(" dwl.setClientTags - Set focused client tags (params: output, andTags, xorTags)")
|
||||
log.Info(" dwl.setLayout - Set layout (params: output, index)")
|
||||
log.Info(" dwl.subscribe - Subscribe to dwl state changes (streaming)")
|
||||
log.Info(" Output state includes:")
|
||||
log.Info(" - tags : Tag states (active, clients, focused)")
|
||||
log.Info(" - layoutSymbol : Current layout name")
|
||||
log.Info(" - title : Focused window title")
|
||||
log.Info(" - appId : Focused window app ID")
|
||||
log.Info(" - kbLayout : Current keyboard layout")
|
||||
log.Info(" - keymode : Current keybind mode")
|
||||
log.Info("ExtWorkspace:")
|
||||
log.Info(" extworkspace.getState - Get current workspace state (groups, workspaces)")
|
||||
log.Info(" extworkspace.activateWorkspace - Activate workspace (params: groupID, workspaceID)")
|
||||
@@ -1241,6 +1252,14 @@ func Start(printDocs bool) error {
|
||||
log.Debugf("DWL manager unavailable: %v", err)
|
||||
}
|
||||
|
||||
if extworkspace.CheckCapability() {
|
||||
extWorkspaceAvailable.Store(true)
|
||||
log.Info("ExtWorkspace capability detected and will be available on subscription")
|
||||
} else {
|
||||
log.Debug("ExtWorkspace capability not available")
|
||||
extWorkspaceAvailable.Store(false)
|
||||
}
|
||||
|
||||
if err := InitializeWlrOutputManager(); err != nil {
|
||||
log.Debugf("WlrOutput manager unavailable: %v", err)
|
||||
}
|
||||
|
||||
@@ -607,41 +607,6 @@ func (m *Manager) transitionWorker() {
|
||||
|
||||
if finalTarget == targetTemp {
|
||||
log.Debugf("Transition complete: now at %dK", targetTemp)
|
||||
|
||||
m.configMutex.RLock()
|
||||
enabled := m.config.Enabled
|
||||
identityTemp := m.config.HighTemp
|
||||
m.configMutex.RUnlock()
|
||||
|
||||
if !enabled && targetTemp == identityTemp && m.controlsInitialized {
|
||||
m.post(func() {
|
||||
log.Info("Destroying gamma controls after transition to identity")
|
||||
m.outputs.Range(func(id uint32, out *outputState) bool {
|
||||
if out.gammaControl != nil {
|
||||
control := out.gammaControl.(*wlr_gamma_control.ZwlrGammaControlV1)
|
||||
control.Destroy()
|
||||
log.Debugf("Destroyed gamma control for output %d", id)
|
||||
}
|
||||
return true
|
||||
})
|
||||
m.outputs.Range(func(key uint32, value *outputState) bool {
|
||||
m.outputs.Delete(key)
|
||||
return true
|
||||
})
|
||||
m.controlsInitialized = false
|
||||
|
||||
m.transitionMutex.Lock()
|
||||
m.currentTemp = identityTemp
|
||||
m.targetTemp = identityTemp
|
||||
m.transitionMutex.Unlock()
|
||||
|
||||
if _, err := m.display.Sync(); err != nil {
|
||||
log.Warnf("Failed to sync Wayland display after destroying controls: %v", err)
|
||||
}
|
||||
|
||||
log.Info("All gamma controls destroyed")
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1262,46 +1227,33 @@ func (m *Manager) SetEnabled(enabled bool) {
|
||||
}
|
||||
} else {
|
||||
if m.controlsInitialized {
|
||||
m.configMutex.RLock()
|
||||
identityTemp := m.config.HighTemp
|
||||
m.configMutex.RUnlock()
|
||||
|
||||
m.transitionMutex.RLock()
|
||||
currentTemp := m.currentTemp
|
||||
m.transitionMutex.RUnlock()
|
||||
|
||||
if currentTemp == identityTemp {
|
||||
m.post(func() {
|
||||
log.Infof("Already at %dK, destroying gamma controls immediately", identityTemp)
|
||||
m.outputs.Range(func(id uint32, out *outputState) bool {
|
||||
if out.gammaControl != nil {
|
||||
control := out.gammaControl.(*wlr_gamma_control.ZwlrGammaControlV1)
|
||||
control.Destroy()
|
||||
log.Debugf("Destroyed gamma control for output %d", id)
|
||||
}
|
||||
return true
|
||||
})
|
||||
m.outputs.Range(func(key uint32, value *outputState) bool {
|
||||
m.outputs.Delete(key)
|
||||
return true
|
||||
})
|
||||
m.controlsInitialized = false
|
||||
|
||||
m.transitionMutex.Lock()
|
||||
m.currentTemp = identityTemp
|
||||
m.targetTemp = identityTemp
|
||||
m.transitionMutex.Unlock()
|
||||
|
||||
if _, err := m.display.Sync(); err != nil {
|
||||
log.Warnf("Failed to sync Wayland display after destroying controls: %v", err)
|
||||
m.post(func() {
|
||||
log.Info("Disabling gamma, destroying controls immediately")
|
||||
m.outputs.Range(func(id uint32, out *outputState) bool {
|
||||
if out.gammaControl != nil {
|
||||
control := out.gammaControl.(*wlr_gamma_control.ZwlrGammaControlV1)
|
||||
control.Destroy()
|
||||
log.Debugf("Destroyed gamma control for output %d", id)
|
||||
}
|
||||
|
||||
log.Info("All gamma controls destroyed")
|
||||
return true
|
||||
})
|
||||
} else {
|
||||
log.Infof("Disabling: transitioning to %dK before destroying controls", identityTemp)
|
||||
m.startTransition(identityTemp)
|
||||
}
|
||||
m.outputs.Range(func(key uint32, value *outputState) bool {
|
||||
m.outputs.Delete(key)
|
||||
return true
|
||||
})
|
||||
m.controlsInitialized = false
|
||||
|
||||
m.configMutex.RLock()
|
||||
identityTemp := m.config.HighTemp
|
||||
m.configMutex.RUnlock()
|
||||
|
||||
m.transitionMutex.Lock()
|
||||
m.currentTemp = identityTemp
|
||||
m.targetTemp = identityTemp
|
||||
m.transitionMutex.Unlock()
|
||||
|
||||
log.Info("All gamma controls destroyed")
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
24
distro/debian/dms-git/_service
Normal file
@@ -0,0 +1,24 @@
|
||||
<services>
|
||||
<!-- Pull full git repository for master branch -->
|
||||
<service name="tar_scm" mode="disabled">
|
||||
<param name="scm">git</param>
|
||||
<param name="url">https://github.com/AvengeMedia/DankMaterialShell.git</param>
|
||||
<param name="revision">master</param>
|
||||
<param name="filename">dms-git-source</param>
|
||||
</service>
|
||||
<service name="recompress" mode="disabled">
|
||||
<param name="file">*.tar</param>
|
||||
<param name="compression">gz</param>
|
||||
</service>
|
||||
<!-- Download pre-built binaries (fallback for Debian 13 with Go 1.22) -->
|
||||
<service name="download_url">
|
||||
<param name="protocol">https</param>
|
||||
<param name="host">github.com</param>
|
||||
<param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-amd64.gz</param>
|
||||
</service>
|
||||
<service name="download_url">
|
||||
<param name="protocol">https</param>
|
||||
<param name="host">github.com</param>
|
||||
<param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-arm64.gz</param>
|
||||
</service>
|
||||
</services>
|
||||
8
distro/debian/dms-git/debian/changelog
Normal file
@@ -0,0 +1,8 @@
|
||||
dms-git (0.6.2+git) nightly; urgency=medium
|
||||
|
||||
* Build dms binary from source for true git version strings
|
||||
* Match Fedora COPR git build behavior
|
||||
* Now shows proper git version (e.g., v0.6.2-11-g12e91534)
|
||||
* Add golang-go and make as build dependencies
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> Fri, 22 Nov 2025 00:00:00 -0500
|
||||
50
distro/debian/dms-git/debian/control
Normal file
@@ -0,0 +1,50 @@
|
||||
Source: dms-git
|
||||
Section: x11
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git
|
||||
|
||||
Package: dms-git
|
||||
Architecture: amd64 arm64
|
||||
Depends: ${misc:Depends},
|
||||
quickshell-git | quickshell,
|
||||
accountsservice,
|
||||
cava,
|
||||
cliphist,
|
||||
danksearch,
|
||||
dgop,
|
||||
matugen,
|
||||
qml6-module-qtcore,
|
||||
qml6-module-qtmultimedia,
|
||||
qml6-module-qtqml,
|
||||
qml6-module-qtquick,
|
||||
qml6-module-qtquick-controls,
|
||||
qml6-module-qtquick-dialogs,
|
||||
qml6-module-qtquick-effects,
|
||||
qml6-module-qtquick-layouts,
|
||||
qml6-module-qtquick-templates,
|
||||
qml6-module-qtquick-window,
|
||||
qt6ct,
|
||||
wl-clipboard
|
||||
Provides: dms
|
||||
Conflicts: dms
|
||||
Replaces: dms
|
||||
Description: DankMaterialShell - Modern Wayland Desktop Shell (git nightly)
|
||||
DMS (DankMaterialShell) is a feature-rich desktop shell built on
|
||||
Quickshell, providing a modern and customizable user interface for
|
||||
Wayland compositors like niri, hyprland, and sway.
|
||||
.
|
||||
This is the nightly/git version built from the latest master branch.
|
||||
.
|
||||
Features include:
|
||||
- Material Design inspired UI
|
||||
- Customizable themes and appearance
|
||||
- Built-in application launcher
|
||||
- System tray and notifications
|
||||
- Network and Bluetooth management
|
||||
- Audio controls
|
||||
- Systemd integration
|
||||
27
distro/debian/dms-git/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dms
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/DankMaterialShell
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/debian/dms-git/debian/files
Normal file
@@ -0,0 +1 @@
|
||||
dms-git_0.6.0+git2061.5ddea836ppa1_source.buildinfo x11 optional
|
||||
54
distro/debian/dms-git/debian/rules
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
|
||||
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
|
||||
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
if [ "$(DEB_HOST_ARCH)" = "amd64" ]; then \
|
||||
if [ -f dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c dms-distropkg-amd64.gz > dms; \
|
||||
elif [ -f ../SOURCES/dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c ../SOURCES/dms-distropkg-amd64.gz > dms; \
|
||||
else \
|
||||
echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1; \
|
||||
fi \
|
||||
elif [ "$(DEB_HOST_ARCH)" = "arm64" ]; then \
|
||||
if [ -f dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c dms-distropkg-arm64.gz > dms; \
|
||||
elif [ -f ../SOURCES/dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c ../SOURCES/dms-distropkg-arm64.gz > dms; \
|
||||
else \
|
||||
echo "ERROR: dms-distropkg-arm64.gz not found!" && exit 1; \
|
||||
fi \
|
||||
else \
|
||||
echo "Unsupported architecture: $(DEB_HOST_ARCH)" && exit 1; \
|
||||
fi
|
||||
chmod +x dms
|
||||
|
||||
override_dh_auto_install:
|
||||
install -Dm755 dms debian/dms-git/usr/bin/dms
|
||||
|
||||
mkdir -p debian/dms-git/usr/share/quickshell/dms debian/dms-git/usr/lib/systemd/user
|
||||
if [ -d quickshell ]; then \
|
||||
cp -r quickshell/* debian/dms-git/usr/share/quickshell/dms/; \
|
||||
install -Dm644 quickshell/assets/systemd/dms.service debian/dms-git/usr/lib/systemd/user/dms.service; \
|
||||
elif [ -d dms-git-source/quickshell ]; then \
|
||||
cp -r dms-git-source/quickshell/* debian/dms-git/usr/share/quickshell/dms/; \
|
||||
install -Dm644 dms-git-source/quickshell/assets/systemd/dms.service debian/dms-git/usr/lib/systemd/user/dms.service; \
|
||||
else \
|
||||
echo "ERROR: quickshell directory not found (checked root and dms-git-source/)!" && \
|
||||
echo "Contents of current directory:" && ls -la && \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
rm -rf debian/dms-git/usr/share/quickshell/dms/core \
|
||||
debian/dms-git/usr/share/quickshell/dms/distro
|
||||
|
||||
override_dh_auto_clean:
|
||||
rm -f dms
|
||||
[ ! -d dms-git-source ] || rm -rf dms-git-source
|
||||
dh_auto_clean
|
||||
1
distro/debian/dms-git/debian/source/format
Normal file
@@ -0,0 +1 @@
|
||||
3.0 (native)
|
||||
1
distro/debian/dms-git/debian/source/include-binaries
Normal file
@@ -0,0 +1 @@
|
||||
dms-distropkg-amd64.gz
|
||||
4
distro/debian/dms-git/debian/source/options
Normal file
@@ -0,0 +1,4 @@
|
||||
# Include files that are normally excluded by .gitignore
|
||||
# These are needed for the build process on Launchpad
|
||||
tar-ignore = !dms-distropkg-amd64.gz
|
||||
tar-ignore = !dms-git-repo
|
||||
21
distro/debian/dms/_service
Normal file
@@ -0,0 +1,21 @@
|
||||
<services>
|
||||
<!-- Download source tarball from GitHub releases -->
|
||||
<service name="download_url">
|
||||
<param name="protocol">https</param>
|
||||
<param name="host">github.com</param>
|
||||
<param name="path">/AvengeMedia/DankMaterialShell/archive/refs/tags/v0.6.2.tar.gz</param>
|
||||
<param name="filename">dms-source.tar.gz</param>
|
||||
</service>
|
||||
<!-- Download amd64 binary -->
|
||||
<service name="download_url">
|
||||
<param name="protocol">https</param>
|
||||
<param name="host">github.com</param>
|
||||
<param name="path">/AvengeMedia/DankMaterialShell/releases/download/v0.6.2/dms-distropkg-amd64.gz</param>
|
||||
</service>
|
||||
<!-- Download arm64 binary -->
|
||||
<service name="download_url">
|
||||
<param name="protocol">https</param>
|
||||
<param name="host">github.com</param>
|
||||
<param name="path">/AvengeMedia/DankMaterialShell/releases/download/v0.6.2/dms-distropkg-arm64.gz</param>
|
||||
</service>
|
||||
</services>
|
||||
7
distro/debian/dms/debian/changelog
Normal file
@@ -0,0 +1,7 @@
|
||||
dms (0.6.2) stable; urgency=medium
|
||||
|
||||
* Update to v0.6.2 release
|
||||
* Fix binary download paths for OBS builds
|
||||
* Native format: removed revisions
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> Tue, 19 Nov 2025 10:00:00 -0500
|
||||
47
distro/debian/dms/debian/control
Normal file
@@ -0,0 +1,47 @@
|
||||
Source: dms
|
||||
Section: x11
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git
|
||||
|
||||
Package: dms
|
||||
Architecture: amd64
|
||||
Depends: ${misc:Depends},
|
||||
quickshell-git | quickshell,
|
||||
accountsservice,
|
||||
cava,
|
||||
cliphist,
|
||||
danksearch,
|
||||
dgop,
|
||||
matugen,
|
||||
qml6-module-qtcore,
|
||||
qml6-module-qtmultimedia,
|
||||
qml6-module-qtqml,
|
||||
qml6-module-qtquick,
|
||||
qml6-module-qtquick-controls,
|
||||
qml6-module-qtquick-dialogs,
|
||||
qml6-module-qtquick-effects,
|
||||
qml6-module-qtquick-layouts,
|
||||
qml6-module-qtquick-templates,
|
||||
qml6-module-qtquick-window,
|
||||
qt6ct,
|
||||
wl-clipboard
|
||||
Conflicts: dms-git
|
||||
Replaces: dms-git
|
||||
Description: DankMaterialShell - Modern Wayland Desktop Shell
|
||||
DMS (DankMaterialShell) is a feature-rich desktop shell built on
|
||||
Quickshell, providing a modern and customizable user interface for
|
||||
Wayland compositors like niri, hyprland, and sway.
|
||||
.
|
||||
Features include:
|
||||
- Material Design inspired UI
|
||||
- Customizable themes and appearance
|
||||
- Built-in application launcher
|
||||
- System tray and notifications
|
||||
- Network and Bluetooth management
|
||||
- Audio controls
|
||||
- Systemd integration
|
||||
27
distro/debian/dms/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dms
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/DankMaterialShell
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/debian/dms/debian/files
Normal file
@@ -0,0 +1 @@
|
||||
dms_0.6.0ppa2_source.buildinfo x11 optional
|
||||
64
distro/debian/dms/debian/rules
Executable file
@@ -0,0 +1,64 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
|
||||
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
|
||||
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
if [ "$(DEB_HOST_ARCH)" = "amd64" ]; then \
|
||||
if [ -f dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c dms-distropkg-amd64.gz > dms; \
|
||||
elif [ -f ../SOURCES/dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c ../SOURCES/dms-distropkg-amd64.gz > dms; \
|
||||
elif [ -f ../../SOURCES/dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c ../../SOURCES/dms-distropkg-amd64.gz > dms; \
|
||||
else \
|
||||
echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1; \
|
||||
fi \
|
||||
elif [ "$(DEB_HOST_ARCH)" = "arm64" ]; then \
|
||||
if [ -f dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c dms-distropkg-arm64.gz > dms; \
|
||||
elif [ -f ../SOURCES/dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c ../SOURCES/dms-distropkg-arm64.gz > dms; \
|
||||
elif [ -f ../../SOURCES/dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c ../../SOURCES/dms-distropkg-arm64.gz > dms; \
|
||||
else \
|
||||
echo "ERROR: dms-distropkg-arm64.gz not found!" && exit 1; \
|
||||
fi \
|
||||
else \
|
||||
echo "Unsupported architecture: $(DEB_HOST_ARCH)" && exit 1; \
|
||||
fi
|
||||
chmod +x dms
|
||||
|
||||
if [ ! -d DankMaterialShell-$(UPSTREAM_VERSION) ]; then \
|
||||
if [ -f ../SOURCES/dms-source.tar.gz ]; then \
|
||||
tar -xzf ../SOURCES/dms-source.tar.gz; \
|
||||
elif [ -f dms-source.tar.gz ]; then \
|
||||
tar -xzf dms-source.tar.gz; \
|
||||
fi; \
|
||||
fi
|
||||
|
||||
|
||||
override_dh_auto_install:
|
||||
install -Dm755 dms debian/dms/usr/bin/dms
|
||||
|
||||
mkdir -p debian/dms/usr/share/quickshell/dms debian/dms/usr/lib/systemd/user
|
||||
if [ -d DankMaterialShell-$(UPSTREAM_VERSION) ]; then \
|
||||
cp -r DankMaterialShell-$(UPSTREAM_VERSION)/quickshell/* debian/dms/usr/share/quickshell/dms/; \
|
||||
install -Dm644 DankMaterialShell-$(UPSTREAM_VERSION)/quickshell/assets/systemd/dms.service debian/dms/usr/lib/systemd/user/dms.service; \
|
||||
else \
|
||||
echo "ERROR: DankMaterialShell-$(UPSTREAM_VERSION) directory not found!" && \
|
||||
echo "Contents of current directory:" && ls -la && \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
rm -rf debian/dms/usr/share/quickshell/dms/core \
|
||||
debian/dms/usr/share/quickshell/dms/distro
|
||||
|
||||
override_dh_auto_clean:
|
||||
rm -f dms
|
||||
rm -rf DankMaterialShell-$(UPSTREAM_VERSION)
|
||||
dh_auto_clean
|
||||
1
distro/debian/dms/debian/source/format
Normal file
@@ -0,0 +1 @@
|
||||
3.0 (native)
|
||||
2
distro/debian/dms/debian/source/include-binaries
Normal file
@@ -0,0 +1,2 @@
|
||||
dms-distropkg-amd64.gz
|
||||
dms-source.tar.gz
|
||||
4
distro/debian/dms/debian/source/options
Normal file
@@ -0,0 +1,4 @@
|
||||
# Include files that are normally excluded by .gitignore
|
||||
# These are needed for the build process on Launchpad
|
||||
tar-ignore = !dms-distropkg-amd64.gz
|
||||
tar-ignore = !dms-source.tar.gz
|
||||
33
distro/nix/common.nix
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
dmsPkgs,
|
||||
...
|
||||
}: let
|
||||
cfg = config.programs.dankMaterialShell;
|
||||
in {
|
||||
qmlPath = "${dmsPkgs.dankMaterialShell}/etc/xdg/quickshell/dms";
|
||||
|
||||
packages =
|
||||
[
|
||||
pkgs.material-symbols
|
||||
pkgs.inter
|
||||
pkgs.fira-code
|
||||
|
||||
pkgs.ddcutil
|
||||
pkgs.libsForQt5.qt5ct
|
||||
pkgs.kdePackages.qt6ct
|
||||
|
||||
dmsPkgs.dmsCli
|
||||
]
|
||||
++ lib.optional cfg.enableSystemMonitoring dmsPkgs.dgop
|
||||
++ lib.optionals cfg.enableClipboard [pkgs.cliphist pkgs.wl-clipboard]
|
||||
++ lib.optionals cfg.enableVPN [pkgs.glib pkgs.networkmanager]
|
||||
++ lib.optional cfg.enableBrightnessControl pkgs.brightnessctl
|
||||
++ lib.optional cfg.enableColorPicker pkgs.hyprpicker
|
||||
++ lib.optional cfg.enableDynamicTheming pkgs.matugen
|
||||
++ lib.optional cfg.enableAudioWavelength pkgs.cava
|
||||
++ lib.optional cfg.enableCalendarEvents pkgs.khal
|
||||
++ lib.optional cfg.enableSystemSound pkgs.kdePackages.qtmultimedia;
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
{
|
||||
config,
|
||||
pkgs,
|
||||
lib,
|
||||
dmsPkgs,
|
||||
...
|
||||
}: let
|
||||
cfg = config.programs.dankMaterialShell;
|
||||
jsonFormat = pkgs.formats.json { };
|
||||
in {
|
||||
imports = [
|
||||
(lib.mkRemovedOptionModule ["programs" "dankMaterialShell" "enableNightMode"] "Night mode is now always available.")
|
||||
(lib.mkRenamedOptionModule ["programs" "dankMaterialShell" "enableSystemd"] ["programs" "dankMaterialShell" "systemd" "enable"])
|
||||
];
|
||||
options.programs.dankMaterialShell = with lib.types; {
|
||||
enable = lib.mkEnableOption "DankMaterialShell";
|
||||
|
||||
systemd = {
|
||||
enable = lib.mkEnableOption "DankMaterialShell systemd startup";
|
||||
restartIfChanged = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Auto-restart dms.service when dankMaterialShell changes";
|
||||
};
|
||||
};
|
||||
enableSystemMonitoring = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to use system monitoring widgets";
|
||||
};
|
||||
enableClipboard = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to use the clipboard widget";
|
||||
};
|
||||
enableVPN = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to use the VPN widget";
|
||||
};
|
||||
enableBrightnessControl = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to have brightness/backlight support";
|
||||
};
|
||||
enableColorPicker = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to have color picking support";
|
||||
};
|
||||
enableDynamicTheming = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to have dynamic theming support";
|
||||
};
|
||||
enableAudioWavelength = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to have audio waveleng support";
|
||||
};
|
||||
enableCalendarEvents = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add calendar events support via khal";
|
||||
};
|
||||
enableSystemSound = lib.mkOption {
|
||||
type = bool;
|
||||
default = true;
|
||||
description = "Add needed dependencies to have system sound support";
|
||||
};
|
||||
quickshell = {
|
||||
package = lib.mkPackageOption pkgs "quickshell" {};
|
||||
};
|
||||
|
||||
default = {
|
||||
settings = lib.mkOption {
|
||||
type = jsonFormat.type;
|
||||
default = { };
|
||||
description = "The default settings are only read if the settings.json file don't exist";
|
||||
};
|
||||
session = lib.mkOption {
|
||||
type = jsonFormat.type;
|
||||
default = { };
|
||||
description = "The default session are only read if the session.json file don't exist";
|
||||
};
|
||||
};
|
||||
|
||||
plugins = lib.mkOption {
|
||||
type = attrsOf (types.submodule ({ config, ... }: {
|
||||
options = {
|
||||
enable = lib.mkOption {
|
||||
type = types.bool;
|
||||
default = true;
|
||||
description = "Whether to link this plugin";
|
||||
};
|
||||
src = lib.mkOption {
|
||||
type = types.path;
|
||||
description = "Source to link to DMS plugins directory";
|
||||
};
|
||||
};
|
||||
}));
|
||||
default = {};
|
||||
description = "DMS Plugins to install";
|
||||
};
|
||||
};
|
||||
|
||||
config = lib.mkIf cfg.enable
|
||||
{
|
||||
programs.quickshell = {
|
||||
enable = true;
|
||||
package = cfg.quickshell.package;
|
||||
|
||||
configs.dms = "${dmsPkgs.dankMaterialShell}/etc/xdg/quickshell/dms";
|
||||
};
|
||||
|
||||
systemd.user.services.dms = lib.mkIf cfg.systemd.enable {
|
||||
Unit = {
|
||||
Description = "DankMaterialShell";
|
||||
PartOf = [ config.wayland.systemd.target ];
|
||||
After = [ config.wayland.systemd.target ];
|
||||
X-Restart-Triggers = lib.optional cfg.systemd.restartIfChanged config.programs.quickshell.configs.dms;
|
||||
};
|
||||
|
||||
Service = {
|
||||
ExecStart = lib.getExe dmsPkgs.dmsCli + " run --session";
|
||||
Restart = "on-failure";
|
||||
};
|
||||
|
||||
Install.WantedBy = [ config.wayland.systemd.target ];
|
||||
};
|
||||
|
||||
xdg.stateFile."DankMaterialShell/default-session.json" = lib.mkIf (cfg.default.session != { }) {
|
||||
source = jsonFormat.generate "default-session.json" cfg.default.session;
|
||||
};
|
||||
|
||||
xdg.configFile = lib.mkMerge [
|
||||
(lib.mapAttrs' (name: plugin: {
|
||||
name = "DankMaterialShell/plugins/${name}";
|
||||
value.source = plugin.src;
|
||||
}) (lib.filterAttrs (n: v: v.enable) cfg.plugins))
|
||||
{
|
||||
"DankMaterialShell/default-settings.json" = lib.mkIf (cfg.default.settings != { }) {
|
||||
source = jsonFormat.generate "default-settings.json" cfg.default.settings;
|
||||
};
|
||||
}
|
||||
];
|
||||
|
||||
home.packages =
|
||||
[
|
||||
pkgs.material-symbols
|
||||
pkgs.inter
|
||||
pkgs.fira-code
|
||||
|
||||
pkgs.ddcutil
|
||||
pkgs.libsForQt5.qt5ct
|
||||
pkgs.kdePackages.qt6ct
|
||||
|
||||
dmsPkgs.dmsCli
|
||||
]
|
||||
++ lib.optional cfg.enableSystemMonitoring dmsPkgs.dgop
|
||||
++ lib.optionals cfg.enableClipboard [pkgs.cliphist pkgs.wl-clipboard]
|
||||
++ lib.optionals cfg.enableVPN [pkgs.glib pkgs.networkmanager]
|
||||
++ lib.optional cfg.enableBrightnessControl pkgs.brightnessctl
|
||||
++ lib.optional cfg.enableColorPicker pkgs.hyprpicker
|
||||
++ lib.optional cfg.enableDynamicTheming pkgs.matugen
|
||||
++ lib.optional cfg.enableAudioWavelength pkgs.cava
|
||||
++ lib.optional cfg.enableCalendarEvents pkgs.khal
|
||||
++ lib.optional cfg.enableSystemSound pkgs.kdePackages.qtmultimedia;
|
||||
};
|
||||
}
|
||||
@@ -11,7 +11,7 @@
   user = config.services.greetd.settings.default_session.user;

   greeterScript = pkgs.writeShellScriptBin "dms-greeter" ''
-    export PATH=$PATH:${lib.makeBinPath [ cfg.quickshell.package config.programs.${cfg.compositor.name}.package ]}
+    export PATH=$PATH:${lib.makeBinPath [cfg.quickshell.package config.programs.${cfg.compositor.name}.package]}
     ${lib.escapeShellArgs ([
       "sh"
       "${../../quickshell/Modules/Greetd/assets/dms-greeter}"
@@ -28,11 +28,9 @@
     ])} ${lib.optionalString cfg.logs.save "> ${cfg.logs.path} 2>&1"}
   '';
 in {
-  imports =
-    let
-      msg = "The option 'programs.dankMaterialShell.greeter.compositor.extraConfig' is deprecated. Please use 'programs.dankMaterialShell.greeter.compositor.customConfig' instead.";
-    in
-      [ (lib.mkRemovedOptionModule [ "programs" "dankMaterialShell" "greeter" "compositor" "extraConfig" ] msg) ];
+  imports = let
+    msg = "The option 'programs.dankMaterialShell.greeter.compositor.extraConfig' is deprecated. Please use 'programs.dankMaterialShell.greeter.compositor.customConfig' instead.";
+  in [(lib.mkRemovedOptionModule ["programs" "dankMaterialShell" "greeter" "compositor" "extraConfig"] msg)];

   options.programs.dankMaterialShell.greeter = {
     enable = lib.mkEnableOption "DankMaterialShell greeter";
@@ -77,7 +75,7 @@ in {
   config = lib.mkIf cfg.enable {
     assertions = [
       {
-        assertion = (config.users.users.${user} or { }) != { };
+        assertion = (config.users.users.${user} or {}) != {};
         message = ''
           dmsgreeter: user set for greetd default_session ${user} does not exist. Please create it before referencing it.
         '';
@@ -95,8 +93,10 @@ in {
     systemd.tmpfiles.settings."10-dmsgreeter" = {
       "/var/lib/dmsgreeter".d = {
         user = user;
-        group = if config.users.users.${user}.group != ""
-          then config.users.users.${user}.group else "greeter";
+        group =
+          if config.users.users.${user}.group != ""
+          then config.users.users.${user}.group
+          else "greeter";
         mode = "0755";
       };
     };
@@ -106,7 +106,8 @@ in {
         if [ -f "${f}" ]; then
           cp "${f}" .
         fi
-      '') cfg.configFiles)}
+      '')
+      cfg.configFiles)}

       if [ -f session.json ]; then
         if cp "$(${lib.getExe pkgs.jq} -r '.wallpaperPath' session.json)" wallpaper.jpg; then
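For users migrating past the removed option, the rename announced above is mechanical; a sketch, assuming customConfig accepts the same free-form compositor snippet that extraConfig did (its exact type is not visible in this diff):

{
  programs.dankMaterialShell.greeter = {
    enable = true;
    # Previously: compositor.extraConfig = "...";
    compositor.customConfig = ''
      # extra compositor settings for the greeter session (illustrative only)
    '';
  };
}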
distro/nix/home.nix (new file, 94 lines)
@@ -0,0 +1,94 @@
{
  config,
  pkgs,
  lib,
  dmsPkgs,
  ...
}: let
  cfg = config.programs.dankMaterialShell;
  jsonFormat = pkgs.formats.json {};
  common = import ./common.nix {inherit config pkgs lib dmsPkgs;};
in {
  imports = [
    ./options.nix
    (lib.mkRemovedOptionModule ["programs" "dankMaterialShell" "enableNightMode"] "Night mode is now always available.")
    (lib.mkRenamedOptionModule ["programs" "dankMaterialShell" "enableSystemd"] ["programs" "dankMaterialShell" "systemd" "enable"])
  ];

  options.programs.dankMaterialShell = with lib.types; {
    default = {
      settings = lib.mkOption {
        type = jsonFormat.type;
        default = {};
        description = "The default settings are only read if the settings.json file doesn't exist";
      };
      session = lib.mkOption {
        type = jsonFormat.type;
        default = {};
        description = "The default session is only read if the session.json file doesn't exist";
      };
    };

    plugins = lib.mkOption {
      type = attrsOf (types.submodule ({config, ...}: {
        options = {
          enable = lib.mkOption {
            type = types.bool;
            default = true;
            description = "Whether to link this plugin";
          };
          src = lib.mkOption {
            type = types.path;
            description = "Source to link to DMS plugins directory";
          };
        };
      }));
      default = {};
      description = "DMS Plugins to install";
    };
  };

  config = lib.mkIf cfg.enable
    {
      programs.quickshell = {
        enable = true;
        package = cfg.quickshell.package;

        configs.dms = common.qmlPath;
      };

      systemd.user.services.dms = lib.mkIf cfg.systemd.enable {
        Unit = {
          Description = "DankMaterialShell";
          PartOf = [config.wayland.systemd.target];
          After = [config.wayland.systemd.target];
          X-Restart-Triggers = lib.optional cfg.systemd.restartIfChanged common.qmlPath;
        };

        Service = {
          ExecStart = lib.getExe dmsPkgs.dmsCli + " run --session";
          Restart = "on-failure";
        };

        Install.WantedBy = [config.wayland.systemd.target];
      };

      xdg.stateFile."DankMaterialShell/default-session.json" = lib.mkIf (cfg.default.session != {}) {
        source = jsonFormat.generate "default-session.json" cfg.default.session;
      };

      xdg.configFile = lib.mkMerge [
        (lib.mapAttrs' (name: plugin: {
          name = "DankMaterialShell/plugins/${name}";
          value.source = plugin.src;
        }) (lib.filterAttrs (n: v: v.enable) cfg.plugins))
        {
          "DankMaterialShell/default-settings.json" = lib.mkIf (cfg.default.settings != {}) {
            source = jsonFormat.generate "default-settings.json" cfg.default.settings;
          };
        }
      ];

      home.packages = common.packages;
    };
}
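For orientation, a minimal sketch of how a user configuration might consume this home-manager module; the option paths come from the module above, while the plugin name, source path, and settings keys are made-up examples:

{ ... }: {
  programs.dankMaterialShell = {
    enable = true;
    systemd.enable = true;   # start dms via the generated user service

    # Written to default-settings.json; only consulted when settings.json is absent.
    default.settings = {
      exampleKey = "exampleValue";   # hypothetical key, not a documented setting
    };

    # Each enabled plugin is linked to ~/.config/DankMaterialShell/plugins/<name>.
    plugins.exampleWidget = {
      enable = true;
      src = ./plugins/example-widget;   # hypothetical path
    };
  };
}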
distro/nix/nixos.nix (new file, 36 lines)
@@ -0,0 +1,36 @@
{
  config,
  pkgs,
  lib,
  dmsPkgs,
  ...
}: let
  cfg = config.programs.dankMaterialShell;
  common = import ./common.nix {inherit config pkgs lib dmsPkgs;};
in {
  imports = [
    ./options.nix
  ];

  config = lib.mkIf cfg.enable
    {
      environment.etc."xdg/quickshell/dms".source = "${dmsPkgs.dankMaterialShell}/etc/xdg/quickshell/dms";

      systemd.user.services.dms = lib.mkIf cfg.systemd.enable {
        description = "DankMaterialShell";
        path = [cfg.quickshell.package];

        partOf = ["graphical-session.target"];
        after = ["graphical-session.target"];
        wantedBy = ["graphical-session.target"];
        restartTriggers = lib.optional cfg.systemd.restartIfChanged common.qmlPath;

        serviceConfig = {
          ExecStart = lib.getExe dmsPkgs.dmsCli + " run --session";
          Restart = "on-failure";
        };
      };

      environment.systemPackages = [cfg.quickshell.package] ++ common.packages;
    };
}
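A comparable sketch for the NixOS module above; the flake output name used for the import is an assumption, while the option paths are the ones defined in options.nix and this file:

{ inputs, ... }: {
  # Hypothetical import; the actual flake output name is not shown in this diff.
  imports = [ inputs.dankMaterialShell.nixosModules.dankMaterialShell ];

  programs.dankMaterialShell = {
    enable = true;
    systemd.enable = true;   # installs dms.service wanted by graphical-session.target
  };
}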
distro/nix/options.nix (new file, 68 lines)
@@ -0,0 +1,68 @@
{
  pkgs,
  lib,
  ...
}: let
  inherit (lib) types;
in {
  options.programs.dankMaterialShell = {
    enable = lib.mkEnableOption "DankMaterialShell";

    systemd = {
      enable = lib.mkEnableOption "DankMaterialShell systemd startup";
      restartIfChanged = lib.mkOption {
        type = types.bool;
        default = true;
        description = "Auto-restart dms.service when dankMaterialShell changes";
      };
    };
    enableSystemMonitoring = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to use system monitoring widgets";
    };
    enableClipboard = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to use the clipboard widget";
    };
    enableVPN = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to use the VPN widget";
    };
    enableBrightnessControl = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to have brightness/backlight support";
    };
    enableColorPicker = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to have color picking support";
    };
    enableDynamicTheming = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to have dynamic theming support";
    };
    enableAudioWavelength = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to have audio wavelength support";
    };
    enableCalendarEvents = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add calendar events support via khal";
    };
    enableSystemSound = lib.mkOption {
      type = types.bool;
      default = true;
      description = "Add needed dependencies to have system sound support";
    };
    quickshell = {
      package = lib.mkPackageOption pkgs "quickshell" {};
    };
  };
}
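All of the enable* flags above default to true, so trimming optional runtime dependencies is a matter of turning individual flags off; a small sketch (option names are from options.nix, and the package mapping follows the home.packages list in the home-manager module earlier in this diff):

{ pkgs, ... }: {
  programs.dankMaterialShell = {
    enable = true;

    enableVPN = false;              # drops glib and networkmanager
    enableCalendarEvents = false;   # drops khal
    enableAudioWavelength = false;  # drops cava

    # mkPackageOption exposes the usual package override.
    quickshell.package = pkgs.quickshell;
  };
}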
distro/opensuse/_service (new file, 24 lines)
@@ -0,0 +1,24 @@
<services>
  <!-- Pull full git repository for master branch (QML code) -->
  <service name="tar_scm">
    <param name="scm">git</param>
    <param name="url">https://github.com/AvengeMedia/DankMaterialShell.git</param>
    <param name="revision">master</param>
    <param name="filename">dms-git-source</param>
  </service>
  <service name="recompress">
    <param name="file">*.tar</param>
    <param name="compression">gz</param>
  </service>
  <!-- Download pre-built binaries -->
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-amd64.gz</param>
  </service>
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-arm64.gz</param>
  </service>
</services>
distro/opensuse/dms-git.spec (new file, 107 lines)
@@ -0,0 +1,107 @@
%global debug_package %{nil}

Name: dms-git
Version: 0.6.2+git
Release: 5%{?dist}
Epoch: 1
Summary: DankMaterialShell - Material 3 inspired shell (git nightly)

License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
Source0: dms-git-source.tar.gz
Source1: dms-distropkg-amd64.gz
Source2: dms-distropkg-arm64.gz

BuildRequires: gzip
BuildRequires: systemd-rpm-macros

Requires: (quickshell-git or quickshell)
Requires: accountsservice
Requires: dgop

Recommends: cava
Recommends: cliphist
Recommends: danksearch
Recommends: matugen
Recommends: quickshell-git
Recommends: wl-clipboard

Recommends: NetworkManager
Recommends: qt6-qtmultimedia
Suggests: qt6ct

Provides: dms
Conflicts: dms
Obsoletes: dms

%description
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
and optimized for niri, Hyprland, Sway, and other wlroots compositors.

This git version tracks the master branch and includes the latest features
and fixes. Includes pre-built dms CLI binary and QML shell files.

%prep
%setup -q -n dms-git-source

%ifarch x86_64
gunzip -c %{SOURCE1} > dms
%endif
%ifarch aarch64
gunzip -c %{SOURCE2} > dms
%endif
chmod +x dms

%build

%install
install -Dm755 dms %{buildroot}%{_bindir}/dms

install -d %{buildroot}%{_datadir}/bash-completion/completions
install -d %{buildroot}%{_datadir}/zsh/site-functions
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
./dms completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
./dms completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
./dms completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :

install -Dm644 quickshell/assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service

install -dm755 %{buildroot}%{_datadir}/quickshell/dms
cp -r quickshell/* %{buildroot}%{_datadir}/quickshell/dms/

rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
rm -rf %{buildroot}%{_datadir}/quickshell/dms/core

%posttrans
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
    rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
    rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
fi

if [ "$1" -ge 2 ]; then
    pkill -USR1 -x dms >/dev/null 2>&1 || true
fi

%files
%license LICENSE
%doc CONTRIBUTING.md
%doc quickshell/README.md
%{_bindir}/dms
%dir %{_datadir}/fish
%dir %{_datadir}/fish/vendor_completions.d
%{_datadir}/fish/vendor_completions.d/dms.fish
%dir %{_datadir}/zsh
%dir %{_datadir}/zsh/site-functions
%{_datadir}/zsh/site-functions/_dms
%{_datadir}/bash-completion/completions/dms
%dir %{_datadir}/quickshell
%{_datadir}/quickshell/dms/
%{_userunitdir}/dms.service

%changelog
* Fri Nov 22 2025 AvengeMedia <maintainer@avengemedia.com> - 0.6.2+git-5
- Git nightly build from master branch
- Multi-arch support (x86_64, aarch64)
distro/opensuse/dms.spec (new file, 107 lines)
@@ -0,0 +1,107 @@
# Spec for DMS for OpenSUSE/OBS

%global debug_package %{nil}

Name: dms
Version: 0.6.2
Release: 1%{?dist}
Summary: DankMaterialShell - Material 3 inspired shell for Wayland compositors

License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
Source0: dms-source.tar.gz
Source1: dms-distropkg-amd64.gz
Source2: dms-distropkg-arm64.gz

BuildRequires: gzip
BuildRequires: systemd-rpm-macros

# Core requirements
Requires: (quickshell-git or quickshell)
Requires: accountsservice
Requires: dgop

# Core utilities (Highly recommended for DMS functionality)
Recommends: cava
Recommends: cliphist
Recommends: danksearch
Recommends: matugen
Recommends: NetworkManager
Recommends: qt6-qtmultimedia
Recommends: wl-clipboard
Suggests: qt6ct

%description
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
and optimized for niri, Hyprland, Sway, and other wlroots compositors. Features
notifications, app launcher, wallpaper customization, and plugin system.

Includes auto-theming for GTK/Qt apps with matugen, 20+ customizable widgets,
process monitoring, notification center, clipboard history, dock, control center,
lock screen, and comprehensive plugin system.

%prep
%setup -q -n DankMaterialShell-%{version}

%ifarch x86_64
gunzip -c %{SOURCE1} > dms
%endif
%ifarch aarch64
gunzip -c %{SOURCE2} > dms
%endif
chmod +x dms

%build

%install
install -Dm755 dms %{buildroot}%{_bindir}/dms

install -d %{buildroot}%{_datadir}/bash-completion/completions
install -d %{buildroot}%{_datadir}/zsh/site-functions
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
./dms completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
./dms completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
./dms completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :

install -Dm644 quickshell/assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service

install -dm755 %{buildroot}%{_datadir}/quickshell/dms
cp -r quickshell/* %{buildroot}%{_datadir}/quickshell/dms/

rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
rm -rf %{buildroot}%{_datadir}/quickshell/dms/core

%posttrans
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
    rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
    rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
    rmdir "%{_sysconfdir}/xdg" 2>/dev/null || true
fi

if [ "$1" -ge 2 ]; then
    pkill -USR1 -x dms >/dev/null 2>&1 || true
fi

%files
%license LICENSE
%doc CONTRIBUTING.md
%doc quickshell/README.md
%{_bindir}/dms
%dir %{_datadir}/fish
%dir %{_datadir}/fish/vendor_completions.d
%{_datadir}/fish/vendor_completions.d/dms.fish
%dir %{_datadir}/zsh
%dir %{_datadir}/zsh/site-functions
%{_datadir}/zsh/site-functions/_dms
%{_datadir}/bash-completion/completions/dms
%dir %{_datadir}/quickshell
%{_datadir}/quickshell/dms/
%{_userunitdir}/dms.service

%changelog
* Fri Nov 22 2025 AvengeMedia <maintainer@avengemedia.com> - 0.6.2-1
- Stable release build with pre-built binaries
- Multi-arch support (x86_64, aarch64)
distro/scripts/obs-status.sh (new file, 106 lines)
@@ -0,0 +1,106 @@
#!/bin/bash
# Unified OBS status checker for dms packages
# Checks all platforms (Debian, OpenSUSE) and architectures (x86_64, aarch64)
# Only pulls logs if build failed
# Usage: ./distro/scripts/obs-status.sh [package-name]
#
# Examples:
#   ./distro/scripts/obs-status.sh       # Check all packages
#   ./distro/scripts/obs-status.sh dms   # Check specific package

OBS_BASE_PROJECT="home:AvengeMedia"
OBS_BASE="$HOME/.cache/osc-checkouts"

ALL_PACKAGES=(dms dms-git)

REPOS=("Debian_13" "openSUSE_Tumbleweed" "16.0")
ARCHES=("x86_64" "aarch64")

if [[ -n "$1" ]]; then
    PACKAGES=("$1")
else
    PACKAGES=("${ALL_PACKAGES[@]}")
fi

cd "$OBS_BASE"

for pkg in "${PACKAGES[@]}"; do
    case "$pkg" in
        dms)
            PROJECT="$OBS_BASE_PROJECT:dms"
            ;;
        dms-git)
            PROJECT="$OBS_BASE_PROJECT:dms-git"
            ;;
        *)
            echo "Error: Unknown package '$pkg'"
            continue
            ;;
    esac

    echo "=========================================="
    echo "=== $pkg ==="
    echo "=========================================="

    # Checkout if needed
    if [[ ! -d "$PROJECT/$pkg" ]]; then
        osc co "$PROJECT/$pkg" 2>&1 | tail -1
    fi

    cd "$PROJECT/$pkg"

    ALL_RESULTS=$(osc results 2>&1)

    # Check each repository and architecture
    FAILED_BUILDS=()
    for repo in "${REPOS[@]}"; do
        for arch in "${ARCHES[@]}"; do
            STATUS=$(echo "$ALL_RESULTS" | grep "$repo.*$arch" | awk '{print $NF}' | head -1)

            if [[ -n "$STATUS" ]]; then
                # Color code status
                case "$STATUS" in
                    succeeded)
                        COLOR="\033[0;32m" # Green
                        SYMBOL="✅"
                        ;;
                    failed)
                        COLOR="\033[0;31m" # Red
                        SYMBOL="❌"
                        FAILED_BUILDS+=("$repo $arch")
                        ;;
                    unresolvable)
                        COLOR="\033[0;33m" # Yellow
                        SYMBOL="⚠️"
                        ;;
                    *)
                        COLOR="\033[0;37m" # White
                        SYMBOL="⏳"
                        ;;
                esac
                echo -e " $SYMBOL $repo $arch: ${COLOR}$STATUS\033[0m"
            fi
        done
    done

    # Pull logs for failed builds
    if [[ ${#FAILED_BUILDS[@]} -gt 0 ]]; then
        echo ""
        echo " 📋 Fetching logs for failed builds..."
        for build in "${FAILED_BUILDS[@]}"; do
            read -r repo arch <<< "$build"
            echo ""
            echo " ────────────────────────────────────────────"
            echo " Build log: $repo $arch"
            echo " ────────────────────────────────────────────"
            osc remotebuildlog "$PROJECT" "$pkg" "$repo" "$arch" 2>&1 | tail -100
        done
    fi

    echo ""
    cd - > /dev/null
done

echo "=========================================="
echo "Status check complete!"
distro/scripts/obs-upload.sh (new file, 733 lines)
@@ -0,0 +1,733 @@
|
||||
#!/bin/bash
|
||||
# Unified OBS upload script for dms packages
|
||||
# Handles Debian and OpenSUSE builds for both x86_64 and aarch64
|
||||
# Usage: ./distro/scripts/obs-upload.sh [distro] <package-name> [commit-message]
|
||||
#
|
||||
# Examples:
|
||||
# ./distro/scripts/obs-upload.sh dms "Update to v0.6.2"
|
||||
# ./distro/scripts/obs-upload.sh debian dms
|
||||
# ./distro/scripts/obs-upload.sh opensuse dms-git
|
||||
|
||||
set -e
|
||||
|
||||
UPLOAD_DEBIAN=true
|
||||
UPLOAD_OPENSUSE=true
|
||||
PACKAGE=""
|
||||
MESSAGE=""
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
debian)
|
||||
UPLOAD_DEBIAN=true
|
||||
UPLOAD_OPENSUSE=false
|
||||
;;
|
||||
opensuse)
|
||||
UPLOAD_DEBIAN=false
|
||||
UPLOAD_OPENSUSE=true
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$PACKAGE" ]]; then
|
||||
PACKAGE="$arg"
|
||||
elif [[ -z "$MESSAGE" ]]; then
|
||||
MESSAGE="$arg"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
OBS_BASE_PROJECT="home:AvengeMedia"
|
||||
OBS_BASE="$HOME/.cache/osc-checkouts"
|
||||
|
||||
# Available packages
|
||||
AVAILABLE_PACKAGES=(dms dms-git)
|
||||
|
||||
if [[ -z "$PACKAGE" ]]; then
|
||||
echo "Available packages:"
|
||||
echo ""
|
||||
echo " 1. dms - Stable DMS"
|
||||
echo " 2. dms-git - Nightly DMS"
|
||||
echo " a. all"
|
||||
echo ""
|
||||
read -p "Select package (1-${#AVAILABLE_PACKAGES[@]}, a): " selection
|
||||
|
||||
if [[ "$selection" == "a" ]] || [[ "$selection" == "all" ]]; then
|
||||
PACKAGE="all"
|
||||
elif [[ "$selection" =~ ^[0-9]+$ ]] && [[ "$selection" -ge 1 ]] && [[ "$selection" -le ${#AVAILABLE_PACKAGES[@]} ]]; then
|
||||
PACKAGE="${AVAILABLE_PACKAGES[$((selection-1))]}"
|
||||
else
|
||||
echo "Error: Invalid selection"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
if [[ -z "$MESSAGE" ]]; then
|
||||
MESSAGE="Update packaging"
|
||||
fi
|
||||
|
||||
# Get repo root (2 levels up from distro/scripts/)
|
||||
REPO_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
# Ensure we're in repo root
|
||||
if [[ ! -d "distro/debian" ]]; then
|
||||
echo "Error: Run this script from the repository root"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Handle "all" option
|
||||
if [[ "$PACKAGE" == "all" ]]; then
|
||||
echo "==> Uploading all packages"
|
||||
DISTRO_ARG=""
|
||||
if [[ "$UPLOAD_DEBIAN" == true && "$UPLOAD_OPENSUSE" == false ]]; then
|
||||
DISTRO_ARG="debian"
|
||||
elif [[ "$UPLOAD_DEBIAN" == false && "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
DISTRO_ARG="opensuse"
|
||||
fi
|
||||
echo ""
|
||||
FAILED=()
|
||||
for pkg in "${AVAILABLE_PACKAGES[@]}"; do
|
||||
if [[ -d "distro/debian/$pkg" ]]; then
|
||||
echo "=========================================="
|
||||
echo "Uploading $pkg..."
|
||||
echo "=========================================="
|
||||
if [[ -n "$DISTRO_ARG" ]]; then
|
||||
if bash "$0" "$DISTRO_ARG" "$pkg" "$MESSAGE"; then
|
||||
echo "✅ $pkg uploaded successfully"
|
||||
else
|
||||
echo "❌ $pkg failed to upload"
|
||||
FAILED+=("$pkg")
|
||||
fi
|
||||
else
|
||||
if bash "$0" "$pkg" "$MESSAGE"; then
|
||||
echo "✅ $pkg uploaded successfully"
|
||||
else
|
||||
echo "❌ $pkg failed to upload"
|
||||
FAILED+=("$pkg")
|
||||
fi
|
||||
fi
|
||||
echo ""
|
||||
else
|
||||
echo "⚠️ Skipping $pkg (not found in distro/debian/)"
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#FAILED[@]} -eq 0 ]]; then
|
||||
echo "✅ All packages uploaded successfully!"
|
||||
exit 0
|
||||
else
|
||||
echo "❌ Some packages failed: ${FAILED[*]}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if package exists
|
||||
if [[ ! -d "distro/debian/$PACKAGE" ]]; then
|
||||
echo "Error: Package '$PACKAGE' not found in distro/debian/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
PROJECT="dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$PACKAGE'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
OBS_PROJECT="${OBS_BASE_PROJECT}:${PROJECT}"
|
||||
|
||||
echo "==> Target: $OBS_PROJECT / $PACKAGE"
|
||||
echo "==> Message: $MESSAGE"
|
||||
if [[ "$UPLOAD_DEBIAN" == true && "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
echo "==> Distributions: Debian + OpenSUSE"
|
||||
elif [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
echo "==> Distribution: Debian only"
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
echo "==> Distribution: OpenSUSE only"
|
||||
fi
|
||||
|
||||
# Create .obs directory if it doesn't exist
|
||||
mkdir -p "$OBS_BASE"
|
||||
|
||||
# Check out package if not already present
|
||||
if [[ ! -d "$OBS_BASE/$OBS_PROJECT/$PACKAGE" ]]; then
|
||||
echo "Checking out $OBS_PROJECT/$PACKAGE..."
|
||||
cd "$OBS_BASE"
|
||||
osc co "$OBS_PROJECT/$PACKAGE"
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
|
||||
WORK_DIR="$OBS_BASE/$OBS_PROJECT/$PACKAGE"
|
||||
|
||||
echo "==> Preparing $PACKAGE for OBS upload"
|
||||
|
||||
# Clean working directory (keep osc metadata)
|
||||
find "$WORK_DIR" -maxdepth 1 -type f \( -name "*.tar.gz" -o -name "*.spec" -o -name "_service" -o -name "*.dsc" \) -delete 2>/dev/null || true
|
||||
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
echo " - Copying _service (for binary downloads)"
|
||||
cp "distro/debian/$PACKAGE/_service" "$WORK_DIR/"
|
||||
fi
|
||||
|
||||
# Copy OpenSUSE spec if it exists and handle auto-increment
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
echo " - Copying $PACKAGE.spec for OpenSUSE"
|
||||
cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/"
|
||||
|
||||
# Auto-increment Release if same Version is being rebuilt
|
||||
if [[ -f "$WORK_DIR/.osc/$PACKAGE.spec" ]]; then
|
||||
NEW_VERSION=$(grep "^Version:" "$WORK_DIR/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
NEW_RELEASE=$(grep "^Release:" "$WORK_DIR/$PACKAGE.spec" | sed 's/^Release:[[:space:]]*//' | sed 's/%{?dist}//' | head -1)
|
||||
|
||||
OLD_VERSION=$(grep "^Version:" "$WORK_DIR/.osc/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
OLD_RELEASE=$(grep "^Release:" "$WORK_DIR/.osc/$PACKAGE.spec" | sed 's/^Release:[[:space:]]*//' | sed 's/%{?dist}//' | head -1)
|
||||
|
||||
if [[ "$NEW_VERSION" == "$OLD_VERSION" ]]; then
|
||||
# Same version - increment release number
|
||||
if [[ "$OLD_RELEASE" =~ ^([0-9]+) ]]; then
|
||||
BASE_RELEASE="${BASH_REMATCH[1]}"
|
||||
NEXT_RELEASE=$((BASE_RELEASE + 1))
|
||||
echo " - Detected rebuild of same version $NEW_VERSION (release $OLD_RELEASE -> $NEXT_RELEASE)"
|
||||
sed -i "s/^Release:[[:space:]]*${NEW_RELEASE}%{?dist}/Release: ${NEXT_RELEASE}%{?dist}/" "$WORK_DIR/$PACKAGE.spec"
|
||||
fi
|
||||
else
|
||||
echo " - New version detected: $OLD_VERSION -> $NEW_VERSION (keeping release $NEW_RELEASE)"
|
||||
fi
|
||||
else
|
||||
echo " - First upload to OBS (no previous spec found)"
|
||||
fi
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
echo " - Warning: OpenSUSE spec file not found, skipping OpenSUSE upload"
|
||||
fi
|
||||
|
||||
# Handle OpenSUSE-only uploads (create tarball without Debian processing)
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
echo " - OpenSUSE-only upload: creating source tarball"
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
|
||||
# Check _service file to determine how to get source
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
# Check for tar_scm (git source)
|
||||
if grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
fi
|
||||
SOURCE_DIR=$(pwd)
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "$SOURCE_DIR" && -d "$SOURCE_DIR" ]]; then
|
||||
# Extract Source0 from spec file
|
||||
SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
|
||||
if [[ -n "$SOURCE0" ]]; then
|
||||
OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX)
|
||||
cd "$OBS_TARBALL_DIR"
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
;;
|
||||
*)
|
||||
EXPECTED_DIR=$(basename "$SOURCE_DIR")
|
||||
;;
|
||||
esac
|
||||
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
tar -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$OBS_TARBALL_DIR"
|
||||
fi
|
||||
else
|
||||
echo " - Warning: Could not obtain source for OpenSUSE tarball"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Generate .dsc file and handle source format (for Debian only)
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; then
|
||||
# Get version from changelog
|
||||
CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" distro/debian/$PACKAGE/debian/changelog 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "0.1.11")
|
||||
|
||||
# Determine source format
|
||||
SOURCE_FORMAT=$(cat "distro/debian/$PACKAGE/debian/source/format" 2>/dev/null || echo "3.0 (quilt)")
|
||||
|
||||
# Handle native format (3.0 native)
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]]; then
|
||||
echo " - Native format detected: creating combined tarball"
|
||||
|
||||
VERSION="$CHANGELOG_VERSION"
|
||||
|
||||
# Create temp directory for building combined tarball
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
|
||||
# Determine tarball name for native format (use version without revision)
|
||||
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
|
||||
|
||||
SOURCE_DIR=""
|
||||
|
||||
# Check _service file to determine how to get source
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
# Check for tar_scm first (git source) - this takes priority for git packages
|
||||
if grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
# For dms-git, use tar_scm to get git source
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
fi
|
||||
SOURCE_DIR=$(pwd)
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo "Error: Failed to clone git repository"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
elif grep -q "download_url" "distro/debian/$PACKAGE/_service" && [[ "$PACKAGE" != "dms-git" ]]; then
|
||||
# Extract download_url for source (skip binary downloads)
|
||||
# Look for download_url with "source" in path or .tar.gz/.tar.xz archives
|
||||
# Skip binaries (distropkg, standalone .gz files, etc.)
|
||||
|
||||
# Extract all paths from download_url services
|
||||
ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" | \
|
||||
grep '<param name="path">' | \
|
||||
sed 's/.*<param name="path">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
# Find source path (has "source" or ends with .tar.gz/.tar.xz, but not distropkg)
|
||||
SOURCE_PATH=""
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" && \
|
||||
! echo "$path" | grep -qE "(distropkg|binary)"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# If no source found, try first path that ends with .tar.gz/.tar.xz
|
||||
if [[ -z "$SOURCE_PATH" ]]; then
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "\.tar\.(gz|xz|bz2)$"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ -n "$SOURCE_PATH" ]]; then
|
||||
# Extract the service block containing this path
|
||||
SOURCE_BLOCK=$(awk -v target="$SOURCE_PATH" '
|
||||
/<service name="download_url">/ { in_block=1; block="" }
|
||||
in_block { block=block"\n"$0 }
|
||||
/<\/service>/ {
|
||||
if (in_block && block ~ target) {
|
||||
print block
|
||||
exit
|
||||
}
|
||||
in_block=0
|
||||
}
|
||||
' "distro/debian/$PACKAGE/_service")
|
||||
|
||||
URL_PROTOCOL=$(echo "$SOURCE_BLOCK" | grep "protocol" | sed 's/.*<param name="protocol">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_HOST=$(echo "$SOURCE_BLOCK" | grep "host" | sed 's/.*<param name="host">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_PATH="$SOURCE_PATH"
|
||||
fi
|
||||
|
||||
if [[ -n "$URL_PROTOCOL" && -n "$URL_HOST" && -n "$URL_PATH" ]]; then
|
||||
SOURCE_URL="${URL_PROTOCOL}://${URL_HOST}${URL_PATH}"
|
||||
echo " Downloading source from: $SOURCE_URL"
|
||||
|
||||
if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL"; then
|
||||
cd "$TEMP_DIR"
|
||||
if [[ "$SOURCE_URL" == *.tar.xz ]]; then
|
||||
tar -xJf source-archive
|
||||
elif [[ "$SOURCE_URL" == *.tar.gz ]] || [[ "$SOURCE_URL" == *.tgz ]]; then
|
||||
tar -xzf source-archive
|
||||
fi
|
||||
# GitHub archives extract to DankMaterialShell-VERSION/ or similar
|
||||
SOURCE_DIR=$(find . -maxdepth 1 -type d -name "DankMaterialShell-*" | head -1)
|
||||
if [[ -z "$SOURCE_DIR" ]]; then
|
||||
# Try to find any extracted directory
|
||||
SOURCE_DIR=$(find . -maxdepth 1 -type d ! -name "." | head -1)
|
||||
fi
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Failed to extract source archive or find source directory"
|
||||
echo "Contents of $TEMP_DIR:"
|
||||
ls -la "$TEMP_DIR"
|
||||
cd "$REPO_ROOT"
|
||||
exit 1
|
||||
fi
|
||||
# Convert to absolute path
|
||||
SOURCE_DIR=$(cd "$SOURCE_DIR" && pwd)
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo "Error: Failed to download source from $SOURCE_URL"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Could not determine or obtain source for $PACKAGE"
|
||||
echo "SOURCE_DIR: $SOURCE_DIR"
|
||||
if [[ -d "$TEMP_DIR" ]]; then
|
||||
echo "Contents of temp directory:"
|
||||
ls -la "$TEMP_DIR"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo " Found source directory: $SOURCE_DIR"
|
||||
|
||||
# Create OpenSUSE-compatible source tarballs BEFORE adding debian/ directory
|
||||
# (OpenSUSE doesn't need debian/ directory)
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
# If SOURCE_DIR is not set (OpenSUSE-only upload), detect source now
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo " - Detecting source for OpenSUSE-only upload"
|
||||
if [[ -z "$TEMP_DIR" ]]; then
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
fi
|
||||
|
||||
# Check _service file to determine how to get source
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
# Check for tar_scm first (git source) - this takes priority for git packages
|
||||
if grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
# For dms-git, use tar_scm to get git source
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
fi
|
||||
SOURCE_DIR=$(pwd)
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo "Error: Failed to clone git repository"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
elif grep -q "download_url" "distro/debian/$PACKAGE/_service" && [[ "$PACKAGE" != "dms-git" ]]; then
|
||||
# Extract download_url for source (skip binary downloads)
|
||||
ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" | \
|
||||
grep '<param name="path">' | \
|
||||
sed 's/.*<param name="path">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
# Find source path (has "source" or ends with .tar.gz/.tar.xz, but not distropkg)
|
||||
SOURCE_PATH=""
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" && \
|
||||
! echo "$path" | grep -qE "(distropkg|binary)"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# If no source found, try first path that ends with .tar.gz/.tar.xz
|
||||
if [[ -z "$SOURCE_PATH" ]]; then
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "\.tar\.(gz|xz|bz2)$"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ -n "$SOURCE_PATH" ]]; then
|
||||
# Extract the service block containing this path
|
||||
SOURCE_BLOCK=$(awk -v target="$SOURCE_PATH" '
|
||||
/<service name="download_url">/ { in_block=1; block="" }
|
||||
in_block { block=block"\n"$0 }
|
||||
/<\/service>/ {
|
||||
if (in_block && block ~ target) {
|
||||
print block
|
||||
exit
|
||||
}
|
||||
in_block=0
|
||||
}
|
||||
' "distro/debian/$PACKAGE/_service")
|
||||
|
||||
URL_PROTOCOL=$(echo "$SOURCE_BLOCK" | grep "protocol" | sed 's/.*<param name="protocol">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_HOST=$(echo "$SOURCE_BLOCK" | grep "host" | sed 's/.*<param name="host">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_PATH="$SOURCE_PATH"
|
||||
fi
|
||||
|
||||
if [[ -n "$URL_PROTOCOL" && -n "$URL_HOST" && -n "$URL_PATH" ]]; then
|
||||
SOURCE_URL="${URL_PROTOCOL}://${URL_HOST}${URL_PATH}"
|
||||
echo " Downloading source from: $SOURCE_URL"
|
||||
|
||||
if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL"; then
|
||||
cd "$TEMP_DIR"
|
||||
if [[ "$SOURCE_URL" == *.tar.xz ]]; then
|
||||
tar -xJf source-archive
|
||||
elif [[ "$SOURCE_URL" == *.tar.gz ]] || [[ "$SOURCE_URL" == *.tgz ]]; then
|
||||
tar -xzf source-archive
|
||||
fi
|
||||
# GitHub archives extract to DankMaterialShell-VERSION/ or similar
|
||||
SOURCE_DIR=$(find . -maxdepth 1 -type d -name "DankMaterialShell-*" | head -1)
|
||||
if [[ -z "$SOURCE_DIR" ]]; then
|
||||
# Try to find any extracted directory
|
||||
SOURCE_DIR=$(find . -maxdepth 1 -type d ! -name "." | head -1)
|
||||
fi
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Failed to extract source archive or find source directory"
|
||||
echo "Contents of $TEMP_DIR:"
|
||||
ls -la "$TEMP_DIR"
|
||||
cd "$REPO_ROOT"
|
||||
exit 1
|
||||
fi
|
||||
# Convert to absolute path
|
||||
SOURCE_DIR=$(cd "$SOURCE_DIR" && pwd)
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo "Error: Failed to download source from $SOURCE_URL"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Could not determine or obtain source for $PACKAGE (OpenSUSE-only upload)"
|
||||
echo "SOURCE_DIR: $SOURCE_DIR"
|
||||
if [[ -d "$TEMP_DIR" ]]; then
|
||||
echo "Contents of temp directory:"
|
||||
ls -la "$TEMP_DIR"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo " Found source directory: $SOURCE_DIR"
|
||||
fi
|
||||
echo " - Creating OpenSUSE-compatible source tarballs"
|
||||
|
||||
# Extract Source0 from spec file
|
||||
SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1); if [[ -z "$SOURCE0" && "$PACKAGE" == "dms-git" ]]; then SOURCE0="dms-git-source.tar.gz"; fi
|
||||
|
||||
if [[ -n "$SOURCE0" ]]; then
|
||||
# Create a separate temporary directory for OpenSUSE tarball creation to avoid conflicts
|
||||
OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX)
|
||||
cd "$OBS_TARBALL_DIR"
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
# dms spec expects DankMaterialShell-%{version} directory (from %setup -q -n DankMaterialShell-%{version})
|
||||
# Extract version from spec file
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
dms-git)
|
||||
# dms-git spec expects dms-git-source directory (from %setup -q -n dms-git-source)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
*)
|
||||
# Generic handling
|
||||
DIR_NAME=$(basename "$SOURCE_DIR")
|
||||
echo " Creating $SOURCE0 (directory: $DIR_NAME)"
|
||||
cp -r "$SOURCE_DIR" "$DIR_NAME"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar -cJf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar -cjf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
else
|
||||
tar -czf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
fi
|
||||
rm -rf "$DIR_NAME"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
esac
|
||||
# Clean up the tarball work directory
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$OBS_TARBALL_DIR"
|
||||
echo " - OpenSUSE source tarballs created"
|
||||
fi
|
||||
|
||||
# Copy spec file
|
||||
cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/"
|
||||
fi
|
||||
|
||||
# Copy debian/ directory into source (for Debian builds only)
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
echo " Copying debian/ directory into source"
|
||||
cp -r "distro/debian/$PACKAGE/debian" "$SOURCE_DIR/"
|
||||
|
||||
# Create combined tarball
|
||||
echo " Creating combined tarball: $COMBINED_TARBALL"
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
TARBALL_BASE=$(basename "$SOURCE_DIR")
|
||||
tar -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
# Generate .dsc file for native format
|
||||
TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
|
||||
TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
|
||||
|
||||
# Extract Build-Depends from control file
|
||||
BUILD_DEPS="debhelper-compat (= 13)"
|
||||
if [[ -f "distro/debian/$PACKAGE/debian/control" ]]; then
|
||||
CONTROL_DEPS=$(sed -n '/^Build-Depends:/,/^[A-Z]/p' "distro/debian/$PACKAGE/debian/control" | \
|
||||
sed '/^Build-Depends:/s/^Build-Depends: *//' | \
|
||||
sed '/^[A-Z]/d' | \
|
||||
tr '\n' ' ' | \
|
||||
sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/[[:space:]]\+/ /g')
|
||||
if [[ -n "$CONTROL_DEPS" && "$CONTROL_DEPS" != "" ]]; then
|
||||
BUILD_DEPS="$CONTROL_DEPS"
|
||||
fi
|
||||
fi
|
||||
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
Format: 3.0 (native)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
Architecture: any
|
||||
Version: $VERSION
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: $BUILD_DEPS
|
||||
Files:
|
||||
$TARBALL_MD5 $TARBALL_SIZE $COMBINED_TARBALL
|
||||
EOF
|
||||
|
||||
echo " - Generated $PACKAGE.dsc for native format"
|
||||
fi
|
||||
else
|
||||
# Quilt format (legacy) - for Debian only
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
# For quilt format, version can have revision
|
||||
if [[ "$CHANGELOG_VERSION" == *"-"* ]]; then
|
||||
VERSION="$CHANGELOG_VERSION"
|
||||
else
|
||||
VERSION="${CHANGELOG_VERSION}-1"
|
||||
fi
|
||||
|
||||
echo " - Quilt format detected: creating debian.tar.gz"
|
||||
tar -czf "$WORK_DIR/debian.tar.gz" -C "distro/debian/$PACKAGE" debian/
|
||||
|
||||
echo " - Generating $PACKAGE.dsc for quilt format"
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
Format: 3.0 (quilt)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
Architecture: any
|
||||
Version: $VERSION
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13), wget, gzip
|
||||
DEBTRANSFORM-TAR: debian.tar.gz
|
||||
Files:
|
||||
00000000000000000000000000000000 1 debian.tar.gz
|
||||
EOF
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Change to working directory and commit
|
||||
cd "$WORK_DIR"
|
||||
|
||||
echo "==> Staging changes"
|
||||
# List files to be uploaded
|
||||
echo "Files to upload:"
|
||||
# Only list files relevant to the selected upload type
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
ls -lh *.tar.gz *.tar.xz *.tar *.spec *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
elif [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
ls -lh *.tar.gz *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
ls -lh *.tar.gz *.tar.xz *.tar *.spec _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
fi
|
||||
echo ""
|
||||
|
||||
osc addremove
|
||||
|
||||
echo "==> Committing to OBS"
|
||||
osc commit -m "$MESSAGE"
|
||||
|
||||
echo "==> Checking build status"
|
||||
osc results
|
||||
|
||||
echo ""
|
||||
echo "Upload complete! Monitor builds with:"
|
||||
echo " cd $WORK_DIR && osc results"
|
||||
echo " cd $WORK_DIR && osc buildlog <repo> <arch>"
|
||||
echo ""
|
||||
|
||||
# Don't cleanup - keep checkout for status checking
|
||||
echo ""
|
||||
echo "Upload complete! Build status:"
|
||||
cd "$WORK_DIR"
|
||||
osc results 2>&1 | head -10
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
echo ""
|
||||
echo "To check detailed status:"
|
||||
echo " cd $WORK_DIR && osc results"
|
||||
echo " cd $WORK_DIR && osc remotebuildlog $OBS_PROJECT $PACKAGE Debian_13 x86_64"
|
||||
echo ""
|
||||
echo "NOTE: Checkout kept at $WORK_DIR for status checking"
|
||||
echo ""
|
||||
echo "✅ Upload complete!"
|
||||
echo ""
|
||||
echo "Check build status with:"
|
||||
echo " ./distro/scripts/obs-status.sh $PACKAGE"
|
||||
distro/scripts/test-packaging.sh (new executable file, 169 lines)
@@ -0,0 +1,169 @@
#!/bin/bash
# Manual testing script for DMS packaging
# Tests OBS (Debian/openSUSE) and PPA (Ubuntu) workflows
# Usage: ./distro/test-packaging.sh [obs|ppa|all]

set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
DISTRO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
REPO_ROOT="$(cd "$DISTRO_DIR/.." && pwd)"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

info() { echo -e "${BLUE}[INFO]${NC} $1"; }
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
error() { echo -e "${RED}[ERROR]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }

TEST_MODE="${1:-all}"

echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "DMS Packaging Test Suite"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""

# Test 1: OBS Upload (Debian + openSUSE)
if [[ "$TEST_MODE" == "obs" ]] || [[ "$TEST_MODE" == "all" ]]; then
    echo "═══════════════════════════════════════════════════════════════════"
    echo "TEST 1: OBS Upload (Debian + openSUSE)"
    echo "═══════════════════════════════════════════════════════════════════"
    echo ""

    OBS_SCRIPT="$SCRIPT_DIR/obs-upload.sh"

    if [[ ! -f "$OBS_SCRIPT" ]]; then
        error "OBS script not found: $OBS_SCRIPT"
        exit 1
    fi

    info "OBS script location: $OBS_SCRIPT"
    info "Available packages: dms, dms-git"
    echo ""

    warn "This will upload to OBS (home:AvengeMedia)"
    read -p "Continue with OBS test? [y/N] " -n 1 -r
    echo

    if [[ $REPLY =~ ^[Yy]$ ]]; then
        info "Select package to test:"
        echo " 1. dms (stable)"
        echo " 2. dms-git (nightly)"
        echo " 3. all (both packages)"
        read -p "Choice [1]: " -n 1 -r PKG_CHOICE
        echo
        echo ""

        PKG_CHOICE="${PKG_CHOICE:-1}"

        cd "$REPO_ROOT"

        case "$PKG_CHOICE" in
            1)
                info "Testing OBS upload for 'dms' package..."
                bash "$OBS_SCRIPT" dms "Test packaging update"
                ;;
            2)
                info "Testing OBS upload for 'dms-git' package..."
                bash "$OBS_SCRIPT" dms-git "Test packaging update"
                ;;
            3)
                info "Testing OBS upload for all packages..."
                bash "$OBS_SCRIPT" all "Test packaging update"
                ;;
            *)
                error "Invalid choice"
                exit 1
                ;;
        esac

        echo ""
        success "OBS test completed"
        echo ""
        info "Check build status: https://build.opensuse.org/project/monitor/home:AvengeMedia"
    else
        warn "OBS test skipped"
    fi

    echo ""
fi

# Test 2: PPA Upload (Ubuntu)
if [[ "$TEST_MODE" == "ppa" ]] || [[ "$TEST_MODE" == "all" ]]; then
    echo "═══════════════════════════════════════════════════════════════════"
    echo "TEST 2: PPA Upload (Ubuntu)"
    echo "═══════════════════════════════════════════════════════════════════"
    echo ""

    PPA_SCRIPT="$DISTRO_DIR/ubuntu/ppa/create-and-upload.sh"

    if [[ ! -f "$PPA_SCRIPT" ]]; then
        error "PPA script not found: $PPA_SCRIPT"
        exit 1
    fi

    info "PPA script location: $PPA_SCRIPT"
    info "Available PPAs: dms, dms-git"
    info "Ubuntu series: questing (25.10)"
    echo ""

    warn "This will upload to Launchpad PPA (ppa:avengemedia/dms)"
    read -p "Continue with PPA test? [y/N] " -n 1 -r
    echo

    if [[ $REPLY =~ ^[Yy]$ ]]; then
        info "Select package to test:"
        echo " 1. dms (stable)"
        echo " 2. dms-git (nightly)"
        read -p "Choice [1]: " -n 1 -r PKG_CHOICE
        echo
        echo ""

        PKG_CHOICE="${PKG_CHOICE:-1}"

        case "$PKG_CHOICE" in
            1)
                info "Testing PPA upload for 'dms' package..."
                DMS_PKG="$DISTRO_DIR/ubuntu/dms"
                PPA_NAME="dms"
                ;;
            2)
                info "Testing PPA upload for 'dms-git' package..."
                DMS_PKG="$DISTRO_DIR/ubuntu/dms-git"
                PPA_NAME="dms-git"
                ;;
            *)
                error "Invalid choice"
                exit 1
                ;;
        esac

        echo ""

        if [[ ! -d "$DMS_PKG" ]]; then
            error "DMS package directory not found: $DMS_PKG"
            exit 1
        fi

        bash "$PPA_SCRIPT" "$DMS_PKG" "$PPA_NAME" questing

        echo ""
        success "PPA test completed"
        echo ""
        info "Check build status: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages"
    else
        warn "PPA test skipped"
    fi

    echo ""
fi

# Summary
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Testing Summary"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
distro/ubuntu/danklinux/danksearch/debian/changelog (new file, 5 lines)
@@ -0,0 +1,5 @@
danksearch (0.0.7ppa3) questing; urgency=medium

  * Rebuild for packaging fixes (ppa3)

 -- Avenge Media <AvengeMedia.US@gmail.com>  Fri, 21 Nov 2025 14:19:58 -0500
distro/ubuntu/danklinux/danksearch/debian/control (new file, 24 lines)
@@ -0,0 +1,24 @@
Source: danksearch
Section: utils
Priority: optional
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
Build-Depends: debhelper-compat (= 13)
Standards-Version: 4.6.2
Homepage: https://github.com/AvengeMedia/danksearch
Vcs-Browser: https://github.com/AvengeMedia/danksearch
Vcs-Git: https://github.com/AvengeMedia/danksearch.git

Package: danksearch
Architecture: amd64 arm64
Depends: ${misc:Depends}
Description: Fast file search utility for DMS
 DankSearch is a fast file search utility designed for DankMaterialShell.
 It provides efficient file and content search capabilities with minimal
 dependencies. This package contains the pre-built binary from the official
 GitHub release.
 .
 Features include:
  - Fast file searching
  - Lightweight and efficient
  - Designed for DMS integration
  - Minimal resource usage
distro/ubuntu/danklinux/danksearch/debian/copyright (new file, 24 lines)
@@ -0,0 +1,24 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: danksearch
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
Source: https://github.com/AvengeMedia/danksearch

Files: *
Copyright: 2025 Avenge Media LLC
License: GPL-3.0-only

License: GPL-3.0-only
 This package is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License version 3 as
 published by the Free Software Foundation.
 .
 This package is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.
 .
 You should have received a copy of the GNU General Public License
 along with this program. If not, see <https://www.gnu.org/licenses/>
 .
 On Debian systems, the complete text of the GNU General
 Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
1  distro/ubuntu/danklinux/danksearch/debian/files  Normal file
@@ -0,0 +1 @@
danksearch_0.0.7ppa3_source.buildinfo utils optional
33  distro/ubuntu/danklinux/danksearch/debian/rules  Executable file
@@ -0,0 +1,33 @@
#!/usr/bin/make -f

export DH_VERBOSE = 1

# Detect architecture for selecting correct binary
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)

# Map Debian arch to binary filename
ifeq ($(DEB_HOST_ARCH),amd64)
BINARY_FILE := dsearch-amd64
else ifeq ($(DEB_HOST_ARCH),arm64)
BINARY_FILE := dsearch-arm64
else
$(error Unsupported architecture: $(DEB_HOST_ARCH))
endif

%:
	dh $@

override_dh_auto_build:
	# Binary is already included in source package (native format)
	# Downloaded by build-source.sh before upload
	# Just verify it exists and is executable
	test -f $(BINARY_FILE) || (echo "ERROR: $(BINARY_FILE) not found!" && exit 1)
	chmod +x $(BINARY_FILE)

override_dh_auto_install:
	# Install binary as danksearch
	install -Dm755 $(BINARY_FILE) debian/danksearch/usr/bin/danksearch

override_dh_auto_clean:
	# Don't delete binaries - they're part of the source package (native format)
	dh_auto_clean
1  distro/ubuntu/danklinux/danksearch/debian/source/format  Normal file
@@ -0,0 +1 @@
3.0 (native)
BIN  distro/ubuntu/danklinux/danksearch/dsearch-amd64  Executable file
Binary file not shown.
BIN  distro/ubuntu/danklinux/danksearch/dsearch-arm64  Executable file
Binary file not shown.
9  distro/ubuntu/danklinux/dgop/debian/changelog  Normal file
@@ -0,0 +1,9 @@
dgop (0.1.11ppa2) questing; urgency=medium

  * Rebuild for Questing (25.10) - Ubuntu 25.10+ only
  * Stateless CPU/GPU monitoring tool
  * Support for NVIDIA and AMD GPUs
  * JSON output for integration
  * Pre-built binary package for amd64 and arm64

 -- Avenge Media <AvengeMedia.US@gmail.com>  Sun, 16 Nov 2025 22:50:00 -0500
27  distro/ubuntu/danklinux/dgop/debian/control  Normal file
@@ -0,0 +1,27 @@
Source: dgop
Section: utils
Priority: optional
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
Build-Depends: debhelper-compat (= 13),
               wget,
               gzip
Standards-Version: 4.6.2
Homepage: https://github.com/AvengeMedia/dgop
Vcs-Browser: https://github.com/AvengeMedia/dgop
Vcs-Git: https://github.com/AvengeMedia/dgop.git

Package: dgop
Architecture: amd64 arm64
Depends: ${misc:Depends}
Description: Stateless CPU/GPU monitor for DankMaterialShell
 DGOP is a stateless system monitoring tool that provides CPU, GPU,
 memory, and network statistics. Designed for integration with
 DankMaterialShell but can be used standalone.
 .
 Features:
  - CPU usage monitoring
  - GPU usage and temperature (NVIDIA, AMD)
  - Memory and swap statistics
  - Network traffic monitoring
  - Zero-state design (no background processes)
  - JSON output for easy integration
27  distro/ubuntu/danklinux/dgop/debian/copyright  Normal file
@@ -0,0 +1,27 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: dgop
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
Source: https://github.com/AvengeMedia/dgop

Files: *
Copyright: 2025 Avenge Media LLC
License: MIT

License: MIT
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 .
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
 .
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
1  distro/ubuntu/danklinux/dgop/debian/files  Normal file
@@ -0,0 +1 @@
dgop_0.1.11ppa2_source.buildinfo utils optional
38  distro/ubuntu/danklinux/dgop/debian/rules  Executable file
@@ -0,0 +1,38 @@
#!/usr/bin/make -f

export DH_VERBOSE = 1

# Extract version from debian/changelog
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
# Get upstream version (strip -1ppa1 suffix)
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')

# Detect architecture for downloading correct binary
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)

# Map Debian arch to GitHub release arch names
ifeq ($(DEB_HOST_ARCH),amd64)
GITHUB_ARCH := amd64
else ifeq ($(DEB_HOST_ARCH),arm64)
GITHUB_ARCH := arm64
else
$(error Unsupported architecture: $(DEB_HOST_ARCH))
endif

%:
	dh $@

override_dh_auto_build:
	# Binary is already included in source package (native format)
	# Just verify it exists and is executable
	test -f dgop || (echo "ERROR: dgop binary not found!" && exit 1)
	chmod +x dgop

override_dh_auto_install:
	# Install binary
	install -Dm755 dgop debian/dgop/usr/bin/dgop

override_dh_auto_clean:
	# Don't delete dgop binary - it's part of the source package (native format)
	rm -f dgop.gz
	dh_auto_clean
1  distro/ubuntu/danklinux/dgop/debian/source/format  Normal file
@@ -0,0 +1 @@
3.0 (native)
5  distro/ubuntu/dms-git/debian/changelog  Normal file
@@ -0,0 +1,5 @@
dms-git (0.6.2+git2094.6cc6e7c8ppa1) questing; urgency=medium

  * Git snapshot (commit 2094: 6cc6e7c8)

 -- Avenge Media <AvengeMedia.US@gmail.com>  Sun, 23 Nov 2025 00:43:28 -0500
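The dms-git version string above encodes the latest upstream tag, the commit count, and the short hash. A rough sketch of how such a string can be derived from a checkout (mirroring what create-source.sh does further down in this diff; the tag, count, and hash values are illustrative):

# Rough sketch, values illustrative - derive a dms-git style version from a git checkout
TAG=$(git describe --tags --abbrev=0 | sed 's/^v//')   # e.g. 0.6.2
COUNT=$(git rev-list --count HEAD)                     # e.g. 2094
HASH=$(git rev-parse --short HEAD)                     # e.g. 6cc6e7c8
echo "${TAG}+git${COUNT}.${HASH}ppa1"                  # -> 0.6.2+git2094.6cc6e7c8ppa1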
50  distro/ubuntu/dms-git/debian/control  Normal file
@@ -0,0 +1,50 @@
Source: dms-git
Section: x11
Priority: optional
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
Build-Depends: debhelper-compat (= 13)
Standards-Version: 4.6.2
Homepage: https://github.com/AvengeMedia/DankMaterialShell
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git

Package: dms-git
Architecture: amd64
Depends: ${misc:Depends},
         quickshell-git | quickshell,
         accountsservice,
         cava,
         cliphist,
         danksearch,
         dgop,
         matugen,
         qml6-module-qtcore,
         qml6-module-qtmultimedia,
         qml6-module-qtqml,
         qml6-module-qtquick,
         qml6-module-qtquick-controls,
         qml6-module-qtquick-dialogs,
         qml6-module-qtquick-effects,
         qml6-module-qtquick-layouts,
         qml6-module-qtquick-templates,
         qml6-module-qtquick-window,
         qt6ct,
         wl-clipboard
Provides: dms
Conflicts: dms
Replaces: dms
Description: DankMaterialShell - Modern Wayland Desktop Shell (git nightly)
 DMS (DankMaterialShell) is a feature-rich desktop shell built on
 Quickshell, providing a modern and customizable user interface for
 Wayland compositors like niri, hyprland, and sway.
 .
 This is the nightly/git version built from the latest master branch.
 .
 Features include:
  - Material Design inspired UI
  - Customizable themes and appearance
  - Built-in application launcher
  - System tray and notifications
  - Network and Bluetooth management
  - Audio controls
  - Systemd integration
27  distro/ubuntu/dms-git/debian/copyright  Normal file
@@ -0,0 +1,27 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: dms
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
Source: https://github.com/AvengeMedia/DankMaterialShell

Files: *
Copyright: 2025 Avenge Media LLC
License: MIT

License: MIT
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 .
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
 .
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
1  distro/ubuntu/dms-git/debian/files  Normal file
@@ -0,0 +1 @@
dms-git_0.6.2+git2094.6cc6e7c8ppa1_source.buildinfo x11 optional
45  distro/ubuntu/dms-git/debian/rules  Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/make -f

export DH_VERBOSE = 1

# Get git commit date for version
GIT_DATE := $(shell date +%Y%m%d)
GIT_COMMIT := HEAD

%:
	dh $@

override_dh_auto_build:
	# Git source is already included in source package (cloned by build-source.sh)
	# Launchpad build environment has no internet access
	test -d dms-git-repo || (echo "ERROR: dms-git-repo directory not found!" && exit 1)
	test -f dms-distropkg-amd64.gz || (echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1)

	# Extract pre-built binary from latest release
	# Note: For git versions, we use the latest release binary
	# The QML files come from git master
	gunzip -c dms-distropkg-amd64.gz > dms
	chmod +x dms

override_dh_auto_install:
	# Install binary
	install -Dm755 dms debian/dms-git/usr/bin/dms

	# Install QML files from git clone
	mkdir -p debian/dms-git/usr/share/quickshell/dms
	cp -r dms-git-repo/* debian/dms-git/usr/share/quickshell/dms/

	# Remove unnecessary directories
	rm -rf debian/dms-git/usr/share/quickshell/dms/core
	rm -rf debian/dms-git/usr/share/quickshell/dms/distro

	# Install systemd user service
	install -Dm644 dms-git-repo/quickshell/assets/systemd/dms.service \
		debian/dms-git/usr/lib/systemd/user/dms.service

override_dh_auto_clean:
	# Don't delete dms-git-repo directory - it's part of the source package (native format)
	# Clean up build artifacts (but keep dms-distropkg-amd64.gz for Launchpad)
	rm -f dms
	# Don't remove dms-distropkg-amd64.gz - it needs to be included in the source package for Launchpad builds
	dh_auto_clean
1  distro/ubuntu/dms-git/debian/source/format  Normal file
@@ -0,0 +1 @@
3.0 (native)
1  distro/ubuntu/dms-git/debian/source/include-binaries  Normal file
@@ -0,0 +1 @@
dms-distropkg-amd64.gz
4  distro/ubuntu/dms-git/debian/source/options  Normal file
@@ -0,0 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad (which has no internet access)
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-git-repo
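Because Launchpad builders have no network access, these tar-ignore negations are what carry the pre-built binary and the git clone into the native source tarball. A hypothetical sanity check after building the source package (assuming the usual <source>_<version>.tar.xz lands in the parent directory):

# Hypothetical check, run from distro/ubuntu/dms-git after `debuild -S`
tar -tJf ../dms-git_*.tar.xz | grep -E 'dms-distropkg-amd64\.gz|dms-git-repo/' \
    || echo "WARNING: binary or dms-git-repo missing from source tarball"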
5  distro/ubuntu/dms/debian/changelog  Normal file
@@ -0,0 +1,5 @@
dms (0.6.2ppa3) questing; urgency=medium

  * Rebuild for packaging fixes (ppa3)

 -- Avenge Media <AvengeMedia.US@gmail.com>  Sun, 23 Nov 2025 00:40:41 -0500
47  distro/ubuntu/dms/debian/control  Normal file
@@ -0,0 +1,47 @@
Source: dms
Section: x11
Priority: optional
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
Build-Depends: debhelper-compat (= 13)
Standards-Version: 4.6.2
Homepage: https://github.com/AvengeMedia/DankMaterialShell
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git

Package: dms
Architecture: amd64
Depends: ${misc:Depends},
         quickshell-git | quickshell,
         accountsservice,
         cava,
         cliphist,
         danksearch,
         dgop,
         matugen,
         qml6-module-qtcore,
         qml6-module-qtmultimedia,
         qml6-module-qtqml,
         qml6-module-qtquick,
         qml6-module-qtquick-controls,
         qml6-module-qtquick-dialogs,
         qml6-module-qtquick-effects,
         qml6-module-qtquick-layouts,
         qml6-module-qtquick-templates,
         qml6-module-qtquick-window,
         qt6ct,
         wl-clipboard
Conflicts: dms-git
Replaces: dms-git
Description: DankMaterialShell - Modern Wayland Desktop Shell
 DMS (DankMaterialShell) is a feature-rich desktop shell built on
 Quickshell, providing a modern and customizable user interface for
 Wayland compositors like niri, hyprland, and sway.
 .
 Features include:
  - Material Design inspired UI
  - Customizable themes and appearance
  - Built-in application launcher
  - System tray and notifications
  - Network and Bluetooth management
  - Audio controls
  - Systemd integration
27  distro/ubuntu/dms/debian/copyright  Normal file
@@ -0,0 +1,27 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: dms
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
Source: https://github.com/AvengeMedia/DankMaterialShell

Files: *
Copyright: 2025 Avenge Media LLC
License: MIT

License: MIT
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 .
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
 .
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
1  distro/ubuntu/dms/debian/files  Normal file
@@ -0,0 +1 @@
dms_0.6.2ppa3_source.buildinfo x11 optional
63  distro/ubuntu/dms/debian/rules  Executable file
@@ -0,0 +1,63 @@
#!/usr/bin/make -f

export DH_VERBOSE = 1

# Extract version from debian/changelog
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
# Get upstream version (strip -1ppa1 suffix)
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
# Strip ppa suffix and handle git versions
# Examples: 0.5.2ppa9 -> 0.5.2, 0.5.2+git20251116 -> 0.5.2
BASE_VERSION := $(shell echo $(UPSTREAM_VERSION) | sed 's/ppa[0-9]*$$//' | sed 's/+git.*//')

%:
	dh $@

override_dh_auto_build:
	# All files are included in source package (downloaded by build-source.sh)
	# Launchpad build environment has no internet access
	test -f dms-distropkg-amd64.gz || (echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1)
	test -f dms-source.tar.gz || (echo "ERROR: dms-source.tar.gz not found!" && exit 1)

	# Extract pre-built binary
	gunzip -c dms-distropkg-amd64.gz > dms
	chmod +x dms

	# Extract source tarball for QML files
	tar -xzf dms-source.tar.gz
	# Find the extracted directory (it might have various names)
	# and create a symlink to expected name for consistent install
	SOURCE_DIR=$$(find . -maxdepth 1 -type d -name "DankMaterialShell*" | head -n1); \
	if [ -n "$$SOURCE_DIR" ]; then \
		ln -sf $$SOURCE_DIR DankMaterialShell-$(BASE_VERSION); \
	fi

override_dh_auto_install:
	# Install binary
	install -Dm755 dms debian/dms/usr/bin/dms

	# Install QML files from source tarball
	mkdir -p debian/dms/usr/share/quickshell/dms
	cp -r DankMaterialShell-$(BASE_VERSION)/* debian/dms/usr/share/quickshell/dms/

	# Remove unnecessary directories
	rm -rf debian/dms/usr/share/quickshell/dms/core
	rm -rf debian/dms/usr/share/quickshell/dms/distro

	# Install systemd user service
	install -Dm644 DankMaterialShell-$(BASE_VERSION)/quickshell/assets/systemd/dms.service \
		debian/dms/usr/lib/systemd/user/dms.service

	# Generate and install shell completions (if applicable)
	# Uncomment if dms supports completion generation
	# ./dms completion bash > dms.bash
	# ./dms completion zsh > dms.zsh
	# install -Dm644 dms.bash debian/dms/usr/share/bash-completion/completions/dms
	# install -Dm644 dms.zsh debian/dms/usr/share/zsh/vendor-completions/_dms

override_dh_auto_clean:
	rm -f dms
	rm -rf DankMaterialShell-*
	# Don't remove dms-distropkg-amd64.gz and dms-source.tar.gz
	# They need to be included in the source package for Launchpad builds
	dh_auto_clean
1  distro/ubuntu/dms/debian/source/format  Normal file
@@ -0,0 +1 @@
3.0 (native)
2  distro/ubuntu/dms/debian/source/include-binaries  Normal file
@@ -0,0 +1,2 @@
dms-distropkg-amd64.gz
dms-source.tar.gz
4  distro/ubuntu/dms/debian/source/options  Normal file
@@ -0,0 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad (which has no internet access)
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-source.tar.gz
44  distro/ubuntu/dput.cf.template  Normal file
@@ -0,0 +1,44 @@
# dput configuration for AvengeMedia DMS PPAs
# Copy this to ~/.dput.cf (or merge with existing ~/.dput.cf)
#
# Usage:
#   dput ppa:avengemedia/dms ../package_version_source.changes
#   dput ppa:avengemedia/dms-git ../package_version_source.changes

# Stable DMS PPA - for release versions
[ppa:avengemedia/dms]
fqdn = ppa.launchpad.net
method = ftp
incoming = ~avengemedia/ubuntu/dms/
login = anonymous
allow_unsigned_uploads = 0

# Nightly/Git DMS PPA - for development builds
[ppa:avengemedia/dms-git]
fqdn = ppa.launchpad.net
method = ftp
incoming = ~avengemedia/ubuntu/dms-git/
login = anonymous
allow_unsigned_uploads = 0

# Alternative: Use HTTPS instead of FTP (more reliable through firewalls)
# Uncomment these if FTP doesn't work:
#
# [ppa:avengemedia/dms-https]
# fqdn = ppa.launchpad.net
# method = https
# incoming = ~avengemedia/ubuntu/dms/
# login = anonymous
# allow_unsigned_uploads = 0
#
# [ppa:avengemedia/dms-git-https]
# fqdn = ppa.launchpad.net
# method = https
# incoming = ~avengemedia/ubuntu/dms-git/
# login = anonymous
# allow_unsigned_uploads = 0

# Notes:
# - allow_unsigned_uploads = 0 enforces GPG signing (required by Launchpad)
# - anonymous login is standard for PPA uploads
# - The incoming path must match your Launchpad username and PPA name
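The notes above point out that Launchpad rejects unsigned uploads. A minimal end-to-end sketch using this config might look like the following; the key ID, version, and working directory are placeholders, not part of this template:

# Hypothetical flow (placeholders: key ID and version)
cd distro/ubuntu/dms
debuild -S -sa -k"0xDEADBEEF"                         # build and GPG-sign the source package
dput ppa:avengemedia/dms ../dms_0.6.2ppa3_source.changes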
246  distro/ubuntu/ppa/create-and-upload.sh  Executable file
@@ -0,0 +1,246 @@
|
||||
#!/bin/bash
|
||||
# Build and upload PPA package with automatic cleanup
|
||||
# Usage: ./create-and-upload.sh <package-dir> <ppa-name> [ubuntu-series] [--keep-builds]
|
||||
#
|
||||
# Example:
|
||||
# ./create-and-upload.sh ../dms dms questing
|
||||
# ./create-and-upload.sh ../danklinux/dgop danklinux questing --keep-builds
|
||||
|
||||
set -e
|
||||
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
# Parse arguments
|
||||
KEEP_BUILDS=false
|
||||
ARGS=()
|
||||
for arg in "$@"; do
|
||||
if [ "$arg" = "--keep-builds" ]; then
|
||||
KEEP_BUILDS=true
|
||||
else
|
||||
ARGS+=("$arg")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#ARGS[@]} -lt 2 ]; then
|
||||
error "Usage: $0 <package-dir> <ppa-name> [ubuntu-series] [--keep-builds]"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " package-dir : Path to package directory (e.g., ../dms, ../danklinux/dgop)"
|
||||
echo " ppa-name : PPA name (danklinux, dms, dms-git)"
|
||||
echo " ubuntu-series : Ubuntu series (optional, default: questing)"
|
||||
echo " Supported: questing (25.10) and newer only"
|
||||
echo " Note: Requires Qt 6.6+ (quickshell requirement)"
|
||||
echo " --keep-builds : Keep build artifacts after upload (optional)"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " $0 ../dms dms questing"
|
||||
echo " $0 ../danklinux/dgop danklinux questing --keep-builds"
|
||||
echo " $0 ../dms-git dms-git # Defaults to questing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PACKAGE_DIR="${ARGS[0]}"
|
||||
PPA_NAME="${ARGS[1]}"
|
||||
UBUNTU_SERIES="${ARGS[2]:-questing}"
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
BUILD_SCRIPT="$SCRIPT_DIR/create-source.sh"
|
||||
UPLOAD_SCRIPT="$SCRIPT_DIR/upload-ppa.sh"
|
||||
|
||||
# Validate scripts exist
|
||||
if [ ! -f "$BUILD_SCRIPT" ]; then
|
||||
error "Build script not found: $BUILD_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get absolute path
|
||||
PACKAGE_DIR=$(cd "$PACKAGE_DIR" && pwd)
|
||||
PACKAGE_NAME=$(basename "$PACKAGE_DIR")
|
||||
PARENT_DIR=$(dirname "$PACKAGE_DIR")
|
||||
|
||||
info "Building and uploading: $PACKAGE_NAME"
|
||||
info "Package directory: $PACKAGE_DIR"
|
||||
info "PPA: ppa:avengemedia/$PPA_NAME"
|
||||
info "Ubuntu series: $UBUNTU_SERIES"
|
||||
echo
|
||||
|
||||
# Step 1: Build source package
|
||||
info "Step 1: Building source package..."
|
||||
if ! "$BUILD_SCRIPT" "$PACKAGE_DIR" "$UBUNTU_SERIES"; then
|
||||
error "Build failed!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Find the changes file
|
||||
CHANGES_FILE=$(find "$PARENT_DIR" -maxdepth 1 -name "${PACKAGE_NAME}_*_source.changes" -type f | sort -V | tail -1)
|
||||
|
||||
if [ -z "$CHANGES_FILE" ]; then
|
||||
error "Changes file not found in $PARENT_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
info "Found changes file: $CHANGES_FILE"
|
||||
echo
|
||||
|
||||
# Step 2: Upload to PPA
|
||||
info "Step 2: Uploading to PPA..."
|
||||
|
||||
# Check if using lftp (for all PPAs) or dput
|
||||
if [ "$PPA_NAME" = "danklinux" ] || [ "$PPA_NAME" = "dms" ] || [ "$PPA_NAME" = "dms-git" ]; then
|
||||
warn "Using lftp for upload"
|
||||
|
||||
# Extract version from changes file
|
||||
VERSION=$(grep "^Version:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
SOURCE_NAME=$(grep "^Source:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
|
||||
# Find all files to upload
|
||||
BUILD_DIR=$(dirname "$CHANGES_FILE")
|
||||
CHANGES_BASENAME=$(basename "$CHANGES_FILE")
|
||||
DSC_FILE="${CHANGES_BASENAME/_source.changes/.dsc}"
|
||||
TARBALL="${CHANGES_BASENAME/_source.changes/.tar.xz}"
|
||||
BUILDINFO="${CHANGES_BASENAME/_source.changes/_source.buildinfo}"
|
||||
|
||||
# Check all files exist
|
||||
MISSING_FILES=()
|
||||
[ ! -f "$BUILD_DIR/$DSC_FILE" ] && MISSING_FILES+=("$DSC_FILE")
|
||||
[ ! -f "$BUILD_DIR/$TARBALL" ] && MISSING_FILES+=("$TARBALL")
|
||||
[ ! -f "$BUILD_DIR/$BUILDINFO" ] && MISSING_FILES+=("$BUILDINFO")
|
||||
|
||||
if [ ${#MISSING_FILES[@]} -gt 0 ]; then
|
||||
error "Missing required files:"
|
||||
for file in "${MISSING_FILES[@]}"; do
|
||||
error " - $file"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
info "Uploading files:"
|
||||
info " - $CHANGES_BASENAME"
|
||||
info " - $DSC_FILE"
|
||||
info " - $TARBALL"
|
||||
info " - $BUILDINFO"
|
||||
echo
|
||||
|
||||
# lftp build dir change
|
||||
LFTP_SCRIPT=$(mktemp)
|
||||
cat > "$LFTP_SCRIPT" <<EOF
|
||||
cd ~avengemedia/ubuntu/$PPA_NAME/
|
||||
lcd $BUILD_DIR
|
||||
mput $CHANGES_BASENAME
|
||||
mput $DSC_FILE
|
||||
mput $TARBALL
|
||||
mput $BUILDINFO
|
||||
bye
|
||||
EOF
|
||||
|
||||
if lftp -d ftp://anonymous:@ppa.launchpad.net < "$LFTP_SCRIPT"; then
|
||||
success "Upload successful!"
|
||||
rm -f "$LFTP_SCRIPT"
|
||||
else
|
||||
error "Upload failed!"
|
||||
rm -f "$LFTP_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# Use dput for other PPAs
|
||||
if [ ! -f "$UPLOAD_SCRIPT" ]; then
|
||||
error "Upload script not found: $UPLOAD_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Auto-confirm upload (pipe 'y' to the confirmation prompt)
|
||||
if ! echo "y" | "$UPLOAD_SCRIPT" "$CHANGES_FILE" "$PPA_NAME"; then
|
||||
error "Upload failed!"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo
|
||||
success "Package uploaded successfully!"
|
||||
info "Monitor build progress at:"
|
||||
echo " https://launchpad.net/~avengemedia/+archive/ubuntu/$PPA_NAME/+packages"
|
||||
echo
|
||||
|
||||
# Step 3: Cleanup (unless --keep-builds is specified)
|
||||
if [ "$KEEP_BUILDS" = "false" ]; then
|
||||
info "Step 3: Cleaning up build artifacts..."
|
||||
|
||||
# Find all build artifacts in parent directory
|
||||
ARTIFACTS=(
|
||||
"${PACKAGE_NAME}_*.dsc"
|
||||
"${PACKAGE_NAME}_*.tar.xz"
|
||||
"${PACKAGE_NAME}_*.tar.gz"
|
||||
"${PACKAGE_NAME}_*_source.changes"
|
||||
"${PACKAGE_NAME}_*_source.buildinfo"
|
||||
"${PACKAGE_NAME}_*_source.build"
|
||||
)
|
||||
|
||||
REMOVED=0
|
||||
for pattern in "${ARTIFACTS[@]}"; do
|
||||
for file in "$PARENT_DIR"/$pattern; do
|
||||
if [ -f "$file" ]; then
|
||||
rm -f "$file"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
# Clean up downloaded binaries in package directory
|
||||
case "$PACKAGE_NAME" in
|
||||
danksearch)
|
||||
if [ -f "$PACKAGE_DIR/dsearch-amd64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-amd64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dsearch-arm64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-arm64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms)
|
||||
# Remove downloaded binaries and source
|
||||
if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dms-source.tar.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-source.tar.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms-git)
|
||||
# Remove downloaded binary
|
||||
if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
# Remove git source directory
|
||||
if [ -d "$PACKAGE_DIR/dms-git-repo" ]; then
|
||||
rm -rf "$PACKAGE_DIR/dms-git-repo"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ $REMOVED -gt 0 ]; then
|
||||
success "Removed $REMOVED build artifact(s)"
|
||||
else
|
||||
info "No build artifacts to clean up"
|
||||
fi
|
||||
else
|
||||
info "Keeping build artifacts (--keep-builds specified)"
|
||||
info "Build artifacts in: $PARENT_DIR"
|
||||
fi
|
||||
|
||||
echo
|
||||
success "Done!"
|
||||
|
||||
566  distro/ubuntu/ppa/create-source.sh  Executable file
@@ -0,0 +1,566 @@
|
||||
#!/bin/bash
|
||||
# Generic source package builder for DMS PPA packages
|
||||
# Usage: ./create-source.sh <package-dir> [ubuntu-series]
|
||||
#
|
||||
# Example:
|
||||
# ./create-source.sh ../dms questing
|
||||
# ./create-source.sh ../dms-git questing
|
||||
|
||||
set -e
|
||||
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
if [ $# -lt 1 ]; then
|
||||
error "Usage: $0 <package-dir> [ubuntu-series]"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " package-dir : Path to package directory (e.g., ../dms)"
|
||||
echo " ubuntu-series : Ubuntu series (optional, default: noble)"
|
||||
echo " Options: noble, jammy, oracular, mantic"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " $0 ../dms questing"
|
||||
echo " $0 ../dms-git questing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PACKAGE_DIR="$1"
|
||||
UBUNTU_SERIES="${2:-noble}"
|
||||
|
||||
# Validate package directory
|
||||
if [ ! -d "$PACKAGE_DIR" ]; then
|
||||
error "Package directory not found: $PACKAGE_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$PACKAGE_DIR/debian" ]; then
|
||||
error "No debian/ directory found in $PACKAGE_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get absolute path
|
||||
PACKAGE_DIR=$(cd "$PACKAGE_DIR" && pwd)
|
||||
PACKAGE_NAME=$(basename "$PACKAGE_DIR")
|
||||
|
||||
info "Building source package for: $PACKAGE_NAME"
|
||||
info "Package directory: $PACKAGE_DIR"
|
||||
info "Target Ubuntu series: $UBUNTU_SERIES"
|
||||
|
||||
# Check for required files
|
||||
REQUIRED_FILES=(
|
||||
"debian/control"
|
||||
"debian/rules"
|
||||
"debian/changelog"
|
||||
"debian/copyright"
|
||||
"debian/source/format"
|
||||
)
|
||||
|
||||
for file in "${REQUIRED_FILES[@]}"; do
|
||||
if [ ! -f "$PACKAGE_DIR/$file" ]; then
|
||||
error "Required file missing: $file"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Verify GPG key is set up
|
||||
info "Checking GPG key setup..."
|
||||
if ! gpg --list-secret-keys &> /dev/null; then
|
||||
error "No GPG secret keys found. Please set up GPG first!"
|
||||
error "See GPG_SETUP.md for instructions"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
success "GPG key found"
|
||||
|
||||
# Check if debuild is installed
|
||||
if ! command -v debuild &> /dev/null; then
|
||||
error "debuild not found. Install devscripts:"
|
||||
error " sudo dnf install devscripts"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract package info from changelog
|
||||
cd "$PACKAGE_DIR"
|
||||
CHANGELOG_VERSION=$(dpkg-parsechangelog -S Version)
|
||||
SOURCE_NAME=$(dpkg-parsechangelog -S Source)
|
||||
|
||||
info "Source package: $SOURCE_NAME"
|
||||
info "Version: $CHANGELOG_VERSION"
|
||||
|
||||
# Check if version targets correct Ubuntu series
|
||||
CHANGELOG_SERIES=$(dpkg-parsechangelog -S Distribution)
|
||||
if [ "$CHANGELOG_SERIES" != "$UBUNTU_SERIES" ] && [ "$CHANGELOG_SERIES" != "UNRELEASED" ]; then
|
||||
warn "Changelog targets '$CHANGELOG_SERIES' but building for '$UBUNTU_SERIES'"
|
||||
warn "Consider updating changelog with: dch -r '' -D $UBUNTU_SERIES"
|
||||
fi
|
||||
|
||||
# Detect package type and update version automatically
|
||||
cd "$PACKAGE_DIR"
|
||||
|
||||
# Function to get latest tag from GitHub
|
||||
get_latest_tag() {
|
||||
local repo="$1"
|
||||
# Try GitHub API first (faster)
|
||||
if command -v curl &> /dev/null; then
|
||||
LATEST_TAG=$(curl -s "https://api.github.com/repos/$repo/releases/latest" 2>/dev/null | grep '"tag_name":' | sed 's/.*"tag_name": "\(.*\)".*/\1/' | head -1)
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
echo "$LATEST_TAG" | sed 's/^v//'
|
||||
return
|
||||
fi
|
||||
fi
|
||||
# Fallback: clone and get latest tag
|
||||
TEMP_REPO=$(mktemp -d)
|
||||
if git clone --depth=1 --quiet "https://github.com/$repo.git" "$TEMP_REPO" 2>/dev/null; then
|
||||
LATEST_TAG=$(cd "$TEMP_REPO" && git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "")
|
||||
rm -rf "$TEMP_REPO"
|
||||
echo "$LATEST_TAG"
|
||||
fi
|
||||
}
|
||||
|
||||
# Detect if package is git-based
|
||||
IS_GIT_PACKAGE=false
|
||||
GIT_REPO=""
|
||||
SOURCE_DIR=""
|
||||
|
||||
# Check package name for -git suffix
|
||||
if [[ "$PACKAGE_NAME" == *"-git" ]]; then
|
||||
IS_GIT_PACKAGE=true
|
||||
fi
|
||||
|
||||
# Check rules file for git clone patterns and extract repo
|
||||
if grep -q "git clone" debian/rules 2>/dev/null; then
|
||||
IS_GIT_PACKAGE=true
|
||||
# Extract GitHub repo URL from rules
|
||||
GIT_URL=$(grep -o "git clone.*https://github.com/[^/]*/[^/]*\.git" debian/rules 2>/dev/null | head -1 | sed 's/.*github\.com\///' | sed 's/\.git.*//' || echo "")
|
||||
if [ -n "$GIT_URL" ]; then
|
||||
GIT_REPO="$GIT_URL"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Special handling for known packages
|
||||
case "$PACKAGE_NAME" in
|
||||
dms-git)
|
||||
IS_GIT_PACKAGE=true
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
SOURCE_DIR="dms-git-repo"
|
||||
;;
|
||||
dms)
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
info "Downloading pre-built binaries and source for dms..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
# Download amd64 binary (will be included in source package)
|
||||
if [ ! -f "dms-distropkg-amd64.gz" ]; then
|
||||
info "Downloading dms binary for amd64..."
|
||||
if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-distropkg-amd64.gz"; then
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dms-distropkg-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Download source tarball for QML files
|
||||
if [ ! -f "dms-source.tar.gz" ]; then
|
||||
info "Downloading dms source for QML files..."
|
||||
if wget -O dms-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
|
||||
success "source tarball downloaded"
|
||||
else
|
||||
error "Failed to download dms-source.tar.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
danksearch)
|
||||
# danksearch uses pre-built binary from releases, like dgop
|
||||
GIT_REPO="AvengeMedia/danksearch"
|
||||
;;
|
||||
dgop)
|
||||
# dgop uses pre-built binary from releases
|
||||
GIT_REPO="AvengeMedia/dgop"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Handle git packages
|
||||
if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
info "Detected git package: $PACKAGE_NAME"
|
||||
|
||||
# Determine source directory name
|
||||
if [ -z "$SOURCE_DIR" ]; then
|
||||
# Default: use package name without -git suffix + -source or -repo
|
||||
BASE_NAME=$(echo "$PACKAGE_NAME" | sed 's/-git$//')
|
||||
if [ -d "${BASE_NAME}-source" ] 2>/dev/null; then
|
||||
SOURCE_DIR="${BASE_NAME}-source"
|
||||
elif [ -d "${BASE_NAME}-repo" ] 2>/dev/null; then
|
||||
SOURCE_DIR="${BASE_NAME}-repo"
|
||||
elif [ -d "$BASE_NAME" ] 2>/dev/null; then
|
||||
SOURCE_DIR="$BASE_NAME"
|
||||
else
|
||||
SOURCE_DIR="${BASE_NAME}-source"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Always clone fresh source to get latest commit info
|
||||
info "Cloning $GIT_REPO from GitHub (getting latest commit info)..."
|
||||
TEMP_CLONE=$(mktemp -d)
|
||||
if git clone "https://github.com/$GIT_REPO.git" "$TEMP_CLONE"; then
|
||||
# Get git commit info from fresh clone
|
||||
GIT_COMMIT_HASH=$(cd "$TEMP_CLONE" && git rev-parse --short HEAD)
|
||||
GIT_COMMIT_COUNT=$(cd "$TEMP_CLONE" && git rev-list --count HEAD)
|
||||
|
||||
# Get upstream version from latest git tag (e.g., 0.2.1)
|
||||
# Sort all tags by version and get the latest one (not just the one reachable from HEAD)
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git tag -l "v*" | sed 's/^v//' | sort -V | tail -1)
|
||||
if [ -z "$UPSTREAM_VERSION" ]; then
|
||||
# Fallback: try without v prefix
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git tag -l | grep -E '^[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -1)
|
||||
fi
|
||||
if [ -z "$UPSTREAM_VERSION" ]; then
|
||||
# Last resort: use git describe
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "0.0.1")
|
||||
fi
|
||||
|
||||
# Verify we got valid commit info
|
||||
if [ -z "$GIT_COMMIT_COUNT" ] || [ "$GIT_COMMIT_COUNT" = "0" ]; then
|
||||
error "Failed to get commit count from $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$GIT_COMMIT_HASH" ]; then
|
||||
error "Failed to get commit hash from $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
success "Got commit info: $GIT_COMMIT_COUNT ($GIT_COMMIT_HASH), upstream: $UPSTREAM_VERSION"
|
||||
|
||||
# Update changelog with git commit info
|
||||
info "Updating changelog with git commit info..."
|
||||
# Format: 0.2.1+git705.fdbb86appa1
|
||||
# Check if we're rebuilding the same commit (increment PPA number if so)
|
||||
BASE_VERSION="${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}"
|
||||
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
|
||||
PPA_NUM=1
|
||||
|
||||
# If current version matches the base version, increment PPA number
|
||||
# Escape special regex characters in BASE_VERSION for pattern matching
|
||||
ESCAPED_BASE=$(echo "$BASE_VERSION" | sed 's/\./\\./g' | sed 's/+/\\+/g')
|
||||
if [[ "$CURRENT_VERSION" =~ ^${ESCAPED_BASE}ppa([0-9]+)$ ]]; then
|
||||
PPA_NUM=$((BASH_REMATCH[1] + 1))
|
||||
info "Detected rebuild of same commit (current: $CURRENT_VERSION), incrementing PPA number to $PPA_NUM"
|
||||
else
|
||||
info "New commit or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
|
||||
|
||||
# Use sed to update changelog (non-interactive, faster)
|
||||
# Get current changelog content - find the next package header line (starts with package name)
|
||||
# Skip the first entry entirely by finding the second occurrence of the package name at start of line
|
||||
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
|
||||
if [ -n "$OLD_ENTRY_START" ]; then
|
||||
# Found second entry, use everything from there
|
||||
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog)
|
||||
else
|
||||
# No second entry found, changelog will only have new entry
|
||||
CHANGELOG_CONTENT=""
|
||||
fi
|
||||
|
||||
# Create new changelog entry with proper format
|
||||
CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium
|
||||
|
||||
* Git snapshot (commit ${GIT_COMMIT_COUNT}: ${GIT_COMMIT_HASH})
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
|
||||
# Write new changelog (new entry, blank line, then old entries)
|
||||
echo "$CHANGELOG_ENTRY" > debian/changelog
|
||||
if [ -n "$CHANGELOG_CONTENT" ]; then
|
||||
echo "" >> debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >> debian/changelog
|
||||
fi
|
||||
success "Version updated to $NEW_VERSION"
|
||||
|
||||
# Now clone to source directory (without .git for inclusion in package)
|
||||
rm -rf "$SOURCE_DIR"
|
||||
cp -r "$TEMP_CLONE" "$SOURCE_DIR"
|
||||
rm -rf "$SOURCE_DIR/.git"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
|
||||
# Vendor Rust dependencies for packages that need it
|
||||
if false; then
|
||||
# No current packages need Rust vendoring
|
||||
if [ -f "$SOURCE_DIR/Cargo.toml" ]; then
|
||||
info "Vendoring Rust dependencies (Launchpad has no internet access)..."
|
||||
cd "$SOURCE_DIR"
|
||||
|
||||
# Clean up any existing vendor directory and .orig files
|
||||
# (prevents cargo from including .orig files in checksums)
|
||||
rm -rf vendor .cargo
|
||||
find . -type f -name "*.orig" -exec rm -f {} + || true
|
||||
|
||||
# Download all dependencies (crates.io + git repos) to vendor/
|
||||
# cargo vendor outputs the config to stderr, capture it
|
||||
mkdir -p .cargo
|
||||
cargo vendor 2>&1 | awk '
|
||||
/^\[source\.crates-io\]/ { printing=1 }
|
||||
printing { print }
|
||||
/^directory = "vendor"$/ { exit }
|
||||
' > .cargo/config.toml
|
||||
|
||||
# Verify vendor directory was created
|
||||
if [ ! -d "vendor" ]; then
|
||||
error "Failed to vendor dependencies"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify config was created
|
||||
if [ ! -s .cargo/config.toml ]; then
|
||||
error "Failed to create cargo config"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# CRITICAL: Remove ALL .orig files from vendor directory
|
||||
# These break cargo checksums when dh_clean tries to use them
|
||||
info "Cleaning .orig files from vendor directory..."
|
||||
find vendor -type f -name "*.orig" -exec rm -fv {} + || true
|
||||
find vendor -type f -name "*.rej" -exec rm -fv {} + || true
|
||||
|
||||
# Verify no .orig files remain
|
||||
ORIG_COUNT=$(find vendor -type f -name "*.orig" | wc -l)
|
||||
if [ "$ORIG_COUNT" -gt 0 ]; then
|
||||
warn "Found $ORIG_COUNT .orig files still in vendor directory"
|
||||
fi
|
||||
|
||||
success "Rust dependencies vendored (including git dependencies)"
|
||||
cd "$PACKAGE_DIR"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Download pre-built binary for dms-git
|
||||
# dms-git uses latest release binary with git master QML files
|
||||
if [ "$PACKAGE_NAME" = "dms-git" ]; then
|
||||
info "Downloading latest release binary for dms-git..."
|
||||
if [ ! -f "dms-distropkg-amd64.gz" ]; then
|
||||
if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-amd64.gz"; then
|
||||
success "Latest release binary downloaded"
|
||||
else
|
||||
error "Failed to download dms-distropkg-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
info "Release binary already downloaded"
|
||||
fi
|
||||
fi
|
||||
|
||||
success "Source prepared for packaging"
|
||||
else
|
||||
error "Failed to clone $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
# Handle stable packages - get latest tag
|
||||
elif [ -n "$GIT_REPO" ]; then
|
||||
info "Detected stable package: $PACKAGE_NAME"
|
||||
info "Fetching latest tag from $GIT_REPO..."
|
||||
|
||||
LATEST_TAG=$(get_latest_tag "$GIT_REPO")
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
# Check source format - native packages can't use dashes
|
||||
SOURCE_FORMAT=$(cat debian/source/format 2>/dev/null | head -1 || echo "3.0 (quilt)")
|
||||
|
||||
# Get current version to check if we need to increment PPA number
|
||||
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
|
||||
PPA_NUM=1
|
||||
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]]; then
|
||||
# Native format: 0.2.1ppa1 (no dash, no revision)
|
||||
BASE_VERSION="${LATEST_TAG}"
|
||||
# Check if we're rebuilding the same version (increment PPA number if so)
|
||||
if [[ "$CURRENT_VERSION" =~ ^${LATEST_TAG}ppa([0-9]+)$ ]]; then
|
||||
PPA_NUM=$((BASH_REMATCH[1] + 1))
|
||||
info "Detected rebuild of same version (current: $CURRENT_VERSION), incrementing PPA number to $PPA_NUM"
|
||||
else
|
||||
info "New version or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
|
||||
else
|
||||
# Quilt format: 0.2.1-1ppa1 (with revision)
|
||||
BASE_VERSION="${LATEST_TAG}-1"
|
||||
# Check if we're rebuilding the same version (increment PPA number if so)
|
||||
ESCAPED_BASE=$(echo "$BASE_VERSION" | sed 's/\./\\./g' | sed 's/-/\\-/g')
|
||||
if [[ "$CURRENT_VERSION" =~ ^${ESCAPED_BASE}ppa([0-9]+)$ ]]; then
|
||||
PPA_NUM=$((BASH_REMATCH[1] + 1))
|
||||
info "Detected rebuild of same version (current: $CURRENT_VERSION), incrementing PPA number to $PPA_NUM"
|
||||
else
|
||||
info "New version or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
|
||||
fi
|
||||
|
||||
# Check if version needs updating (either new version or PPA number changed)
|
||||
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
|
||||
if [ "$PPA_NUM" -gt 1 ]; then
|
||||
info "Updating changelog for rebuild (PPA number incremented to $PPA_NUM)"
|
||||
else
|
||||
info "Updating changelog to latest tag: $LATEST_TAG"
|
||||
fi
|
||||
# Use sed to update changelog (non-interactive)
|
||||
# Get current changelog content - find the next package header line
|
||||
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
|
||||
if [ -n "$OLD_ENTRY_START" ]; then
|
||||
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog)
|
||||
else
|
||||
CHANGELOG_CONTENT=""
|
||||
fi
|
||||
|
||||
# Create appropriate changelog message
|
||||
if [ "$PPA_NUM" -gt 1 ]; then
|
||||
CHANGELOG_MSG="Rebuild for packaging fixes (ppa${PPA_NUM})"
|
||||
else
|
||||
CHANGELOG_MSG="Upstream release ${LATEST_TAG}"
|
||||
fi
|
||||
|
||||
CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium
|
||||
|
||||
* ${CHANGELOG_MSG}
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
echo "$CHANGELOG_ENTRY" > debian/changelog
|
||||
if [ -n "$CHANGELOG_CONTENT" ]; then
|
||||
echo "" >> debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >> debian/changelog
|
||||
fi
|
||||
success "Version updated to $NEW_VERSION"
|
||||
else
|
||||
info "Version already at latest tag: $LATEST_TAG"
|
||||
fi
|
||||
else
|
||||
warn "Could not determine latest tag for $GIT_REPO, using existing version"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Handle packages that need pre-built binaries downloaded
|
||||
cd "$PACKAGE_DIR"
|
||||
case "$PACKAGE_NAME" in
|
||||
danksearch)
|
||||
info "Downloading pre-built binaries for danksearch..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
# Download both amd64 and arm64 binaries (will be included in source package)
|
||||
# Launchpad can't download during build, so we include both architectures
|
||||
if [ ! -f "dsearch-amd64" ]; then
|
||||
info "Downloading dsearch binary for amd64..."
|
||||
if wget -O dsearch-amd64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-amd64.gz"; then
|
||||
gunzip dsearch-amd64.gz
|
||||
chmod +x dsearch-amd64
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -f "dsearch-arm64" ]; then
|
||||
info "Downloading dsearch binary for arm64..."
|
||||
if wget -O dsearch-arm64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-arm64.gz"; then
|
||||
gunzip dsearch-arm64.gz
|
||||
chmod +x dsearch-arm64
|
||||
success "arm64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-arm64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
dgop)
|
||||
# dgop binary should already be committed in the repo
|
||||
if [ ! -f "dgop" ]; then
|
||||
warn "dgop binary not found - should be committed to repo"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
cd - > /dev/null
|
||||
|
||||
# Check if this version already exists on PPA (only in CI environment)
|
||||
if command -v rmadison >/dev/null 2>&1; then
|
||||
info "Checking if version already exists on PPA..."
|
||||
PPA_VERSION_CHECK=$(rmadison -u ppa:avengemedia/dms "$PACKAGE_NAME" 2>/dev/null | grep "$VERSION" || true)
|
||||
if [ -n "$PPA_VERSION_CHECK" ]; then
|
||||
warn "Version $VERSION already exists on PPA:"
|
||||
echo "$PPA_VERSION_CHECK"
|
||||
echo
|
||||
warn "Skipping upload to avoid duplicate. If this is a rebuild, increment the ppa number."
|
||||
cd "$PACKAGE_DIR"
|
||||
# Still clean up extracted sources
|
||||
case "$PACKAGE_NAME" in
|
||||
dms-git)
|
||||
rm -rf DankMaterialShell-*
|
||||
success "Cleaned up DankMaterialShell-*/ directory"
|
||||
;;
|
||||
esac
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# Build source package
|
||||
info "Building source package..."
|
||||
echo
|
||||
|
||||
# Determine if we need to include orig tarball (-sa) or just debian changes (-sd)
|
||||
# Check if .orig.tar.xz already exists in parent directory (previous build)
|
||||
ORIG_TARBALL="${PACKAGE_NAME}_${VERSION%.ppa*}.orig.tar.xz"
|
||||
if [ -f "../$ORIG_TARBALL" ]; then
|
||||
info "Found existing orig tarball, using -sd (debian changes only)"
|
||||
DEBUILD_SOURCE_FLAG="-sd"
|
||||
else
|
||||
info "No existing orig tarball found, using -sa (include original source)"
|
||||
DEBUILD_SOURCE_FLAG="-sa"
|
||||
fi
|
||||
|
||||
# Use -S for source only, -sa/-sd for source inclusion
|
||||
# -d skips dependency checking (we're building on Fedora, not Ubuntu)
|
||||
# Pipe yes to automatically answer prompts (e.g., "continue anyway?")
|
||||
if yes | DEBIAN_FRONTEND=noninteractive debuild -S $DEBUILD_SOURCE_FLAG -d; then
|
||||
echo
|
||||
success "Source package built successfully!"
|
||||
|
||||
# List generated files
|
||||
info "Generated files in $(dirname "$PACKAGE_DIR"):"
|
||||
ls -lh "$(dirname "$PACKAGE_DIR")"/${SOURCE_NAME}_${CHANGELOG_VERSION}* 2>/dev/null || true
|
||||
|
||||
# Show what to do next
|
||||
echo
|
||||
info "Next steps:"
|
||||
echo " 1. Review the source package:"
|
||||
echo " cd $(dirname "$PACKAGE_DIR")"
|
||||
echo " ls -lh ${SOURCE_NAME}_${CHANGELOG_VERSION}*"
|
||||
echo
|
||||
echo " 2. Upload to PPA (stable):"
|
||||
echo " dput ppa:avengemedia/dms ${SOURCE_NAME}_${CHANGELOG_VERSION}_source.changes"
|
||||
echo
|
||||
echo " 3. Or upload to PPA (nightly):"
|
||||
echo " dput ppa:avengemedia/dms-git ${SOURCE_NAME}_${CHANGELOG_VERSION}_source.changes"
|
||||
echo
|
||||
echo " 4. Or use the upload script:"
|
||||
echo " ./upload-ppa.sh $(dirname "$PACKAGE_DIR")/${SOURCE_NAME}_${CHANGELOG_VERSION}_source.changes dms"
|
||||
|
||||
else
|
||||
error "Source package build failed!"
|
||||
exit 1
|
||||
fi
|
||||
179  distro/ubuntu/ppa/upload-ppa.sh  Executable file
@@ -0,0 +1,179 @@
|
||||
#!/bin/bash
# Generic PPA uploader for DMS packages
# Usage: ./upload-ppa.sh <changes-file> <ppa-name>
#
# Example:
# ./upload-ppa.sh ../dms_0.5.2ppa1_source.changes dms
# ./upload-ppa.sh ../dms_0.5.2+git705.fdbb86appa1_source.changes dms-git

set -e

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

info() { echo -e "${BLUE}[INFO]${NC} $1"; }
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
error() { echo -e "${RED}[ERROR]${NC} $1"; }

if [ $# -lt 2 ]; then
error "Usage: $0 <changes-file> <ppa-name>"
echo
echo "Arguments:"
echo " changes-file : Path to .changes file (e.g., ../dms_0.5.2ppa1_source.changes)"
echo " ppa-name : PPA to upload to (dms or dms-git)"
echo
echo "Examples:"
echo " $0 ../dms_0.5.2ppa1_source.changes dms"
echo " $0 ../dms_0.5.2+git705.fdbb86appa1_source.changes dms-git"
exit 1
fi

CHANGES_FILE="$1"
PPA_NAME="$2"

# Validate changes file
if [ ! -f "$CHANGES_FILE" ]; then
error "Changes file not found: $CHANGES_FILE"
exit 1
fi

if [[ ! "$CHANGES_FILE" =~ \.changes$ ]]; then
error "File must be a .changes file"
exit 1
fi

# Validate PPA name
if [ "$PPA_NAME" != "dms" ] && [ "$PPA_NAME" != "dms-git" ] && [ "$PPA_NAME" != "danklinux" ]; then
error "PPA name must be 'dms', 'dms-git', or 'danklinux'"
exit 1
fi

# Get absolute path
CHANGES_FILE=$(realpath "$CHANGES_FILE")

info "Uploading to PPA: ppa:avengemedia/$PPA_NAME"
info "Changes file: $CHANGES_FILE"

# Check if dput or lftp is installed
UPLOAD_METHOD=""
if command -v dput &> /dev/null; then
UPLOAD_METHOD="dput"
elif command -v lftp &> /dev/null; then
UPLOAD_METHOD="lftp"
warn "dput not found, using lftp as fallback"
else
error "Neither dput nor lftp found. Install one with:"
error " sudo dnf install dput-ng # Preferred but broken on Fedora"
error " sudo dnf install lftp # Alternative upload method"
exit 1
fi

# Check if ~/.dput.cf exists
if [ ! -f "$HOME/.dput.cf" ]; then
error "~/.dput.cf not found!"
echo
info "Create it from template:"
echo " cp $(dirname "$0")/../dput.cf.template ~/.dput.cf"
echo
info "Or create it manually with:"
cat <<'EOF'
[ppa:avengemedia/dms]
fqdn = ppa.launchpad.net
method = ftp
incoming = ~avengemedia/ubuntu/dms/
login = anonymous
allow_unsigned_uploads = 0

[ppa:avengemedia/dms-git]
fqdn = ppa.launchpad.net
method = ftp
incoming = ~avengemedia/ubuntu/dms-git/
login = anonymous
allow_unsigned_uploads = 0
EOF
exit 1
fi

# Check if PPA is configured in dput.cf
if ! grep -q "^\[ppa:avengemedia/$PPA_NAME\]" "$HOME/.dput.cf"; then
error "PPA 'ppa:avengemedia/$PPA_NAME' not found in ~/.dput.cf"
echo
info "Add this to ~/.dput.cf:"
cat <<EOF
[ppa:avengemedia/$PPA_NAME]
fqdn = ppa.launchpad.net
method = ftp
incoming = ~avengemedia/ubuntu/$PPA_NAME/
login = anonymous
allow_unsigned_uploads = 0
EOF
exit 1
fi

# Extract package info from changes file
PACKAGE_NAME=$(grep "^Source:" "$CHANGES_FILE" | awk '{print $2}')
VERSION=$(grep "^Version:" "$CHANGES_FILE" | awk '{print $2}')

info "Package: $PACKAGE_NAME"
info "Version: $VERSION"

# Show files that will be uploaded
echo
info "Files to be uploaded:"
grep "^ [a-f0-9]" "$CHANGES_FILE" | awk '{print " - " $5}' || true

# Verify GPG signature
info "Verifying GPG signature..."
if gpg --verify "$CHANGES_FILE" 2>/dev/null; then
success "GPG signature valid"
else
error "GPG signature verification failed!"
error "The .changes file must be signed with your GPG key"
exit 1
fi

# Ask for confirmation
echo
warn "About to upload to: ppa:avengemedia/$PPA_NAME"
read -p "Continue? (y/N) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
info "Upload cancelled"
exit 0
fi

# Upload to PPA
info "Uploading to Launchpad..."
echo

if dput "ppa:avengemedia/$PPA_NAME" "$CHANGES_FILE"; then
echo
success "Upload successful!"
echo
info "Monitor build progress at:"
echo " https://launchpad.net/~avengemedia/+archive/ubuntu/$PPA_NAME/+packages"
echo
info "Builds typically take 5-30 minutes depending on:"
echo " - Build queue length"
echo " - Package complexity"
echo " - Number of target Ubuntu series"
echo
info "Once built, users can install with:"
echo " sudo add-apt-repository ppa:avengemedia/$PPA_NAME"
echo " sudo apt update"
echo " sudo apt install $PACKAGE_NAME"

else
error "Upload failed!"
echo
info "Common issues:"
echo " - GPG key not verified on Launchpad (check https://launchpad.net/~/+editpgpkeys)"
echo " - Version already uploaded (must increment version number)"
echo " - Network/firewall blocking FTP (try HTTPS method in dput.cf)"
echo " - Email in changelog doesn't match GPG key email"
exit 1
fi
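Not part of this diff: when the plain-FTP path is blocked (the "Common issues" note above), Launchpad generally also accepts authenticated SFTP uploads. A minimal sketch of an alternative ~/.dput.cf stanza, where the stanza name and your-launchpad-id are placeholders rather than anything defined in this repository:

    [ppa:avengemedia/dms-sftp]
    fqdn = ppa.launchpad.net
    method = sftp
    incoming = ~avengemedia/ubuntu/dms/
    login = your-launchpad-id
    allow_unsigned_uploads = 0

The upload invocation would then be dput ppa:avengemedia/dms-sftp <package>_source.changes, unchanged in every other respect.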
21
flake.nix
@@ -24,6 +24,11 @@
dgop = dgop.packages.${pkgs.stdenv.hostPlatform.system}.dgop;
dankMaterialShell = self.packages.${pkgs.stdenv.hostPlatform.system}.dankMaterialShell;
};
mkModuleWithDmsPkgs = path: args @ {pkgs, ...}: {
imports = [
(import path (args // {dmsPkgs = buildDmsPkgs pkgs;}))
];
};
in {
formatter = forEachSystem (_: pkgs: pkgs.alejandra);

@@ -81,20 +86,12 @@
}
);

homeModules.dankMaterialShell.default = {pkgs, ...}: let
dmsPkgs = buildDmsPkgs pkgs;
in {
imports = [./distro/nix/default.nix];
_module.args.dmsPkgs = dmsPkgs;
};
homeModules.dankMaterialShell.default = mkModuleWithDmsPkgs ./distro/nix/home.nix;

homeModules.dankMaterialShell.niri = import ./distro/nix/niri.nix;

nixosModules.greeter = {pkgs, ...}: let
dmsPkgs = buildDmsPkgs pkgs;
in {
imports = [./distro/nix/greeter.nix];
_module.args.dmsPkgs = dmsPkgs;
};
nixosModules.dankMaterialShell = mkModuleWithDmsPkgs ./distro/nix/nixos.nix;

nixosModules.greeter = mkModuleWithDmsPkgs ./distro/nix/greeter.nix;
};
}

@@ -1,5 +1,4 @@
pragma Singleton

pragma ComponentBehavior: Bound

import QtCore
@@ -23,79 +22,79 @@ Singleton {
property string profileLastPath: ""

property var fileBrowserSettings: ({
"wallpaper": {
"lastPath": "",
"viewMode": "grid",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"profile": {
"lastPath": "",
"viewMode": "grid",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"notepad_save": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"notepad_load": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"generic": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"default": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
}
})
"wallpaper": {
"lastPath": "",
"viewMode": "grid",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"profile": {
"lastPath": "",
"viewMode": "grid",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"notepad_save": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"notepad_load": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"generic": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
},
"default": {
"lastPath": "",
"viewMode": "list",
"sortBy": "name",
"sortAscending": true,
"iconSizeIndex": 1,
"showSidebar": true
}
})

Component.onCompleted: {
if (!isGreeterMode) {
loadCache()
loadCache();
}
}

function loadCache() {
_loading = true
parseCache(cacheFile.text())
_loading = false
_loading = true;
parseCache(cacheFile.text());
_loading = false;
}

function parseCache(content) {
_loading = true
_loading = true;
try {
if (content && content.trim()) {
const cache = JSON.parse(content)
const cache = JSON.parse(content);

wallpaperLastPath = cache.wallpaperLastPath !== undefined ? cache.wallpaperLastPath : ""
profileLastPath = cache.profileLastPath !== undefined ? cache.profileLastPath : ""
wallpaperLastPath = cache.wallpaperLastPath !== undefined ? cache.wallpaperLastPath : "";
profileLastPath = cache.profileLastPath !== undefined ? cache.profileLastPath : "";

if (cache.fileBrowserSettings !== undefined) {
fileBrowserSettings = cache.fileBrowserSettings
fileBrowserSettings = cache.fileBrowserSettings;
} else if (cache.fileBrowserViewMode !== undefined) {
fileBrowserSettings = {
"wallpaper": {
@@ -122,65 +121,60 @@ Singleton {
"iconSizeIndex": 1,
"showSidebar": true
}
}
};
}

if (cache.configVersion === undefined) {
migrateFromUndefinedToV1(cache)
cleanupUnusedKeys()
saveCache()
migrateFromUndefinedToV1(cache);
cleanupUnusedKeys();
saveCache();
}
}
} catch (e) {
console.warn("CacheData: Failed to parse cache:", e.message)
console.warn("CacheData: Failed to parse cache:", e.message);
} finally {
_loading = false
_loading = false;
}
}

function saveCache() {
if (_loading)
return
return;
cacheFile.setText(JSON.stringify({
"wallpaperLastPath": wallpaperLastPath,
"profileLastPath": profileLastPath,
"fileBrowserSettings": fileBrowserSettings,
"configVersion": cacheConfigVersion
}, null, 2))
"wallpaperLastPath": wallpaperLastPath,
"profileLastPath": profileLastPath,
"fileBrowserSettings": fileBrowserSettings,
"configVersion": cacheConfigVersion
}, null, 2));
}

function migrateFromUndefinedToV1(cache) {
console.info("CacheData: Migrating configuration from undefined to version 1")
console.info("CacheData: Migrating configuration from undefined to version 1");
}

function cleanupUnusedKeys() {
const validKeys = [
"wallpaperLastPath",
"profileLastPath",
"fileBrowserSettings",
"configVersion"
]
const validKeys = ["wallpaperLastPath", "profileLastPath", "fileBrowserSettings", "configVersion"];

try {
const content = cacheFile.text()
if (!content || !content.trim()) return

const cache = JSON.parse(content)
let needsSave = false
const content = cacheFile.text();
if (!content || !content.trim())
return;
const cache = JSON.parse(content);
let needsSave = false;

for (const key in cache) {
if (!validKeys.includes(key)) {
console.log("CacheData: Removing unused key:", key)
delete cache[key]
needsSave = true
console.log("CacheData: Removing unused key:", key);
delete cache[key];
needsSave = true;
}
}

if (needsSave) {
cacheFile.setText(JSON.stringify(cache, null, 2))
cacheFile.setText(JSON.stringify(cache, null, 2));
}
} catch (e) {
console.warn("CacheData: Failed to cleanup unused keys:", e.message)
console.warn("CacheData: Failed to cleanup unused keys:", e.message);
}
}

@@ -194,12 +188,12 @@ Singleton {
watchChanges: !isGreeterMode
onLoaded: {
if (!isGreeterMode) {
parseCache(cacheFile.text())
parseCache(cacheFile.text());
}
}
onLoadFailed: error => {
if (!isGreeterMode) {
console.info("CacheData: No cache file found, starting fresh")
console.info("CacheData: No cache file found, starting fresh");
}
}
}

@@ -1,28 +1,24 @@
import Quickshell
pragma Singleton
pragma ComponentBehavior: Bound
import Quickshell

Singleton {
id: root

// Clear all image cache
function clearImageCache() {
Quickshell.execDetached(["rm", "-rf", Paths.stringify(
Paths.imagecache)])
Paths.mkdir(Paths.imagecache)
Quickshell.execDetached(["rm", "-rf", Paths.stringify(Paths.imagecache)]);
Paths.mkdir(Paths.imagecache);
}

// Clear cache older than specified minutes
function clearOldCache(ageInMinutes) {
Quickshell.execDetached(
["find", Paths.stringify(
Paths.imagecache), "-name", "*.png", "-mmin", `+${ageInMinutes}`, "-delete"])
Quickshell.execDetached(["find", Paths.stringify(Paths.imagecache), "-name", "*.png", "-mmin", `+${ageInMinutes}`, "-delete"]);
}

// Clear cache for specific size
function clearCacheForSize(size) {
Quickshell.execDetached(
["find", Paths.stringify(
Paths.imagecache), "-name", `*@${size}x${size}.png`, "-delete"])
Quickshell.execDetached(["find", Paths.stringify(Paths.imagecache), "-name", `*@${size}x${size}.png`, "-delete"]);
}

// Get cache size in MB
@@ -30,8 +26,7 @@ Singleton {
var process = Qt.createQmlObject(`
import Quickshell.Io
Process {
command: ["du", "-sm", "${Paths.stringify(
Paths.imagecache)}"]
command: ["du", "-sm", "${Paths.stringify(Paths.imagecache)}"]
running: true
stdout: StdioCollector {
onStreamFinished: {
@@ -40,6 +35,6 @@ Singleton {
}
}
}
`, root)
`, root);
}
}

@@ -1,27 +1,26 @@
pragma Singleton
pragma ComponentBehavior: Bound
import QtQuick
import Qt.labs.folderlistmodel
import Quickshell
import Quickshell.Io
pragma Singleton
pragma ComponentBehavior: Bound

Singleton {
id: root

readonly property string _rawLocale: Qt.locale().name
readonly property string _lang: _rawLocale.split(/[_-]/)[0]
readonly property var _candidates: {
readonly property var _candidates: {
const fullUnderscore = _rawLocale;
const fullHyphen = _rawLocale.replace("_", "-");
const fullHyphen = _rawLocale.replace("_", "-");
return [fullUnderscore, fullHyphen, _lang].filter(c => c && c !== "en");
}


readonly property url translationsFolder: Qt.resolvedUrl("../translations/poexports")

property string currentLocale: "en"
property var translations: ({})
property bool translationsLoaded: false
property var translations: ({})
property bool translationsLoaded: false

property url _selectedPath: ""

@@ -32,7 +31,8 @@ Singleton {
showDirs: false
showDotAndDotDot: false

onStatusChanged: if (status === FolderListModel.Ready) root._pickTranslation()
onStatusChanged: if (status === FolderListModel.Ready)
root._pickTranslation()
}

FileView {
@@ -41,73 +41,75 @@

onLoaded: {
try {
root.translations = JSON.parse(text())
root.translationsLoaded = true
console.info(`I18n: Loaded translations for '${root.currentLocale}' ` +
`(${Object.keys(root.translations).length} contexts)`)
root.translations = JSON.parse(text());
root.translationsLoaded = true;
console.info(`I18n: Loaded translations for '${root.currentLocale}' ` + `(${Object.keys(root.translations).length} contexts)`);
} catch (e) {
console.warn(`I18n: Error parsing '${root.currentLocale}':`, e,
"- falling back to English")
root._fallbackToEnglish()
console.warn(`I18n: Error parsing '${root.currentLocale}':`, e, "- falling back to English");
root._fallbackToEnglish();
}
}

onLoadFailed: (error) => {
console.warn(`I18n: Failed to load '${root.currentLocale}' (${error}), ` +
"falling back to English")
root._fallbackToEnglish()
onLoadFailed: error => {
console.warn(`I18n: Failed to load '${root.currentLocale}' (${error}), ` + "falling back to English");
root._fallbackToEnglish();
}
}

function _pickTranslation() {
const present = new Set()
const present = new Set();
for (let i = 0; i < dir.count; i++) {
const name = dir.get(i, "fileName") // e.g. "zh_CN.json"
const name = dir.get(i, "fileName"); // e.g. "zh_CN.json"
if (name && name.endsWith(".json")) {
present.add(name.slice(0, -5))
present.add(name.slice(0, -5));
}
}

for (let i = 0; i < _candidates.length; i++) {
const cand = _candidates[i]
const cand = _candidates[i];
if (present.has(cand)) {
_useLocale(cand, dir.folder + "/" + cand + ".json")
return
_useLocale(cand, dir.folder + "/" + cand + ".json");
return;
}
}

_fallbackToEnglish()
_fallbackToEnglish();
}

function _useLocale(localeTag, fileUrl) {
currentLocale = localeTag
_selectedPath = fileUrl
translationsLoaded = false
translations = ({})
console.info(`I18n: Using locale '${localeTag}' from ${fileUrl}`)
currentLocale = localeTag;
_selectedPath = fileUrl;
translationsLoaded = false;
translations = ({});
console.info(`I18n: Using locale '${localeTag}' from ${fileUrl}`);
}

function _fallbackToEnglish() {
currentLocale = "en"
_selectedPath = ""
translationsLoaded = false
translations = ({})
console.warn("I18n: Falling back to built-in English strings")
currentLocale = "en";
_selectedPath = "";
translationsLoaded = false;
translations = ({});
console.warn("I18n: Falling back to built-in English strings");
}

function tr(term, context) {
if (!translationsLoaded || !translations) return term
const ctx = context || term
if (translations[ctx] && translations[ctx][term]) return translations[ctx][term]
if (!translationsLoaded || !translations)
return term;
const ctx = context || term;
if (translations[ctx] && translations[ctx][term])
return translations[ctx][term];
for (const c in translations) {
if (translations[c] && translations[c][term]) return translations[c][term]
if (translations[c] && translations[c][term])
return translations[c][term];
}
return term
return term;
}

function trContext(context, term) {
if (!translationsLoaded || !translations) return term
if (translations[context] && translations[context][term]) return translations[context][term]
return term
if (!translationsLoaded || !translations)
return term;
if (translations[context] && translations[context][term])
return translations[context][term];
return term;
}
}

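Not part of this diff, a minimal illustrative sketch of how a component resolves strings through the I18n singleton above; the label text and context name are made up, and it assumes the singleton is visible from the calling module:

    import QtQuick

    Text {
        // falls back to the untranslated term when no translation file is loaded
        text: I18n.tr("Settings")
        // explicit context lookup, mirroring trContext() above:
        // text: I18n.trContext("ControlCenter", "Settings")
    }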
@@ -1,4 +1,5 @@
pragma Singleton
pragma ComponentBehavior: Bound

import Quickshell
import QtQuick
@@ -10,7 +11,11 @@ Singleton {

function openModal(modal) {
if (!modal.allowStacking) {
closeAllModalsExcept(modal)
closeAllModalsExcept(modal);
}
if (!modal.keepPopoutsOpen) {
PopoutManager.closeAllPopouts();
}
TrayMenuManager.closeAllMenus();
}
}

@@ -1,4 +1,5 @@
pragma Singleton
pragma ComponentBehavior: Bound

import Quickshell
import QtQuick
@@ -10,15 +11,14 @@ Singleton {

function showOSD(osd) {
if (!osd || !osd.screen)
return

const screenName = osd.screen.name
const currentOSD = currentOSDsByScreen[screenName]
return;
const screenName = osd.screen.name;
const currentOSD = currentOSDsByScreen[screenName];

if (currentOSD && currentOSD !== osd) {
currentOSD.hide()
currentOSD.hide();
}

currentOSDsByScreen[screenName] = osd
currentOSDsByScreen[screenName] = osd;
}
}

@@ -1,4 +1,5 @@
pragma Singleton
pragma ComponentBehavior: Bound

import Quickshell
import QtCore
@@ -6,60 +7,74 @@ import QtCore

Singleton {
id: root

readonly property url home: StandardPaths.standardLocations(
StandardPaths.HomeLocation)[0]
readonly property url pictures: StandardPaths.standardLocations(
StandardPaths.PicturesLocation)[0]
readonly property url home: StandardPaths.standardLocations(StandardPaths.HomeLocation)[0]
readonly property url pictures: StandardPaths.standardLocations(StandardPaths.PicturesLocation)[0]

readonly property url data: `${StandardPaths.standardLocations(
StandardPaths.GenericDataLocation)[0]}/DankMaterialShell`
readonly property url state: `${StandardPaths.standardLocations(
StandardPaths.GenericStateLocation)[0]}/DankMaterialShell`
readonly property url cache: `${StandardPaths.standardLocations(
StandardPaths.GenericCacheLocation)[0]}/DankMaterialShell`
readonly property url config: `${StandardPaths.standardLocations(
StandardPaths.GenericConfigLocation)[0]}/DankMaterialShell`
readonly property url data: `${StandardPaths.standardLocations(StandardPaths.GenericDataLocation)[0]}/DankMaterialShell`
readonly property url state: `${StandardPaths.standardLocations(StandardPaths.GenericStateLocation)[0]}/DankMaterialShell`
readonly property url cache: `${StandardPaths.standardLocations(StandardPaths.GenericCacheLocation)[0]}/DankMaterialShell`
readonly property url config: `${StandardPaths.standardLocations(StandardPaths.GenericConfigLocation)[0]}/DankMaterialShell`

readonly property url imagecache: `${cache}/imagecache`

function stringify(path: url): string {
return path.toString().replace(/%20/g, " ")
return path.toString().replace(/%20/g, " ");
}

function expandTilde(path: string): string {
return strip(path.replace("~", stringify(root.home)))
return strip(path.replace("~", stringify(root.home)));
}

function shortenHome(path: string): string {
return path.replace(strip(root.home), "~")
return path.replace(strip(root.home), "~");
}

function strip(path: url): string {
return stringify(path).replace("file://", "")
return stringify(path).replace("file://", "");
}

function toFileUrl(path: string): string {
return path.startsWith("file://") ? path : "file://" + path
return path.startsWith("file://") ? path : "file://" + path;
}

function mkdir(path: url): void {
Quickshell.execDetached(["mkdir", "-p", strip(path)])
Quickshell.execDetached(["mkdir", "-p", strip(path)]);
}

function copy(from: url, to: url): void {
Quickshell.execDetached(["cp", strip(from), strip(to)])
Quickshell.execDetached(["cp", strip(from), strip(to)]);
}

// ! Spotify and maybe some other apps report the wrong app id in toplevels, hardcode special case
function moddedAppId(appId: string): string {
if (appId === "Spotify")
return "spotify"
return "spotify";
if (appId === "beepertexts")
return "beeper"
return "beeper";
if (appId === "home assistant desktop")
return "homeassistant-desktop"
return "homeassistant-desktop";
if (appId.includes("com.transmissionbt.transmission"))
return "transmission-gtk"
return appId
return "transmission-gtk";
return appId;
}

function getAppIcon(appId: string, desktopEntry: var): string {
if (appId === "org.quickshell") {
return Qt.resolvedUrl("../assets/danklogo.svg");
}

const moddedId = moddedAppId(appId);
if (moddedId.toLowerCase().includes("steam_app")) {
return "";
}

return desktopEntry && desktopEntry.icon ? Quickshell.iconPath(desktopEntry.icon, true) : "";
}

function getAppName(appId: string, desktopEntry: var): string {
if (appId === "org.quickshell") {
return "dms";
}

return desktopEntry && desktopEntry.name ? desktopEntry.name : appId;
}
}

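Not part of this diff, a short illustrative sketch of the Paths helpers above; the property names are made up and the commented results assume a user named "user":

    // cacheDir: e.g. /home/user/.cache/DankMaterialShell/imagecache
    property string cacheDir: Paths.strip(Paths.imagecache)
    // display: e.g. ~/.cache/DankMaterialShell/imagecache
    property string display: Paths.shortenHome(cacheDir)
    // ensure the cache directory exists before first use
    Component.onCompleted: Paths.mkdir(Paths.imagecache)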
176
quickshell/Common/PopoutManager.qml
Normal file
@@ -0,0 +1,176 @@
pragma Singleton

import Quickshell
import QtQuick

Singleton {
id: root

property var currentPopoutsByScreen: ({})
property var currentPopoutTriggers: ({})

signal popoutOpening
signal popoutChanged

function showPopout(popout) {
if (!popout || !popout.screen)
return;
popoutOpening();

const screenName = popout.screen.name;

for (const otherScreenName in currentPopoutsByScreen) {
const otherPopout = currentPopoutsByScreen[otherScreenName];
if (!otherPopout || otherPopout === popout)
continue;
if (otherPopout.dashVisible !== undefined) {
otherPopout.dashVisible = false;
} else if (otherPopout.notificationHistoryVisible !== undefined) {
otherPopout.notificationHistoryVisible = false;
} else {
otherPopout.close();
}
}

currentPopoutsByScreen[screenName] = popout;
popoutChanged();
ModalManager.closeAllModalsExcept(null);
}

function hidePopout(popout) {
if (!popout || !popout.screen)
return;
const screenName = popout.screen.name;
if (currentPopoutsByScreen[screenName] === popout) {
currentPopoutsByScreen[screenName] = null;
currentPopoutTriggers[screenName] = null;
popoutChanged();
}
}

function closeAllPopouts() {
for (const screenName in currentPopoutsByScreen) {
const popout = currentPopoutsByScreen[screenName];
if (!popout)
continue;
if (popout.dashVisible !== undefined) {
popout.dashVisible = false;
} else if (popout.notificationHistoryVisible !== undefined) {
popout.notificationHistoryVisible = false;
} else {
popout.close();
}
}
currentPopoutsByScreen = {};
}

function getActivePopout(screen) {
if (!screen)
return null;
return currentPopoutsByScreen[screen.name] || null;
}

function requestPopout(popout, tabIndex, triggerSource) {
if (!popout || !popout.screen)
return;
const screenName = popout.screen.name;
const currentPopout = currentPopoutsByScreen[screenName];
const triggerId = triggerSource !== undefined ? triggerSource : tabIndex;

const willOpen = !(currentPopout === popout && popout.shouldBeVisible && triggerId !== undefined && currentPopoutTriggers[screenName] === triggerId);
if (willOpen) {
popoutOpening();
}

let justClosedSamePopout = false;
for (const otherScreenName in currentPopoutsByScreen) {
if (otherScreenName === screenName)
continue;
const otherPopout = currentPopoutsByScreen[otherScreenName];
if (!otherPopout)
continue;
if (otherPopout === popout) {
justClosedSamePopout = true;
}

if (otherPopout.dashVisible !== undefined) {
otherPopout.dashVisible = false;
} else if (otherPopout.notificationHistoryVisible !== undefined) {
otherPopout.notificationHistoryVisible = false;
} else {
otherPopout.close();
}
}

if (currentPopout && currentPopout !== popout) {
if (currentPopout.dashVisible !== undefined) {
currentPopout.dashVisible = false;
} else if (currentPopout.notificationHistoryVisible !== undefined) {
currentPopout.notificationHistoryVisible = false;
} else {
currentPopout.close();
}
}

if (currentPopout === popout && popout.shouldBeVisible) {
if (triggerId !== undefined && currentPopoutTriggers[screenName] === triggerId) {
if (popout.dashVisible !== undefined) {
popout.dashVisible = false;
} else if (popout.notificationHistoryVisible !== undefined) {
popout.notificationHistoryVisible = false;
} else {
popout.close();
}
return;
}

if (triggerId === undefined) {
if (popout.dashVisible !== undefined) {
popout.dashVisible = false;
} else if (popout.notificationHistoryVisible !== undefined) {
popout.notificationHistoryVisible = false;
} else {
popout.close();
}
return;
}

if (tabIndex !== undefined && popout.currentTabIndex !== undefined) {
popout.currentTabIndex = tabIndex;
}
currentPopoutTriggers[screenName] = triggerId;
}

currentPopoutTriggers[screenName] = triggerId;
currentPopoutsByScreen[screenName] = popout;
popoutChanged();

if (tabIndex !== undefined && popout.currentTabIndex !== undefined) {
popout.currentTabIndex = tabIndex;
}

if (currentPopout !== popout) {
ModalManager.closeAllModalsExcept(null);
}

if (justClosedSamePopout) {
Qt.callLater(() => {
if (popout.dashVisible !== undefined) {
popout.dashVisible = true;
} else if (popout.notificationHistoryVisible !== undefined) {
popout.notificationHistoryVisible = true;
} else {
popout.open();
}
});
} else {
if (popout.dashVisible !== undefined) {
popout.dashVisible = true;
} else if (popout.notificationHistoryVisible !== undefined) {
popout.notificationHistoryVisible = true;
} else {
popout.open();
}
}
}
}
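Not part of this diff, a hedged sketch of how a bar widget might drive the manager above; controlCenterPopout, the tab index, and the "clock" trigger id are illustrative names, not identifiers from this changeset:

    MouseArea {
        anchors.fill: parent
        // Passing the same trigger id again while the popout is visible closes it;
        // a different trigger id or tab index re-targets the already-open popout.
        onClicked: PopoutManager.requestPopout(controlCenterPopout, 0, "clock")
    }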
Some files were not shown because too many files have changed in this diff.