mirror of https://github.com/AvengeMedia/DankMaterialShell.git synced 2026-01-24 21:42:51 -05:00

Compare commits


32 Commits

Author SHA1 Message Date
bbedward
1926db95de dankinstall fix plasma session collision 2025-12-26 13:14:01 -05:00
bbedward
5ad2a9d704 remove tests from master 2025-12-15 23:59:25 -05:00
Lucas
e0ab20dbda nix: fix greeter per-monitor and per-mode wallpapers (#974) 2025-12-15 22:47:42 -05:00
bbedward
aadc3111a2 fix undefined modal warnings 2025-12-15 22:06:54 -05:00
bbedward
741d492084 v1.0.3 2025-12-15 21:32:07 -05:00
bbedward
604d55015c gamma: guard against redundant application - QML syncs its desired state with Go, e.g. when settings are changed or opened. Go was applying gamma even if unchanged - track last applied gamma to avoid redundant sends 2025-12-15 21:31:45 -05:00
bbedward
a4ce39caa5 core: add test coverage for some of the wayland stack - mostly targeting race condition detection 2025-12-15 21:31:22 -05:00
tsukasa
0a82c9877d dankmodal: removed backgroundWindow to fix clicking twice (#1030)
* dankmodal: removed backgroundWindow

removed 'backgroundWindow' and combined it with 'contentWindow'

* made single window behavior specific to hyprland

this should keep behavior on other compositors the same and fix having to
click twice to exit out of Spotlight/ClipboardHist/Powermenu
2025-12-15 21:28:15 -05:00
tsukasa
56f5c5eccb Fixed having to click twice to exit out of Spotlight/Cliphist/Powermenu (#1022)
There may be more cases, but this fixes the need to click the
background twice to close those modals.
2025-12-15 21:28:04 -05:00
bbedward
d20b5adbfa battery: fix button group scaling 2025-12-15 21:27:29 -05:00
bbedward
10dc86a5dc vpn: optim cc and dankbar widget 2025-12-15 21:27:15 -05:00
bbedward
5463aed213 binds: fix to scale with arbitrary font sizes 2025-12-15 21:19:12 -05:00
bbedward
f435f0d413 dwl: fix layout popout 2025-12-15 21:18:40 -05:00
Souyama
521d804763 Change DPMS off to DPMS toggle in hyprland.conf (#1011) 2025-12-15 21:18:12 -05:00
bbedward
e203ec960a cava: don't set method/source 2025-12-15 21:17:39 -05:00
bbedward
830ca10b45 vpn: just try and import all types on errors 2025-12-15 21:17:00 -05:00
bbedward
4ffa06945a wallpaper: scale texture to physical pixels - reverts a regression 2025-12-15 21:16:43 -05:00
bbedward
b1406fc49a matugen: scrub the never implemented dynamic contrast palette 2025-12-15 21:16:20 -05:00
bbedward
f8179167a8 niri: fix gap reactivity 2025-12-15 21:15:47 -05:00
bbedward
32998a5219 wallpaper: clamp max texture size 2025-12-15 21:14:46 -05:00
bbedward
7fb358bada v1.0.2 2025-12-12 10:18:54 -05:00
bbedward
73cf3130e1 ci: disable pkg builds from main release wf 2025-12-12 10:18:31 -05:00
bbedward
119b5df6df gamma: fix initial night mode enablement 2025-12-12 10:15:38 -05:00
bbedward
8ede810d32 settings: make default height screen-aware 2025-12-12 10:14:33 -05:00
bbedward
830dd93af5 chore: bump version to v1.0.1 2025-12-12 10:04:47 -05:00
bbedward
75f28c5ea7 ci: switch to dispatch-based release flow 2025-12-12 10:04:20 -05:00
bbedward
6c9b8c590e dankinstall: call add-wants for niri/hyprland with dms service 2025-12-12 10:04:20 -05:00
bbedward
24d9b77307 niri: fix keybind handling of cooldown-ms parameter 2025-12-12 10:04:20 -05:00
bbedward
d4be68912c workspaces: make icons scale with bar size, fix valign of numbers, fixes #990 2025-12-12 10:04:20 -05:00
bbedward
a443721000 core: fix socket reported CLI version 2025-12-12 10:03:32 -05:00
bbedward
786b097187 plugins: hide uninstall and update buttons for system plugins 2025-12-12 10:03:18 -05:00
bbedward
8ca60c7d2a dwl: fix layout popout not opening fixes #980 2025-12-12 10:03:04 -05:00
558 changed files with 13590 additions and 65446 deletions

View File

@@ -1,8 +0,0 @@
[*.sh]
# like -i=4
indent_style = space
indent_size = 4
[*.nix]
# like -i=4
indent_style = space
indent_size = 4

69
.githooks/pre-commit Executable file
View File

@@ -0,0 +1,69 @@
#!/usr/bin/env bash
set -euo pipefail
HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$HOOK_DIR/.." && pwd)"
cd "$REPO_ROOT"
# =============================================================================
# Go CI checks (when core/ files are staged)
# =============================================================================
STAGED_CORE_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep '^core/' || true)
if [[ -n "$STAGED_CORE_FILES" ]]; then
echo "Go files staged in core/, running CI checks..."
cd "$REPO_ROOT/core"
# Format check
echo " Checking gofmt..."
UNFORMATTED=$(gofmt -s -l . 2>/dev/null || true)
if [[ -n "$UNFORMATTED" ]]; then
echo "The following files are not formatted:"
echo "$UNFORMATTED"
echo ""
echo "Run: cd core && gofmt -s -w ."
exit 1
fi
# golangci-lint
if command -v golangci-lint &>/dev/null; then
echo " Running golangci-lint..."
golangci-lint run ./...
else
echo " Warning: golangci-lint not installed, skipping lint"
echo " Install: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"
fi
# Tests
echo " Running tests..."
if ! go test ./... >/dev/null 2>&1; then
echo "Tests failed! Run 'go test ./...' for details."
exit 1
fi
# Build checks
echo " Building..."
mkdir -p bin
go build -buildvcs=false -o bin/dms ./cmd/dms
go build -buildvcs=false -o bin/dms-distro -tags distro_binary ./cmd/dms
go build -buildvcs=false -o bin/dankinstall ./cmd/dankinstall
echo "All Go CI checks passed!"
cd "$REPO_ROOT"
fi
# =============================================================================
# i18n sync check (DISABLED for now)
# =============================================================================
# if [[ -n "${POEDITOR_API_TOKEN:-}" ]] && [[ -n "${POEDITOR_PROJECT_ID:-}" ]]; then
# if command -v python3 &>/dev/null; then
# if ! python3 scripts/i18nsync.py check &>/dev/null; then
# echo "Translations out of sync"
# echo "Run: python3 scripts/i18nsync.py sync"
# exit 1
# fi
# fi
# fi
exit 0
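The hook above lives in .githooks/ rather than .git/hooks/, so Git only runs it once it is pointed at that directory. A minimal sketch for enabling it locally, assuming the repository relies on core.hooksPath (the wiring itself is not shown in this compare view):

```bash
# Run from the repository root.
# Assumption: the project expects core.hooksPath wiring; this is not shown in the diff.
git config core.hooksPath .githooks   # make Git look up hooks in .githooks/
chmod +x .githooks/pre-commit         # marked executable in the diff already; harmless otherwise
```

After this, any commit that stages files under core/ triggers the gofmt, golangci-lint, test, and build checks defined in the hook.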

65
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,65 @@
---
name: Bug Report
about: Crashes or unexpected behaviors
title: ""
labels: "bug"
assignees: ""
---
<!-- If your issue is related to ICONS
- Purple and black checkerboards are Qt's way of signalling that an icon doesn't exist
- FIX: Configure a Qt6 icon theme or icon pack in DMS Settings that has the icon you want
- Follow the [THEMING](https://danklinux.com/docs/dankmaterialshell/icon-theming) section to ensure your Qt environment variable is configured correctly for themes.
- Once done, configure an icon theme - either however you normally do with gtk3 or qt6ct, or through the built-in settings modal. -->
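For illustration, a minimal sketch of the environment-variable step referenced above, assuming qt6ct provides the Qt platform theme (QT_QPA_PLATFORMTHEME is a standard Qt variable; whether your setup should point it at qt6ct specifically is an assumption here - the THEMING link is the authoritative reference):

```bash
# Assumption: qt6ct supplies the Qt platform theme so icon themes apply to Quickshell apps.
# Export this in your session environment (e.g. environment.d or your compositor's config).
export QT_QPA_PLATFORMTHEME=qt6ct
```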
## Compositor
- [ ] niri
- [ ] Hyprland
- [ ] dwl (MangoWC)
- [ ] sway
- [ ] Other (specify)
## Distribution
<!-- Arch, Fedora, Debian, etc. -->
## dms version
<!-- Output of dms version command -->
## Description
<!-- Brief description of the issue -->
## Expected Behavior
<!-- Describe what you expected to happen -->
## Steps to Reproduce
<!-- Please provide detailed steps to reproduce the issue -->
1.
2.
3.
## Error Messages/Logs
<!-- Please include any error messages, stack traces, or relevant logs -->
<!-- you can get a log file with the following steps:
dms kill
mkdir ~/dms_logs
nohup dms run > ~/dms_logs/dms-$(date +%s).txt 2>&1 &
Then trigger your issue, and share the contents of ~/dms_logs/dms-<timestamp>.txt
-->
```
Paste error messages or logs here
```
## Screenshots/Recordings
<!-- If applicable, add screenshots or screen recordings -->

View File

@@ -1,96 +0,0 @@
name: Bug Report
description: Crashes or unexpected behaviors
labels:
- bug
body:
- type: markdown
attributes:
value: |
## DankMaterialShell Bug Report
Limit your report to one issue per submission unless closely related
- type: checkboxes
id: compositor
attributes:
label: Compositor
options:
- label: Niri
- label: Hyprland
- label: MangoWC (dwl)
- label: Sway
validations:
required: true
- type: checkboxes
id: distribution
attributes:
label: Distribution
options:
- label: Arch Linux
- label: CachyOS
- label: Fedora
- label: NixOS
- label: Debian
- label: Ubuntu
- label: Gentoo
- label: OpenSUSE
- label: Other (specify below)
validations:
required: true
- type: input
id: distribution_other
attributes:
label: If Other, please specify
placeholder: e.g., PikaOS, Void Linux, etc.
validations:
required: false
- type: input
id: dms_version
attributes:
label: dms version
description: Output of dms version command
placeholder: e.g., 1.2.3
validations:
required: true
- type: textarea
id: description
attributes:
label: Description
description: Brief description of the issue
placeholder: What happened?
validations:
required: true
- type: textarea
id: expected_behavior
attributes:
label: Expected Behavior
description: What did you expect to happen?
placeholder: Describe the expected behavior
validations:
required: false
- type: textarea
id: steps_to_reproduce
attributes:
label: Steps to Reproduce & Installation Method
description: Please provide detailed steps to reproduce the issue
placeholder: |
1. ...
2. ...
3. ...
validations:
required: true
- type: textarea
id: logs
attributes:
label: Error Messages/Logs
description: Please include any error messages, stack traces, or relevant logs
placeholder: |
Paste error messages or logs here
validations:
required: false
- type: textarea
id: screenshots
attributes:
label: Screenshots/Recordings
description: If applicable, add screenshots or screen recordings
placeholder: Attach images or videos here
validations:
required: false

View File

@@ -0,0 +1,33 @@
---
name: Request a Feature
about: New widgets, new widget behavior, etc.
title: ""
labels: "enhancement"
assignees: ""
---
## Feature Description
<!-- Brief description of the feature requested -->
## Use Case
<!-- Explain the purpose of this feature/why it'd be useful to you -->
## Compositor
Is this feature specific to one compositor?
- [ ] All compositors
- [ ] niri
- [ ] Hyprland
- [ ] dwl (MangoWC)
- [ ] sway
## Proposed Solution
<!-- If you have any ideas for how to implement this, please share! -->
## Alternatives/Existing Solutions
<!-- Include any similar/pre-existing products that solve this problem -->

View File

@@ -1,55 +0,0 @@
name: Feature Request
description: Suggest a new feature or improvement for DMS
labels:
- enhancement
body:
- type: markdown
attributes:
value: |
## DankMaterialShell Feature Request
- type: textarea
id: feature_description
attributes:
label: Feature Description
description: Brief description of the feature requested
placeholder: What feature would you like to see?
validations:
required: true
- type: textarea
id: use_case
attributes:
label: Use Case
description: Explain the purpose of this feature/why it'd be useful to you
placeholder: Why is this feature important?
validations:
required: false
- type: checkboxes
id: compositor
attributes:
label: Compositor(s)
description: Is this feature specific to one or more compositors?
options:
- label: All compositors
- label: Niri
- label: Hyprland
- label: MangoWC (dwl)
- label: Sway
- label: Other (specify below)
validations:
required: false
- type: textarea
id: proposed_solution
attributes:
label: Proposed Solution
description: If you have any ideas for how to implement this, please share!
placeholder: Suggest a solution or approach
validations:
required: false
- type: textarea
id: alternatives
attributes:
label: Alternatives/Existing Solutions
description: Include any similar/pre-existing products that solve this problem
placeholder: List alternatives or existing solutions
validations:
required: false

View File

@@ -0,0 +1,40 @@
---
name: Request Assistance or Support
about: Help with installation, usage, or general questions.
title: ""
labels: "support"
assignees: ""
---
## Compositor
- [ ] niri
- [ ] Hyprland
- [ ] dwl (MangoWC)
- [ ] sway
- [ ] other
## Distribution
<!-- Arch, Fedora, Debian, etc. -->
## dms version
<!-- Output of dms version command -->
## Description
<!-- Brief description of the support needed -->
## Solutions Tried
<!-- Describe what you've tried so far -->
<!-- Outlining what you've tried so far helps us make improvements to the user experience and documentation to avoid recurrent issues -->
## Configuration Details
<!-- Include any configuration if relevant -->
## Screenshots/Recordings
<!-- If applicable, add screenshots or screen recordings -->

View File

@@ -1,69 +0,0 @@
name: Support Request
description: Help with installation, usage, or general questions about DankMaterialShell
labels:
- support
body:
- type: markdown
attributes:
value: |
## DankMaterialShell Support Request
- type: checkboxes
id: compositor
attributes:
label: Compositor
options:
- label: Niri
- label: Hyprland
- label: MangoWC (dwl)
- label: Sway
- label: Other (specify below)
validations:
required: false
- type: input
id: distribution
attributes:
label: Distribution
description: Which Linux distribution are you using? (e.g., Arch, Fedora, Debian, etc.)
placeholder: Your Linux distribution
validations:
required: false
- type: input
id: dms_version
attributes:
label: dms version
description: Output of dms version command
placeholder: e.g., 1.2.3
validations:
required: false
- type: textarea
id: description
attributes:
label: Description
description: Brief description of the support needed
placeholder: What do you need help with?
validations:
required: true
- type: textarea
id: solutions_tried
attributes:
label: Solutions Tried
description: Describe what you've tried so far (commands, documentation, etc.)
placeholder: List steps or resources you've already tried
validations:
required: false
- type: textarea
id: configuration
attributes:
label: Configuration Details
description: Include relevant configuration details, if any
placeholder: Add configuration or environment info
validations:
required: false
- type: textarea
id: screenshots
attributes:
label: Screenshots/Recordings
description: If applicable, add screenshots or screen recordings
placeholder: Attach images or videos here
validations:
required: false

View File

@@ -1,383 +0,0 @@
name: Update OBS Packages
on:
workflow_dispatch:
inputs:
package:
description: "Package to update (dms, dms-git, or all)"
required: false
default: "all"
force_upload:
description: "Force upload without version check"
required: false
default: "false"
type: choice
options:
- "false"
- "true"
rebuild_release:
description: "Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)"
required: false
default: ""
push:
tags:
- "v*"
schedule:
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
version: ${{ steps.check.outputs.version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install OSC
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Check for updates
id: check
run: |
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION (always update)"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "Checking if dms-git source has changed..."
# Get current commit hash (8 chars to match spec format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Check OBS for last uploaded commit
OBS_BASE="$HOME/.cache/osc-checkouts"
mkdir -p "$OBS_BASE"
OBS_PROJECT="home:AvengeMedia:dms-git"
if [[ -d "$OBS_BASE/$OBS_PROJECT/dms-git" ]]; then
cd "$OBS_BASE/$OBS_PROJECT/dms-git"
osc up -q 2>/dev/null || true
# Extract commit hash from spec Version line; format like 0.6.2+git2264.a679be68
if [[ -f "dms-git.spec" ]]; then
OBS_COMMIT=$(grep "^Version:" "dms-git.spec" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$OBS_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already uploaded to OBS, skipping"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (OBS has $OBS_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract OBS commit, proceeding with update"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No spec file in OBS, proceeding with update"
fi
cd "${{ github.workspace }}"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 First upload to OBS, update needed"
fi
elif [[ "${{ github.event.inputs.force_upload }}" == "true" ]]; then
PKG="${{ github.event.inputs.package }}"
if [[ -z "$PKG" || "$PKG" == "all" ]]; then
echo "packages=all" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🚀 Force upload: all packages"
else
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🚀 Force upload: $PKG"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=all" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
update-obs:
name: Upload to OBS
needs: check-updates
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
if: |
github.event.inputs.force_upload == 'true' ||
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Generate GitHub App Token
id: generate_token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ steps.generate_token.outputs.token }}
- name: Check if last commit was automated
id: check-loop
run: |
LAST_COMMIT_MSG=$(git log -1 --pretty=%B | head -1)
if [[ "$LAST_COMMIT_MSG" == "ci: Auto-update PPA packages"* ]] || [[ "$LAST_COMMIT_MSG" == "ci: Auto-update OBS packages"* ]]; then
echo "⏭️ Last commit was automated ($LAST_COMMIT_MSG), skipping to prevent infinite loop"
echo "skip=true" >> $GITHUB_OUTPUT
else
echo "✅ Last commit was not automated, proceeding"
echo "skip=false" >> $GITHUB_OUTPUT
fi
- name: Determine packages to update
if: steps.check-loop.outputs.skip != 'true'
id: packages
run: |
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: updating git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Update dms-git spec version
if: steps.check-loop.outputs.skip != 'true' && (contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all')
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
# Update version in spec
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
# Add changelog entry
DATE_STR=$(date "+%a %b %d %Y")
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
sed -i "/%changelog/a\\$CHANGELOG_ENTRY" distro/opensuse/dms-git.spec
- name: Update Debian dms-git changelog version
if: steps.check-loop.outputs.skip != 'true' && (contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all')
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
# Debian version format: 0.6.2+git2256.9162e314
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
CHANGELOG_DATE=$(date -R)
CHANGELOG_FILE="distro/debian/dms-git/debian/changelog"
# Get current version from changelog
CURRENT_VERSION=$(head -1 "$CHANGELOG_FILE" | sed 's/.*(\([^)]*\)).*/\1/')
echo "Current Debian version: $CURRENT_VERSION"
echo "New version: $NEW_VERSION"
# Only update if version changed
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
# Create new changelog entry at top
TEMP_CHANGELOG=$(mktemp)
cat > "$TEMP_CHANGELOG" << EOF
dms-git ($NEW_VERSION) nightly; urgency=medium
* Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
EOF
# Prepend to existing changelog
cat "$CHANGELOG_FILE" >> "$TEMP_CHANGELOG"
mv "$TEMP_CHANGELOG" "$CHANGELOG_FILE"
echo "✓ Updated Debian changelog: $CURRENT_VERSION → $NEW_VERSION"
else
echo "✓ Debian changelog already at version $NEW_VERSION"
fi
- name: Update dms stable version
if: steps.check-loop.outputs.skip != 'true' && steps.packages.outputs.version != ''
run: |
VERSION="${{ steps.packages.outputs.version }}"
VERSION_NO_V="${VERSION#v}"
echo "Updating packaging to version $VERSION_NO_V"
# Update openSUSE dms spec (stable only)
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
# Update openSUSE spec changelog
DATE_STR=$(date "+%a %b %d %Y")
CHANGELOG_ENTRY="* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1\\n- Update to stable $VERSION release\\n- Bug fixes and improvements"
sed -i "/%changelog/a\\$CHANGELOG_ENTRY\\n" distro/opensuse/dms.spec
# Update Debian _service files (both tar_scm and download_url formats)
for service in distro/debian/*/_service; do
if [[ -f "$service" ]]; then
# Update tar_scm revision parameter (for dms-git)
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
# Update download_url paths (for dms stable)
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
fi
done
# Update Debian changelog for dms stable
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
CHANGELOG_DATE=$(date -R)
TEMP_CHANGELOG=$(mktemp)
cat > "$TEMP_CHANGELOG" << EOF
dms ($VERSION_NO_V) stable; urgency=medium
* Update to $VERSION stable release
* Bug fixes and improvements
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
EOF
cat "distro/debian/dms/debian/changelog" >> "$TEMP_CHANGELOG"
mv "$TEMP_CHANGELOG" "distro/debian/dms/debian/changelog"
echo "✓ Updated Debian changelog to $VERSION_NO_V"
fi
- name: Install Go
if: steps.check-loop.outputs.skip != 'true'
uses: actions/setup-go@v5
with:
go-version: "1.24"
- name: Install OSC
if: steps.check-loop.outputs.skip != 'true'
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Upload to OBS
if: steps.check-loop.outputs.skip != 'true'
env:
FORCE_UPLOAD: ${{ github.event.inputs.force_upload }}
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
MESSAGE="Automated update from GitHub Actions"
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
fi
if [[ "$PACKAGES" == "all" ]]; then
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
fi
- name: Get changed packages
if: steps.check-loop.outputs.skip != 'true'
id: changed-packages
run: |
# Check if there are any changes to commit
if git diff --exit-code distro/debian/ distro/opensuse/ >/dev/null 2>&1; then
echo "has_changes=false" >> $GITHUB_OUTPUT
echo "📋 No changelog or spec changes to commit"
else
echo "has_changes=true" >> $GITHUB_OUTPUT
# Get list of changed packages for commit message
CHANGED_DEB=$(git diff --name-only distro/debian/ 2>/dev/null | grep 'debian/changelog' | xargs dirname 2>/dev/null | xargs dirname 2>/dev/null | xargs basename 2>/dev/null | tr '\n' ', ' | sed 's/, $//' || echo "")
CHANGED_SUSE=$(git diff --name-only distro/opensuse/ 2>/dev/null | grep '\.spec$' | sed 's|distro/opensuse/||' | sed 's/\.spec$//' | tr '\n' ', ' | sed 's/, $//' || echo "")
PKGS=$(echo "$CHANGED_DEB,$CHANGED_SUSE" | tr ',' '\n' | grep -v '^$' | sort -u | tr '\n' ',' | sed 's/,$//')
echo "packages=$PKGS" >> $GITHUB_OUTPUT
echo "📋 Changed packages: $PKGS"
fi
- name: Commit packaging changes
if: steps.check-loop.outputs.skip != 'true' && steps.changed-packages.outputs.has_changes == 'true'
run: |
git config user.name "dms-ci[bot]"
git config user.email "dms-ci[bot]@users.noreply.github.com"
git add distro/debian/*/debian/changelog distro/opensuse/*.spec
git commit -m "ci: Auto-update OBS packages [${{ steps.changed-packages.outputs.packages }}]" -m "🤖 Automated by GitHub Actions"
git pull --rebase origin master
git push
- name: Summary
run: |
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY

View File

@@ -1,298 +0,0 @@
name: Update PPA Packages
on:
workflow_dispatch:
inputs:
package:
description: "Package to upload (dms, dms-git, dms-greeter, or all)"
required: false
default: "dms-git"
force_upload:
description: "Force upload without version check"
required: false
default: "false"
type: choice
options:
- "false"
- "true"
rebuild_release:
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
required: false
default: ""
schedule:
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check for updates
id: check
run: |
if [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "Checking if dms-git source has changed..."
# Get current commit hash (8 chars to match changelog format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Extract commit hash from changelog
# Format: dms-git (0.6.2+git2264.c5c5ce84) questing; urgency=medium
CHANGELOG_FILE="distro/ubuntu/dms-git/debian/changelog"
if [[ -f "$CHANGELOG_FILE" ]]; then
CHANGELOG_COMMIT=$(head -1 "$CHANGELOG_FILE" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$CHANGELOG_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$CHANGELOG_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already in changelog, skipping upload"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (changelog has $CHANGELOG_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract commit from changelog, proceeding with upload"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No changelog file found, proceeding with upload"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
upload-ppa:
name: Upload to PPA
needs: check-updates
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
if: |
github.event.inputs.force_upload == 'true' ||
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Generate GitHub App Token
id: generate_token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ steps.generate_token.outputs.token }}
- name: Check if last commit was automated
id: check-loop
run: |
LAST_COMMIT_MSG=$(git log -1 --pretty=%B | head -1)
if [[ "$LAST_COMMIT_MSG" == "ci: Auto-update PPA packages"* ]] || [[ "$LAST_COMMIT_MSG" == "ci: Auto-update OBS packages"* ]]; then
echo "⏭️ Last commit was automated ($LAST_COMMIT_MSG), skipping to prevent infinite loop"
echo "skip=true" >> $GITHUB_OUTPUT
else
echo "✅ Last commit was not automated, proceeding"
echo "skip=false" >> $GITHUB_OUTPUT
fi
- name: Set up Go
if: steps.check-loop.outputs.skip != 'true'
uses: actions/setup-go@v5
with:
go-version: "1.24"
cache: false
- name: Install build dependencies
if: steps.check-loop.outputs.skip != 'true'
run: |
sudo apt-get update
sudo apt-get install -y \
debhelper \
devscripts \
dput \
lftp \
build-essential \
fakeroot \
dpkg-dev
- name: Configure GPG
if: steps.check-loop.outputs.skip != 'true'
env:
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
run: |
echo "$GPG_KEY" | gpg --import
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
- name: Determine packages to upload
if: steps.check-loop.outputs.skip != 'true'
id: packages
run: |
if [[ "${{ github.event.inputs.force_upload }}" == "true" ]]; then
PKG="${{ github.event.inputs.package }}"
if [[ -z "$PKG" || "$PKG" == "all" ]]; then
echo "packages=all" >> $GITHUB_OUTPUT
echo "🚀 Force upload: all packages"
else
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "🚀 Force upload: $PKG"
fi
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: uploading git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
# Manual package selection should respect change detection
SELECTED_PKG="${{ github.event.inputs.package }}"
UPDATED_PKG="${{ needs.check-updates.outputs.packages }}"
# Check if manually selected package is in the updated list
if [[ "$UPDATED_PKG" == *"$SELECTED_PKG"* ]] || [[ "$SELECTED_PKG" == "all" ]]; then
echo "packages=$SELECTED_PKG" >> $GITHUB_OUTPUT
echo "📦 Manual selection (has updates): $SELECTED_PKG"
else
echo "packages=" >> $GITHUB_OUTPUT
echo "⚠️ Manual selection '$SELECTED_PKG' has no updates - skipping (use force_upload to override)"
fi
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Upload to PPA
if: steps.check-loop.outputs.skip != 'true'
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
REBUILD_RELEASE="${{ github.event.inputs.rebuild_release }}"
if [[ -z "$PACKAGES" ]]; then
echo "No packages selected for upload. Skipping."
exit 0
fi
# Build command arguments
BUILD_ARGS=()
if [[ -n "$REBUILD_RELEASE" ]]; then
BUILD_ARGS+=("$REBUILD_RELEASE")
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
fi
if [[ "$PACKAGES" == "all" ]]; then
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms to PPA..."
if [ -n "$REBUILD_RELEASE" ]; then
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh dms dms questing "${BUILD_ARGS[@]}"
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-git to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh dms-git dms-git questing "${BUILD_ARGS[@]}"
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-greeter to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh dms-greeter danklinux questing "${BUILD_ARGS[@]}"
else
# Map package to PPA name
case "$PACKAGES" in
dms)
PPA_NAME="dms"
;;
dms-git)
PPA_NAME="dms-git"
;;
dms-greeter)
PPA_NAME="danklinux"
;;
*)
PPA_NAME="$PACKAGES"
;;
esac
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PACKAGES to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "$PACKAGES" "$PPA_NAME" questing "${BUILD_ARGS[@]}"
fi
- name: Get changed packages
if: steps.check-loop.outputs.skip != 'true'
id: changed-packages
run: |
# Check if there are any changelog changes to commit
if git diff --exit-code distro/ubuntu/ >/dev/null 2>&1; then
echo "has_changes=false" >> $GITHUB_OUTPUT
echo "📋 No changelog changes to commit"
else
echo "has_changes=true" >> $GITHUB_OUTPUT
# Get list of changed packages for commit message (deduplicate)
CHANGED=$(git diff --name-only distro/ubuntu/ | grep 'debian/changelog' | sed 's|/debian/changelog||' | xargs -I{} basename {} | sort -u | tr '\n' ',' | sed 's/,$//')
echo "packages=$CHANGED" >> $GITHUB_OUTPUT
echo "📋 Changed packages: $CHANGED"
echo "📋 Debug - Changed files:"
git diff --name-only distro/ubuntu/ | grep 'debian/changelog' || echo "No changelog files found"
fi
- name: Commit changelog changes
if: steps.check-loop.outputs.skip != 'true' && steps.changed-packages.outputs.has_changes == 'true'
run: |
git config user.name "dms-ci[bot]"
git config user.email "dms-ci[bot]@users.noreply.github.com"
git add distro/ubuntu/*/debian/changelog
git commit -m "ci: Auto-update PPA packages [${{ steps.changed-packages.outputs.packages }}]" -m "🤖 Automated by GitHub Actions"
git pull --rebase origin master
git push
- name: Summary
run: |
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ "$PACKAGES" == "all" ]]; then
echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA danklinux**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-git" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY

View File

@@ -28,20 +28,25 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Install flatpak
run: sudo apt update && sudo apt install -y flatpak
- name: Add flathub
run: sudo flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
- name: Add a flatpak that matugen could support
run: sudo flatpak install -y org.freedesktop.Platform/x86_64/24.08 app.zen_browser.zen
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version-file: ./core/go.mod
- name: Format check
run: |
if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
echo "The following files are not formatted:"
gofmt -s -l .
exit 1
fi
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v9
with:
version: v2.6
working-directory: core
- name: Test
run: go test -v ./...

View File

@@ -1,24 +0,0 @@
name: Pre-commit Checks
on:
push:
pull_request:
branches: [master, main]
jobs:
pre-commit-check:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install flatpak
run: sudo apt update && sudo apt install -y flatpak
- name: Add flathub
run: sudo flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
- name: Add a flatpak that matugen could support
run: sudo flatpak install -y org.freedesktop.Platform/x86_64/24.08 app.zen_browser.zen
- name: run pre-commit hooks
uses: j178/prek-action@v1

View File

@@ -281,9 +281,6 @@ jobs:
# Copy root LICENSE and CONTRIBUTING.md to quickshell/ for packaging
cp LICENSE CONTRIBUTING.md quickshell/
# Copy root assets directory to quickshell for systemd service and desktop file
cp -r assets quickshell/
# Tar the CONTENTS of quickshell/, not the directory itself
(cd quickshell && tar --exclude='.git' \
--exclude='.github' \
@@ -398,3 +395,297 @@ jobs:
prerelease: ${{ contains(env.TAG, '-') }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# trigger-obs-update:
# runs-on: ubuntu-latest
# needs: release
# env:
# TAG: ${{ inputs.tag }}
# steps:
# - name: Checkout
# uses: actions/checkout@v4
# with:
# ref: ${{ inputs.tag }}
# - name: Install OSC
# run: |
# sudo apt-get update
# sudo apt-get install -y osc
# mkdir -p ~/.config/osc
# cat > ~/.config/osc/oscrc << EOF
# [general]
# apiurl = https://api.opensuse.org
# [https://api.opensuse.org]
# user = ${{ secrets.OBS_USERNAME }}
# pass = ${{ secrets.OBS_PASSWORD }}
# EOF
# chmod 600 ~/.config/osc/oscrc
# - name: Update OBS packages
# run: |
# cd distro
# bash scripts/obs-upload.sh dms "Update to ${TAG}"
# trigger-ppa-update:
# runs-on: ubuntu-latest
# needs: release
# env:
# TAG: ${{ inputs.tag }}
# steps:
# - name: Checkout
# uses: actions/checkout@v4
# with:
# ref: ${{ inputs.tag }}
# - name: Install build dependencies
# run: |
# sudo apt-get update
# sudo apt-get install -y \
# debhelper \
# devscripts \
# dput \
# lftp \
# build-essential \
# fakeroot \
# dpkg-dev
# - name: Configure GPG
# env:
# GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
# run: |
# echo "$GPG_KEY" | gpg --import
# GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
# echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
# - name: Upload to PPA
# run: |
# cd distro/ubuntu/ppa
# bash create-and-upload.sh ../dms dms questing
# copr-build:
# runs-on: ubuntu-latest
# needs: release
# env:
# TAG: ${{ inputs.tag }}
# steps:
# - name: Checkout repository
# uses: actions/checkout@v4
# with:
# ref: ${{ inputs.tag }}
# - name: Determine version
# id: version
# run: |
# VERSION="${TAG#v}"
# echo "version=$VERSION" >> $GITHUB_OUTPUT
# echo "Building DMS stable version: $VERSION"
# - name: Setup build environment
# run: |
# sudo apt-get update
# sudo apt-get install -y rpm wget curl jq gzip
# mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
# - name: Download release assets
# run: |
# VERSION="${{ steps.version.outputs.version }}"
# cd ~/rpmbuild/SOURCES
# wget "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-qml.tar.gz" || {
# echo "Failed to download dms-qml.tar.gz for v${VERSION}"
# exit 1
# }
# - name: Generate stable spec file
# run: |
# VERSION="${{ steps.version.outputs.version }}"
# CHANGELOG_DATE="$(date '+%a %b %d %Y')"
# cat > ~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
# # Spec for DMS stable releases - Generated by GitHub Actions
# %global debug_package %{nil}
# %global version VERSION_PLACEHOLDER
# %global pkg_summary DankMaterialShell - Material 3 inspired shell for Wayland compositors
# Name: dms
# Version: %{version}
# Release: 1%{?dist}
# Summary: %{pkg_summary}
# License: MIT
# URL: https://github.com/AvengeMedia/DankMaterialShell
# Source0: dms-qml.tar.gz
# BuildRequires: gzip
# BuildRequires: wget
# BuildRequires: systemd-rpm-macros
# Requires: (quickshell or quickshell-git)
# Requires: accountsservice
# Requires: dms-cli = %{version}-%{release}
# Requires: dgop
# Recommends: cava
# Recommends: cliphist
# Recommends: danksearch
# Recommends: matugen
# Recommends: wl-clipboard
# Recommends: NetworkManager
# Recommends: qt6-qtmultimedia
# Suggests: qt6ct
# %description
# DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
# and optimized for the niri and hyprland compositors. Features notifications,
# app launcher, wallpaper customization, and fully customizable with plugins.
# Includes auto-theming for GTK/Qt apps with matugen, 20+ customizable widgets,
# process monitoring, notification center, clipboard history, dock, control center,
# lock screen, and comprehensive plugin system.
# %package -n dms-cli
# Summary: DankMaterialShell CLI tool
# License: MIT
# URL: https://github.com/AvengeMedia/DankMaterialShell
# %description -n dms-cli
# Command-line interface for DankMaterialShell configuration and management.
# Provides native DBus bindings, NetworkManager integration, and system utilities.
# %prep
# %setup -q -c -n dms-qml
# # Download architecture-specific binaries during build
# case "%{_arch}" in
# x86_64)
# ARCH_SUFFIX="amd64"
# ;;
# aarch64)
# ARCH_SUFFIX="arm64"
# ;;
# *)
# echo "Unsupported architecture: %{_arch}"
# exit 1
# ;;
# esac
# wget -O %{_builddir}/dms-cli.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-${ARCH_SUFFIX}.gz" || {
# echo "Failed to download dms-cli for architecture %{_arch}"
# exit 1
# }
# gunzip -c %{_builddir}/dms-cli.gz > %{_builddir}/dms-cli
# chmod +x %{_builddir}/dms-cli
# %build
# %install
# install -Dm755 %{_builddir}/dms-cli %{buildroot}%{_bindir}/dms
# install -d %{buildroot}%{_datadir}/bash-completion/completions
# install -d %{buildroot}%{_datadir}/zsh/site-functions
# install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
# %{_builddir}/dms-cli completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
# %{_builddir}/dms-cli completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
# %{_builddir}/dms-cli completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :
# install -Dm644 assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service
# install -Dm644 assets/dms-open.desktop %{buildroot}%{_datadir}/applications/dms-open.desktop
# install -Dm644 assets/danklogo.svg %{buildroot}%{_datadir}/icons/hicolor/scalable/apps/danklogo.svg
# install -dm755 %{buildroot}%{_datadir}/quickshell/dms
# cp -r %{_builddir}/dms-qml/* %{buildroot}%{_datadir}/quickshell/dms/
# rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
# rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
# rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
# rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
# echo "%{version}" > %{buildroot}%{_datadir}/quickshell/dms/VERSION
# %posttrans
# if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
# rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
# rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
# rmdir "%{_sysconfdir}/xdg" 2>/dev/null || true
# fi
# # Signal running DMS instances to reload
# pkill -USR1 -x dms >/dev/null 2>&1 || :
# %files
# %license LICENSE
# %doc README.md CONTRIBUTING.md
# %{_datadir}/quickshell/dms/
# %{_userunitdir}/dms.service
# %{_datadir}/applications/dms-open.desktop
# %{_datadir}/icons/hicolor/scalable/apps/danklogo.svg
# %files -n dms-cli
# %{_bindir}/dms
# %{_datadir}/bash-completion/completions/dms
# %{_datadir}/zsh/site-functions/_dms
# %{_datadir}/fish/vendor_completions.d/dms.fish
# %changelog
# * CHANGELOG_DATE_PLACEHOLDER AvengeMedia <contact@avengemedia.com> - VERSION_PLACEHOLDER-1
# - Stable release VERSION_PLACEHOLDER
# - Built from GitHub release
# SPECEOF
# sed -i "s/VERSION_PLACEHOLDER/${VERSION}/g" ~/rpmbuild/SPECS/dms.spec
# sed -i "s/CHANGELOG_DATE_PLACEHOLDER/${CHANGELOG_DATE}/g" ~/rpmbuild/SPECS/dms.spec
# - name: Build SRPM
# id: build
# run: |
# cd ~/rpmbuild/SPECS
# rpmbuild -bs dms.spec
# SRPM=$(ls ~/rpmbuild/SRPMS/*.src.rpm | tail -n 1)
# SRPM_NAME=$(basename "$SRPM")
# echo "srpm_path=$SRPM" >> $GITHUB_OUTPUT
# echo "srpm_name=$SRPM_NAME" >> $GITHUB_OUTPUT
# echo "SRPM built: $SRPM_NAME"
# - name: Upload SRPM artifact
# uses: actions/upload-artifact@v4
# with:
# name: dms-stable-srpm-${{ steps.version.outputs.version }}
# path: ${{ steps.build.outputs.srpm_path }}
# retention-days: 90
# - name: Install Copr CLI
# run: |
# sudo apt-get install -y python3-pip
# pip3 install copr-cli
# mkdir -p ~/.config
# cat > ~/.config/copr << EOF
# [copr-cli]
# login = ${{ secrets.COPR_LOGIN }}
# username = avengemedia
# token = ${{ secrets.COPR_TOKEN }}
# copr_url = https://copr.fedorainfracloud.org
# EOF
# chmod 600 ~/.config/copr
# - name: Upload to Copr
# run: |
# SRPM="${{ steps.build.outputs.srpm_path }}"
# VERSION="${{ steps.version.outputs.version }}"
# echo "Uploading SRPM to avengemedia/dms..."
# BUILD_OUTPUT=$(copr-cli build avengemedia/dms "$SRPM" --nowait 2>&1)
# echo "$BUILD_OUTPUT"
# BUILD_ID=$(echo "$BUILD_OUTPUT" | grep -oP 'Build was added to.*\K[0-9]+' || echo "unknown")
# if [ "$BUILD_ID" != "unknown" ]; then
# echo "Build submitted: https://copr.fedorainfracloud.org/coprs/avengemedia/dms/build/$BUILD_ID/"
# fi

View File

@@ -3,17 +3,8 @@ name: DMS Copr Stable Release
on:
workflow_dispatch:
inputs:
package:
description: 'Package to build (dms, dms-greeter, or both)'
required: false
default: 'dms'
type: choice
options:
- dms
- dms-greeter
- both
version:
description: 'Versioning (e.g., 1.0.3, leave empty for latest release)'
description: 'Versioning (e.g., 0.1.14, leave empty for latest release)'
required: false
default: ''
release:
@@ -22,32 +13,13 @@ on:
default: '1'
jobs:
determine-packages:
runs-on: ubuntu-latest
outputs:
packages: ${{ steps.set-packages.outputs.packages }}
steps:
- name: Set package list
id: set-packages
run: |
PACKAGE_INPUT="${{ github.event.inputs.package || 'dms' }}"
if [ "$PACKAGE_INPUT" = "both" ]; then
echo 'packages=["dms","dms-greeter"]' >> $GITHUB_OUTPUT
else
echo "packages=[\"$PACKAGE_INPUT\"]" >> $GITHUB_OUTPUT
fi
build-and-upload:
needs: determine-packages
runs-on: ubuntu-latest
strategy:
matrix:
package: ${{ fromJSON(needs.determine-packages.outputs.packages) }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Determine version
id: version
run: |
@@ -67,84 +39,210 @@ jobs:
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release=$RELEASE" >> $GITHUB_OUTPUT
echo "✅ Building ${{ matrix.package }} version: $VERSION-$RELEASE"
echo "✅ Building DMS hotfix version: $VERSION-$RELEASE"
- name: Setup build environment
run: |
sudo apt-get update
sudo apt-get install -y rpm wget curl jq gzip
mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
echo "✅ RPM build environment ready"
- name: Download release assets
run: |
VERSION="${{ steps.version.outputs.version }}"
cd ~/rpmbuild/SOURCES
echo "📦 Downloading DMS QML source for v${VERSION}..."
# Download DMS QML source
wget "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-qml.tar.gz" || {
echo "❌ Failed to download dms-qml.tar.gz for v${VERSION}"
exit 1
}
echo "✅ Source downloaded"
echo "Note: dms-cli binary will be downloaded during build based on target architecture"
ls -lh
- name: Generate stable spec file
run: |
VERSION="${{ steps.version.outputs.version }}"
RELEASE="${{ steps.version.outputs.release }}"
CHANGELOG_DATE="$(date '+%a %b %d %Y')"
PACKAGE="${{ matrix.package }}"
# Copy spec file from repository
cp distro/fedora/${PACKAGE}.spec ~/rpmbuild/SPECS/${PACKAGE}.spec
cat > ~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
# Spec for DMS stable releases - Generated by GitHub Actions
# Replace placeholders with actual values
sed -i "s/VERSION_PLACEHOLDER/${VERSION}/g" ~/rpmbuild/SPECS/${PACKAGE}.spec
sed -i "s/RELEASE_PLACEHOLDER/${RELEASE}/g" ~/rpmbuild/SPECS/${PACKAGE}.spec
sed -i "s/CHANGELOG_DATE_PLACEHOLDER/${CHANGELOG_DATE}/g" ~/rpmbuild/SPECS/${PACKAGE}.spec
%global debug_package %{nil}
%global version VERSION_PLACEHOLDER
%global pkg_summary DankMaterialShell - Material 3 inspired shell for Wayland compositors
echo "✅ Spec file generated for ${PACKAGE} v${VERSION}-${RELEASE}"
Name: dms
Version: %{version}
Release: RELEASE_PLACEHOLDER%{?dist}
Summary: %{pkg_summary}
License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
Source0: dms-qml.tar.gz
BuildRequires: gzip
BuildRequires: wget
BuildRequires: systemd-rpm-macros
Requires: (quickshell or quickshell-git)
Requires: accountsservice
Requires: dms-cli = %{version}-%{release}
Requires: dgop
Recommends: cava
Recommends: cliphist
Recommends: danksearch
Recommends: hyprpicker
Recommends: matugen
Recommends: wl-clipboard
Recommends: NetworkManager
Recommends: qt6-qtmultimedia
Suggests: qt6ct
%description
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
and optimized for the niri and hyprland compositors. Features notifications,
app launcher, wallpaper customization, and fully customizable with plugins.
Includes auto-theming for GTK/Qt apps with matugen, 20+ customizable widgets,
process monitoring, notification center, clipboard history, dock, control center,
lock screen, and comprehensive plugin system.
%package -n dms-cli
Summary: DankMaterialShell CLI tool
License: MIT
URL: https://github.com/AvengeMedia/DankMaterialShell
%description -n dms-cli
Command-line interface for DankMaterialShell configuration and management.
Provides native DBus bindings, NetworkManager integration, and system utilities.
%prep
%setup -q -c -n dms-qml
# Download architecture-specific binaries during build
# This ensures the correct architecture is used for each build target
case "%{_arch}" in
x86_64)
ARCH_SUFFIX="amd64"
;;
aarch64)
ARCH_SUFFIX="arm64"
;;
*)
echo "Unsupported architecture: %{_arch}"
exit 1
;;
esac
# Download dms-cli for target architecture
wget -O %{_builddir}/dms-cli.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-${ARCH_SUFFIX}.gz" || {
echo "Failed to download dms-cli for architecture %{_arch}"
exit 1
}
gunzip -c %{_builddir}/dms-cli.gz > %{_builddir}/dms-cli
chmod +x %{_builddir}/dms-cli
%build
%install
install -Dm755 %{_builddir}/dms-cli %{buildroot}%{_bindir}/dms
# Shell completions
install -d %{buildroot}%{_datadir}/bash-completion/completions
install -d %{buildroot}%{_datadir}/zsh/site-functions
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
%{_builddir}/dms-cli completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
%{_builddir}/dms-cli completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
%{_builddir}/dms-cli completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :
install -Dm644 %{_builddir}/dms-qml/assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service
install -dm755 %{buildroot}%{_datadir}/quickshell/dms
cp -r %{_builddir}/dms-qml/* %{buildroot}%{_datadir}/quickshell/dms/
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
%posttrans
# Clean up old installation path from previous versions (only if empty)
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
# Remove directories only if empty (preserves any user-added files)
rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
rmdir "%{_sysconfdir}/xdg" 2>/dev/null || true
fi
# Signal running DMS instances to reload (harmless if none running)
pkill -USR1 -x dms >/dev/null 2>&1 || :
%files
%license LICENSE
%doc README.md CONTRIBUTING.md
%{_datadir}/quickshell/dms/
%{_userunitdir}/dms.service
%files -n dms-cli
%{_bindir}/dms
%{_datadir}/bash-completion/completions/dms
%{_datadir}/zsh/site-functions/_dms
%{_datadir}/fish/vendor_completions.d/dms.fish
%changelog
* CHANGELOG_DATE_PLACEHOLDER AvengeMedia <contact@avengemedia.com> - VERSION_PLACEHOLDER-RELEASE_PLACEHOLDER
- Stable release VERSION_PLACEHOLDER
- Built from GitHub release
SPECEOF
sed -i "s/VERSION_PLACEHOLDER/${VERSION}/g" ~/rpmbuild/SPECS/dms.spec
sed -i "s/RELEASE_PLACEHOLDER/${RELEASE}/g" ~/rpmbuild/SPECS/dms.spec
sed -i "s/CHANGELOG_DATE_PLACEHOLDER/${CHANGELOG_DATE}/g" ~/rpmbuild/SPECS/dms.spec
echo "✅ Spec file generated for v${VERSION}-${RELEASE}"
echo ""
echo "=== Spec file preview ==="
head -40 ~/rpmbuild/SPECS/${PACKAGE}.spec
head -40 ~/rpmbuild/SPECS/dms.spec
- name: Build SRPM
id: build
run: |
cd ~/rpmbuild/SPECS
PACKAGE="${{ matrix.package }}"
echo "🔨 Building SRPM for ${PACKAGE}..."
rpmbuild -bs ${PACKAGE}.spec
SRPM=$(ls ~/rpmbuild/SRPMS/${PACKAGE}-*.src.rpm | tail -n 1)
echo "🔨 Building SRPM..."
rpmbuild -bs dms.spec
SRPM=$(ls ~/rpmbuild/SRPMS/*.src.rpm | tail -n 1)
SRPM_NAME=$(basename "$SRPM")
echo "srpm_path=$SRPM" >> $GITHUB_OUTPUT
echo "srpm_name=$SRPM_NAME" >> $GITHUB_OUTPUT
echo "✅ SRPM built: $SRPM_NAME"
echo ""
echo "=== SRPM Info ==="
rpm -qpi "$SRPM"
- name: Upload SRPM artifact
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.package }}-stable-srpm-${{ steps.version.outputs.version }}
name: dms-stable-srpm-${{ steps.version.outputs.version }}
path: ${{ steps.build.outputs.srpm_path }}
retention-days: 90
- name: Install Copr CLI
run: |
sudo apt-get install -y python3-pip
pip3 install copr-cli
mkdir -p ~/.config
cat > ~/.config/copr << EOF
[copr-cli]
@@ -154,57 +252,37 @@ jobs:
copr_url = https://copr.fedorainfracloud.org
EOF
chmod 600 ~/.config/copr
echo "✅ Copr CLI configured"
- name: Determine Copr project
id: copr_project
run: |
PACKAGE="${{ matrix.package }}"
if [ "$PACKAGE" = "dms" ]; then
COPR_PROJECT="avengemedia/dms"
elif [ "$PACKAGE" = "dms-greeter" ]; then
COPR_PROJECT="avengemedia/danklinux"
else
echo "❌ Unknown package: $PACKAGE"
exit 1
fi
echo "copr_project=$COPR_PROJECT" >> $GITHUB_OUTPUT
echo "✅ Copr project: $COPR_PROJECT"
- name: Upload to Copr
run: |
SRPM="${{ steps.build.outputs.srpm_path }}"
VERSION="${{ steps.version.outputs.version }}"
COPR_PROJECT="${{ steps.copr_project.outputs.copr_project }}"
PACKAGE="${{ matrix.package }}"
echo "🚀 Uploading ${PACKAGE} SRPM to ${COPR_PROJECT}..."
echo "🚀 Uploading SRPM to avengemedia/dms..."
echo " SRPM: $(basename $SRPM)"
echo " Version: $VERSION"
BUILD_OUTPUT=$(copr-cli build "$COPR_PROJECT" "$SRPM" --nowait 2>&1)
BUILD_OUTPUT=$(copr-cli build avengemedia/dms "$SRPM" --nowait 2>&1)
echo "$BUILD_OUTPUT"
BUILD_ID=$(echo "$BUILD_OUTPUT" | grep -oP 'Build was added to.*\K[0-9]+' || echo "unknown")
if [ "$BUILD_ID" != "unknown" ]; then
echo "✅ Build submitted successfully!"
echo "🔗 https://copr.fedorainfracloud.org/coprs/${COPR_PROJECT}/build/$BUILD_ID/"
echo "🔗 https://copr.fedorainfracloud.org/coprs/avengemedia/dms/build/$BUILD_ID/"
else
echo "⚠️ Could not extract build ID, but upload may have succeeded"
fi
- name: Build summary
if: always()
run: |
PACKAGE="${{ matrix.package }}"
COPR_PROJECT="${{ steps.copr_project.outputs.copr_project }}"
echo "### 🎉 ${PACKAGE} Stable Build Summary" >> $GITHUB_STEP_SUMMARY
echo "### 🎉 DMS Stable Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Package:** ${PACKAGE}" >> $GITHUB_STEP_SUMMARY
echo "- **Version:** ${{ steps.version.outputs.version }}-${{ steps.version.outputs.release }}" >> $GITHUB_STEP_SUMMARY
echo "- **SRPM:** ${{ steps.build.outputs.srpm_name }}" >> $GITHUB_STEP_SUMMARY
echo "- **Project:** https://copr.fedorainfracloud.org/coprs/${COPR_PROJECT}/" >> $GITHUB_STEP_SUMMARY
echo "- **Project:** https://copr.fedorainfracloud.org/coprs/avengemedia/dms/" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Stable release has been built and uploaded to Copr!" >> $GITHUB_STEP_SUMMARY

View File

@@ -4,144 +4,107 @@ on:
workflow_dispatch:
inputs:
package:
description: "Package to update (dms, dms-git, or all)"
description: 'Package to update (dms, dms-git, or all)'
required: false
default: "all"
tag_version:
description: "Specific tag version for dms stable (e.g., v1.0.2). Leave empty to auto-detect latest release."
required: false
default: ""
default: 'all'
rebuild_release:
description: "Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)"
description: 'Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)'
required: false
default: ""
default: ''
push:
tags:
- 'v*'
schedule:
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
- cron: '0 */3 * * *' # Every 3 hours for dms-git builds
jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
version: ${{ steps.check.outputs.version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install OSC
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Check for updates
id: check
env:
OBS_USERNAME: ${{ secrets.OBS_USERNAME }}
OBS_PASSWORD: ${{ secrets.OBS_PASSWORD }}
run: |
# Helper function to check dms-git commit
check_dms_git() {
local CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
local OBS_SPEC=$(curl -s -u "$OBS_USERNAME:$OBS_PASSWORD" "https://api.opensuse.org/source/home:AvengeMedia:dms-git/dms-git/dms-git.spec" 2>/dev/null || echo "")
local OBS_COMMIT=$(echo "$OBS_SPEC" | grep "^Version:" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$OBS_COMMIT" && "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
echo "📋 dms-git: Commit $CURRENT_COMMIT already exists, skipping"
return 1 # No update needed
else
echo "📋 dms-git: New commit $CURRENT_COMMIT (OBS has ${OBS_COMMIT:-none})"
return 0 # Update needed
fi
}
# Helper function to check dms stable tag
check_dms_stable() {
local LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "v\?\([^"]*\)".*/\1/' || echo "")
local OBS_SPEC=$(curl -s -u "$OBS_USERNAME:$OBS_PASSWORD" "https://api.opensuse.org/source/home:AvengeMedia:dms/dms/dms.spec" 2>/dev/null || echo "")
local OBS_VERSION=$(echo "$OBS_SPEC" | grep "^Version:" | awk '{print $2}' | xargs || echo "")
if [[ -n "$LATEST_TAG" && "$LATEST_TAG" == "$OBS_VERSION" ]]; then
echo "📋 dms: Tag $LATEST_TAG already exists, skipping"
return 1 # No update needed
else
echo "📋 dms: New tag ${LATEST_TAG:-unknown} (OBS has ${OBS_VERSION:-none})"
return 0 # Update needed
fi
}
# Main logic
REBUILD="${{ github.event.inputs.rebuild_release }}"
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag push - always update stable package
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION (always update)"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
# Scheduled run - check dms-git only
echo "packages=dms-git" >> $GITHUB_OUTPUT
if check_dms_git; then
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
echo "Checking if dms-git source has changed..."
# Get current commit hash (8 chars to match spec format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Check OBS for last uploaded commit
OBS_BASE="$HOME/.cache/osc-checkouts"
mkdir -p "$OBS_BASE"
OBS_PROJECT="home:AvengeMedia:dms-git"
if [[ -d "$OBS_BASE/$OBS_PROJECT/dms-git" ]]; then
cd "$OBS_BASE/$OBS_PROJECT/dms-git"
osc up -q 2>/dev/null || true
# Extract commit hash from the spec Version line (format like 0.6.2+git2264.a679be68)
if [[ -f "dms-git.spec" ]]; then
OBS_COMMIT=$(grep "^Version:" "dms-git.spec" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$OBS_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$OBS_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already uploaded to OBS, skipping"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (OBS has $OBS_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract OBS commit, proceeding with update"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No spec file in OBS, proceeding with update"
fi
cd "${{ github.workspace }}"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 First upload to OBS, update needed"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
# Manual workflow trigger
PKG="${{ github.event.inputs.package }}"
if [[ -n "$REBUILD" ]]; then
# Rebuild requested - always proceed
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🔄 Manual rebuild requested: $PKG (db$REBUILD)"
elif [[ "$PKG" == "all" ]]; then
# Check each package and build list of those needing updates
PACKAGES_TO_UPDATE=()
check_dms_git && PACKAGES_TO_UPDATE+=("dms-git")
check_dms_stable && PACKAGES_TO_UPDATE+=("dms")
if [[ ${#PACKAGES_TO_UPDATE[@]} -gt 0 ]]; then
echo "packages=${PACKAGES_TO_UPDATE[*]}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "✓ Packages to update: ${PACKAGES_TO_UPDATE[*]}"
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "✓ All packages up to date"
fi
elif [[ "$PKG" == "dms-git" ]]; then
if check_dms_git; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ "$PKG" == "dms" ]]; then
if check_dms_stable; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
else
# Unknown package - proceed anyway
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: $PKG"
fi
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
# Fallback - proceed
echo "packages=all" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
@@ -150,166 +113,119 @@ jobs:
name: Upload to OBS
needs: check-updates
runs-on: ubuntu-latest
if: needs.check-updates.outputs.has_updates == 'true'
if: |
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Determine packages to update
id: packages
run: |
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag push event - use the pushed tag
echo "packages=dms" >> $GITHUB_OUTPUT
VERSION="${GITHUB_REF#refs/tags/}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Triggered by tag: $VERSION"
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
# Scheduled run - dms-git only
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: updating git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
# Manual workflow dispatch
# Determine version for dms stable
if [[ "${{ github.event.inputs.package }}" == "dms" ]]; then
# For explicit dms selection, require tag_version
if [[ -n "${{ github.event.inputs.tag_version }}" ]]; then
VERSION="${{ github.event.inputs.tag_version }}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Using specified tag: $VERSION"
else
echo "ERROR: tag_version is required when package=dms"
echo "Please specify a tag version (e.g., v1.0.2) or use package=all for auto-detection"
exit 1
fi
elif [[ "${{ github.event.inputs.package }}" == "all" ]]; then
# For "all", auto-detect if tag_version not specified
if [[ -n "${{ github.event.inputs.tag_version }}" ]]; then
VERSION="${{ github.event.inputs.tag_version }}"
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Using specified tag: $VERSION"
else
# Auto-detect latest release for "all"
LATEST_TAG=$(curl -s https://api.github.com/repos/AvengeMedia/DankMaterialShell/releases/latest | grep '"tag_name"' | sed 's/.*"tag_name": "\([^"]*\)".*/\1/' || echo "")
if [[ -n "$LATEST_TAG" ]]; then
echo "version=$LATEST_TAG" >> $GITHUB_OUTPUT
echo "Auto-detected latest release: $LATEST_TAG"
else
echo "ERROR: Could not auto-detect latest release"
exit 1
fi
fi
fi
# Use filtered packages from check-updates when package="all" and no rebuild/tag specified
if [[ "${{ github.event.inputs.package }}" == "all" ]] && [[ -z "${{ github.event.inputs.rebuild_release }}" ]] && [[ -z "${{ github.event.inputs.tag_version }}" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Manual trigger: all (filtered to: ${{ needs.check-updates.outputs.packages }})"
else
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
fi
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Update dms-git spec version
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "1.0.2")
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
# Update version in spec
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
# Single changelog entry (git snapshots don't need history)
# Add changelog entry
DATE_STR=$(date "+%a %b %d %Y")
LOCAL_SPEC_HEAD=$(sed -n '1,/%changelog/{ /%changelog/d; p }' distro/opensuse/dms-git.spec)
{
echo "$LOCAL_SPEC_HEAD"
echo "%changelog"
echo "* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1"
echo "- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
} > distro/opensuse/dms-git.spec
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
sed -i "/%changelog/a\\$CHANGELOG_ENTRY" distro/opensuse/dms-git.spec
- name: Update Debian dms-git changelog version
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "1.0.2")
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
# Debian version format: 0.6.2+git2256.9162e314
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
# Single changelog entry (git snapshots don't need history)
CHANGELOG_DATE=$(date -R)
{
echo "dms-git (${NEW_VERSION}db1) nightly; urgency=medium"
echo ""
echo " * Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
echo ""
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
} > "distro/debian/dms-git/debian/changelog"
CHANGELOG_FILE="distro/debian/dms-git/debian/changelog"
# Get current version from changelog
CURRENT_VERSION=$(head -1 "$CHANGELOG_FILE" | sed 's/.*(\([^)]*\)).*/\1/')
echo "Current Debian version: $CURRENT_VERSION"
echo "New version: $NEW_VERSION"
# Only update if version changed
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
# Create new changelog entry at top
TEMP_CHANGELOG=$(mktemp)
cat > "$TEMP_CHANGELOG" << EOF
dms-git ($NEW_VERSION) nightly; urgency=medium
* Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)
-- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE
EOF
# Prepend to existing changelog
cat "$CHANGELOG_FILE" >> "$TEMP_CHANGELOG"
mv "$TEMP_CHANGELOG" "$CHANGELOG_FILE"
echo "✓ Updated Debian changelog: $CURRENT_VERSION → $NEW_VERSION"
else
echo "✓ Debian changelog already at version $NEW_VERSION"
fi
- name: Update dms stable version
if: steps.packages.outputs.version != ''
run: |
VERSION="${{ steps.packages.outputs.version }}"
VERSION_NO_V="${VERSION#v}"
echo "==> Updating packaging files to version: $VERSION_NO_V"
# Update spec file
echo "Updating packaging to version $VERSION_NO_V"
# Update openSUSE dms spec (stable only)
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
# Verify the update
UPDATED_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1)
echo "✓ Spec file now shows Version: $UPDATED_VERSION"
# Single changelog entry (full history on OBS website)
DATE_STR=$(date "+%a %b %d %Y")
LOCAL_SPEC_HEAD=$(sed -n '1,/%changelog/{ /%changelog/d; p }' distro/opensuse/dms.spec)
{
echo "$LOCAL_SPEC_HEAD"
echo "%changelog"
echo "* $DATE_STR AvengeMedia <maintainer@avengemedia.com> - ${VERSION_NO_V}-1"
echo "- Update to stable $VERSION release"
} > distro/opensuse/dms.spec
# Update Debian _service files (both tar_scm and download_url formats)
# Update Debian _service files
for service in distro/debian/*/_service; do
if [[ -f "$service" ]]; then
# Update tar_scm revision parameter (for dms-git)
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
# Update download_url paths (for dms stable)
sed -i "s|/v[0-9.]\+/|/$VERSION/|g" "$service"
sed -i "s|/tags/v[0-9.]\+\.tar\.gz|/tags/$VERSION.tar.gz|g" "$service"
fi
done
# Update Debian changelog for dms stable (single entry, history on OBS website)
if [[ -f "distro/debian/dms/debian/changelog" ]]; then
CHANGELOG_DATE=$(date -R)
{
echo "dms (${VERSION_NO_V}db1) stable; urgency=medium"
echo ""
echo " * Update to $VERSION stable release"
echo ""
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $CHANGELOG_DATE"
} > "distro/debian/dms/debian/changelog"
echo "✓ Updated Debian changelog to ${VERSION_NO_V}db1"
fi
- name: Install Go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: '1.24'
- name: Install OSC
run: |
@@ -329,78 +245,32 @@ jobs:
- name: Upload to OBS
env:
FORCE_REBUILD: ${{ github.event_name == 'workflow_dispatch' && 'true' || '' }}
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
TAG_VERSION: ${{ steps.packages.outputs.version }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ -z "$PACKAGES" ]]; then
echo "✓ No packages need uploading. All up to date!"
exit 0
fi
MESSAGE="Automated update from GitHub Actions"
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
echo "==> Version being uploaded: ${{ steps.packages.outputs.version }}"
fi
# PACKAGES can be space-separated list (e.g., "dms-git dms" from "all" check)
# Loop through each package and upload
for PKG in $PACKAGES; do
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PKG to OBS..."
if [[ -n "$REBUILD_RELEASE" ]]; then
echo "🔄 Using rebuild release number: db$REBUILD_RELEASE"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
if [[ "$PKG" == "dms-git" ]]; then
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
bash distro/scripts/obs-upload.sh "$PKG" "$MESSAGE"
fi
done
- name: Summary
if: always()
run: |
echo "### OBS Package Upload Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ -z "$PACKAGES" ]]; then
echo "**Status:** ✅ All packages up to date (no uploads needed)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "All packages are current. Run completed successfully." >> $GITHUB_STEP_SUMMARY
if [[ "$PACKAGES" == "all" ]]; then
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
echo "**Packages Uploaded:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
for PKG in $PACKAGES; do
case "$PKG" in
dms)
echo "- ✅ **dms** → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms/dms)" >> $GITHUB_STEP_SUMMARY
;;
dms-git)
echo "- ✅ **dms-git** → [View builds](https://build.opensuse.org/package/show/home:AvengeMedia:dms-git/dms-git)" >> $GITHUB_STEP_SUMMARY
;;
esac
done
echo "" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ github.event.inputs.rebuild_release }}" ]]; then
echo "**Rebuild Number:** db${{ github.event.inputs.rebuild_release }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "**Version:** ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo "Monitor build progress on [OBS project page](https://build.opensuse.org/project/show/home:AvengeMedia)." >> $GITHUB_STEP_SUMMARY
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
fi
- name: Summary
run: |
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY


@@ -4,15 +4,15 @@ on:
workflow_dispatch:
inputs:
package:
description: "Package to upload (dms, dms-git, dms-greeter, or all)"
description: 'Package to upload (dms, dms-git, dms-greeter, or all)'
required: false
default: "dms-git"
default: 'dms-git'
rebuild_release:
description: "Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)"
description: 'Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)'
required: false
default: ""
default: ''
schedule:
- cron: "0 */3 * * *" # Every 3 hours for dms-git builds
- cron: '0 */3 * * *' # Every 3 hours for dms-git builds
jobs:
check-updates:
@@ -32,113 +32,41 @@ jobs:
- name: Check for updates
id: check
run: |
# Helper function to check dms-git commit
check_dms_git() {
local CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
local PPA_VERSION=$(curl -s "https://api.launchpad.net/1.0/~avengemedia/+archive/ubuntu/dms-git?ws.op=getPublishedSources&source_name=dms-git&status=Published" | grep -oP '"source_package_version":\s*"\K[^"]+' | head -1 || echo "")
local PPA_COMMIT=$(echo "$PPA_VERSION" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$PPA_COMMIT" && "$CURRENT_COMMIT" == "$PPA_COMMIT" ]]; then
echo "📋 dms-git: Commit $CURRENT_COMMIT already exists, skipping"
return 1 # No update needed
else
echo "📋 dms-git: New commit $CURRENT_COMMIT (PPA has ${PPA_COMMIT:-none})"
return 0 # Update needed
fi
}
# Helper function to check stable package tag
check_stable_package() {
local PKG="$1"
local PPA_NAME="$2"
# Use git ls-remote to find the latest tag, sorted by version (descending)
local LATEST_TAG=$(git ls-remote --tags --refs --sort='-v:refname' https://github.com/AvengeMedia/DankMaterialShell.git | head -n1 | awk -F/ '{print $NF}' | sed 's/^v//')
local PPA_VERSION=$(curl -s "https://api.launchpad.net/1.0/~avengemedia/+archive/ubuntu/$PPA_NAME?ws.op=getPublishedSources&source_name=$PKG&status=Published" | grep -oP '"source_package_version":\s*"\K[^"]+' | head -1 || echo "")
local PPA_BASE_VERSION=$(echo "$PPA_VERSION" | sed 's/ppa[0-9]*$//')
if [[ -n "$LATEST_TAG" && "$LATEST_TAG" == "$PPA_BASE_VERSION" ]]; then
echo "📋 $PKG: Tag $LATEST_TAG already exists, skipping"
return 1 # No update needed
else
echo "📋 $PKG: New tag ${LATEST_TAG:-unknown} (PPA has ${PPA_BASE_VERSION:-none})"
return 0 # Update needed
fi
}
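# Illustrative only (hypothetical values): a published PPA version such as "1.0.2ppa2"
# reduces to its base "1.0.2" via the sed above:
#   echo "1.0.2ppa2" | sed 's/ppa[0-9]*$//'     # -> 1.0.2
# so a matching upstream tag v1.0.2 is then treated as already published and skipped.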
# Main logic
REBUILD="${{ github.event.inputs.rebuild_release }}"
if [[ "${{ github.event_name }}" == "schedule" ]]; then
# Scheduled run - check dms-git only
echo "packages=dms-git" >> $GITHUB_OUTPUT
if check_dms_git; then
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
echo "Checking if dms-git source has changed..."
# Get current commit hash (8 chars to match changelog format)
CURRENT_COMMIT=$(git rev-parse --short=8 HEAD)
# Extract commit hash from changelog
# Format: dms-git (0.6.2+git2264.c5c5ce84) questing; urgency=medium
CHANGELOG_FILE="distro/ubuntu/dms-git/debian/changelog"
if [[ -f "$CHANGELOG_FILE" ]]; then
CHANGELOG_COMMIT=$(head -1 "$CHANGELOG_FILE" | grep -oP '\.[a-f0-9]{8}' | tr -d '.' || echo "")
if [[ -n "$CHANGELOG_COMMIT" ]]; then
if [[ "$CURRENT_COMMIT" == "$CHANGELOG_COMMIT" ]]; then
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "📋 Commit $CURRENT_COMMIT already in changelog, skipping upload"
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 New commit detected: $CURRENT_COMMIT (changelog has $CHANGELOG_COMMIT)"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 Could not extract commit from changelog, proceeding with upload"
fi
else
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "📋 No changelog file found, proceeding with upload"
fi
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
# Manual workflow trigger
PKG="${{ github.event.inputs.package }}"
if [[ -n "$REBUILD" ]]; then
# Rebuild requested - always proceed
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "🔄 Manual rebuild requested: $PKG (ppa$REBUILD)"
elif [[ "$PKG" == "all" ]]; then
# Check each package and build list of those needing updates
PACKAGES_TO_UPDATE=()
check_dms_git && PACKAGES_TO_UPDATE+=("dms-git")
check_stable_package "dms" "dms" && PACKAGES_TO_UPDATE+=("dms")
check_stable_package "dms-greeter" "danklinux" && PACKAGES_TO_UPDATE+=("dms-greeter")
if [[ ${#PACKAGES_TO_UPDATE[@]} -gt 0 ]]; then
echo "packages=${PACKAGES_TO_UPDATE[*]}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "✓ Packages to update: ${PACKAGES_TO_UPDATE[*]}"
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
echo "✓ All packages up to date"
fi
elif [[ "$PKG" == "dms-git" ]]; then
if check_dms_git; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ "$PKG" == "dms" ]]; then
if check_stable_package "dms" "dms"; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
elif [[ "$PKG" == "dms-greeter" ]]; then
if check_stable_package "dms-greeter" "danklinux"; then
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
else
echo "packages=" >> $GITHUB_OUTPUT
echo "has_updates=false" >> $GITHUB_OUTPUT
fi
else
# Unknown package - proceed anyway
echo "packages=$PKG" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: $PKG"
fi
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
# Fallback
echo "packages=dms-git" >> $GITHUB_OUTPUT
echo "has_updates=true" >> $GITHUB_OUTPUT
fi
@@ -147,19 +75,21 @@ jobs:
name: Upload to PPA
needs: check-updates
runs-on: ubuntu-latest
if: needs.check-updates.outputs.has_updates == 'true'
if: |
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: "1.24"
cache: false
go-version: '1.24'
cache: false
- name: Install build dependencies
run: |
@@ -172,7 +102,7 @@ jobs:
build-essential \
fakeroot \
dpkg-dev
- name: Configure GPG
env:
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
@@ -180,101 +110,79 @@ jobs:
echo "$GPG_KEY" | gpg --import
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
- name: Determine packages to upload
id: packages
run: |
# Use packages determined by check-updates job
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
if [[ "${{ github.event_name }}" == "schedule" ]]; then
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
echo "Triggered by schedule: uploading git package"
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
echo "Manual trigger: ${{ needs.check-updates.outputs.packages }}"
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
echo "Manual trigger: ${{ github.event.inputs.package }}"
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Upload to PPA
env:
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
REBUILD_RELEASE="${{ github.event.inputs.rebuild_release }}"
if [[ -z "$PACKAGES" ]]; then
echo "✓ No packages need uploading. All up to date!"
exit 0
fi
# Export REBUILD_RELEASE so ppa-build.sh can use it
if [[ -n "$REBUILD_RELEASE" ]]; then
export REBUILD_RELEASE
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
fi
# PACKAGES can be space-separated list (e.g., "dms-git dms" from "all" check)
# Loop through each package and upload
for PKG in $PACKAGES; do
# Map package to PPA name
case "$PKG" in
dms)
PPA_NAME="dms"
;;
dms-git)
PPA_NAME="dms-git"
;;
dms-greeter)
PPA_NAME="danklinux"
;;
*)
echo "⚠️ Unknown package: $PKG, skipping"
continue
;;
esac
echo ""
if [[ "$PACKAGES" == "all" ]]; then
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PKG to PPA $PPA_NAME..."
if [[ -n "$REBUILD_RELEASE" ]]; then
echo "Uploading dms to PPA..."
if [ -n "$REBUILD_RELEASE" ]; then
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "$PKG" "$PPA_NAME" questing ${REBUILD_RELEASE:+"$REBUILD_RELEASE"}
done
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms" dms questing
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-git to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-git" dms-git questing
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-greeter to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-greeter" danklinux questing
else
PPA_NAME="$PACKAGES"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading $PACKAGES to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
bash distro/scripts/ppa-upload.sh "distro/ubuntu/$PACKAGES" "$PPA_NAME" questing
fi
- name: Summary
if: always()
run: |
echo "### PPA Package Upload Summary" >> $GITHUB_STEP_SUMMARY
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
fi
PACKAGES="${{ steps.packages.outputs.packages }}"
if [[ -z "$PACKAGES" ]]; then
echo "**Status:** ✅ All packages up to date (no uploads needed)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "All packages are current. Run will complete successfully." >> $GITHUB_STEP_SUMMARY
else
echo "**Packages Uploaded:**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
for PKG in $PACKAGES; do
case "$PKG" in
dms)
echo "- ✅ **dms** → [View builds](https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages)" >> $GITHUB_STEP_SUMMARY
;;
dms-git)
echo "- ✅ **dms-git** → [View builds](https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages)" >> $GITHUB_STEP_SUMMARY
;;
dms-greeter)
echo "- ✅ **dms-greeter** → [View builds](https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages)" >> $GITHUB_STEP_SUMMARY
;;
esac
done
echo "" >> $GITHUB_STEP_SUMMARY
if [[ -n "${{ github.event.inputs.rebuild_release }}" ]]; then
echo "**Rebuild Number:** ppa${{ github.event.inputs.rebuild_release }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY
if [[ "$PACKAGES" == "all" ]]; then
echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
echo "- **PPA danklinux**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-git" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY


@@ -1,19 +0,0 @@
name: Update stable branch
on:
push:
tags:
- "v*"
jobs:
update-stable:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Push to stable branch
run: git push origin HEAD:refs/heads/stable --force


@@ -36,7 +36,7 @@ jobs:
run: |
set -euo pipefail
echo "Attempting nix build to get new vendorHash..."
if output=$(nix build .#dms-shell 2>&1); then
if output=$(nix build .#dmsCli 2>&1); then
echo "Build succeeded, no hash update needed"
exit 0
fi
@@ -46,7 +46,7 @@ jobs:
[ "$current_hash" = "$new_hash" ] && { echo "vendorHash already up to date"; exit 0; }
sed -i "s|vendorHash = \"$current_hash\"|vendorHash = \"$new_hash\"|" flake.nix
echo "Verifying build with new vendorHash..."
nix build .#dms-shell
nix build .#dmsCli
echo "vendorHash updated successfully!"
- name: Commit and push vendorHash update

.gitignore vendored

@@ -96,15 +96,20 @@ go.work
go.work.sum
# env file
.env*
.env
# Editor/IDE
# .idea/
# .vscode/
vim/
bin/
# Extracted source trees in Ubuntu package directories
distro/ubuntu/*/dms-git-repo/
distro/ubuntu/*/DankMaterialShell-*/
distro/ubuntu/danklinux/*/dsearch-*/
distro/ubuntu/danklinux/*/dgop-*/
# direnv
.envrc
.direnv/


@@ -1,12 +0,0 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: check-yaml
- id: end-of-file-fixer
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck
args: [-e, SC2164, -e, SC2001, -e, SC2012, -e, SC2317]


@@ -1,18 +0,0 @@
This file is more of a quick reference so I know what to account for before next releases.
# 1.2.0
- Added clipboard and clipboard history integration
- Added swipe to dismiss notification popups and from center
- Added paste from clipboard history view - requires wtype
- Optimize surface damage of OSD & Toast
- Add monitor configurator (niri, Hyprland, MangoWC)
- **BREAKING** ghostty theme changed to ~/.config/ghostty/themes/danktheme
- requires intervention and doc update
- Added desktop widget plugins
- dev guidance available
- builtin clock & dgop widgets
- new IPC targets
- Initial RTL support/i18n
- Theme registry
- Notification persistence & history


@@ -6,10 +6,10 @@ To contribute fork this repository, make your changes, and open a pull request.
## Setup
Install [prek](https://prek.j178.dev/) then activate pre-commit hooks:
Enable pre-commit hooks to catch CI failures before pushing:
```bash
prek install
git config core.hooksPath .githooks
```
### Nix Development Shell
@@ -21,7 +21,6 @@ nix develop
```
This will provide:
- Go 1.24 toolchain (go, gopls, delve, go-tools) and GNU Make
- Quickshell and required QML packages
- Properly configured QML2_IMPORT_PATH
@@ -55,20 +54,6 @@ touch .qmlls.ini
5. Make your changes, test, and open a pull request.
### I18n/Localization
When adding user-facing strings, ensure they are wrapped in `I18n.tr()` with context, for example:
```qml
import qs.Common
Text {
text: I18n.tr("Hello World", "<This is context for the translators, example> Hello world greeting that appears on the lock screen")
}
```
Preferably, try to keep new terms to a minimum and re-use existing terms where possible. See `quickshell/translations/en.json` for the list of existing terms. (This isn't always possible obviously, but instead of using `Auto-connect` you would use `Autoconnect` since it's already translated)
### GO (`core` directory)
1. Install the [Go Extension](https://code.visualstudio.com/docs/languages/go)


@@ -163,7 +163,7 @@ quickshell -p quickshell/
inputs.dms.url = "github:AvengeMedia/DankMaterialShell";
# Use in home-manager or NixOS configuration
imports = [ inputs.dms.homeModules.dank-material-shell ];
imports = [ inputs.dms.homeModules.dankMaterialShell.default ];
}
```


@@ -1,10 +1,10 @@
[Desktop Entry]
Type=Application
Name=DMS
Name=DMS Application Picker
Comment=Select an application to open links and files
Exec=dms open %u
Icon=danklogo
Terminal=false
NoDisplay=true
MimeType=x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/dms;text/html;application/xhtml+xml;
MimeType=x-scheme-handler/http;x-scheme-handler/https;text/html;application/xhtml+xml;
Categories=Utility;


@@ -9,9 +9,9 @@ Type=dbus
BusName=org.freedesktop.Notifications
ExecStart=/usr/bin/dms run --session
ExecReload=/usr/bin/pkill -USR1 -x dms
Restart=on-failure
Restart=always
RestartSec=1.23
TimeoutStopSec=10
[Install]
WantedBy=graphical-session.target
WantedBy=graphical-session.target


@@ -102,11 +102,7 @@ linters:
- linters:
- ineffassign
path: internal/proto/
# binary.Write/Read to bytes.Buffer can't fail
# binary.Write to bytes.Buffer can't fail
- linters:
- errcheck
text: "Error return value of `binary\\.(Write|Read)` is not checked"
# bytes.Reader.Read can't fail (reads from memory)
- linters:
- errcheck
text: "Error return value of `buf\\.Read` is not checked"
text: "Error return value of `binary\\.Write` is not checked"


@@ -56,15 +56,3 @@ packages:
outpkg: mocks_version
interfaces:
VersionFetcher:
github.com/AvengeMedia/DankMaterialShell/core/internal/server/wlcontext:
config:
dir: "internal/mocks/wlcontext"
outpkg: mocks_wlcontext
interfaces:
WaylandContext:
github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client:
config:
dir: "internal/mocks/wlclient"
outpkg: mocks_wlclient
interfaces:
WaylandDisplay:


@@ -1,16 +0,0 @@
repos:
- repo: https://github.com/golangci/golangci-lint
rev: v2.6.2
hooks:
- id: golangci-lint-fmt
require_serial: true
- id: golangci-lint-full
- id: golangci-lint-config-verify
- repo: local
hooks:
- id: go-test
name: go test
entry: go test ./...
language: system
pass_filenames: false
types: [go]


@@ -14,63 +14,34 @@ Distribution-aware installer with TUI for deploying DMS and compositor configura
## System Integration
### Wayland Protocols (Client)
**Wayland Protocols**
- `wlr-gamma-control-unstable-v1` - Night mode and gamma control
- `wlr-screencopy-unstable-v1` - Screen capture for color picker
- `wlr-layer-shell-unstable-v1` - Overlay surfaces for color picker
- `wp-viewporter` - Fractional scaling support
- `dwl-ipc-unstable-v2` - dwl/MangoWC workspace integration
- `ext-workspace-v1` - Workspace protocol support
- `wlr-output-management-unstable-v1` - Display configuration
All Wayland protocols are consumed as a client - connecting to the compositor.
**DBus Interfaces**
- NetworkManager/iwd - Network management
- logind - Session control and inhibit locks
- accountsservice - User account information
- CUPS - Printer management
- Custom IPC via unix socket (JSON API)
| Protocol | Purpose |
| ----------------------------------------- | ----------------------------------------------------------- |
| `wlr-gamma-control-unstable-v1` | Night mode color temperature control |
| `wlr-screencopy-unstable-v1` | Screen capture for color picker/screenshot |
| `wlr-layer-shell-unstable-v1` | Overlay surfaces for color picker UI/screenshot |
| `wlr-output-management-unstable-v1` | Display configuration |
| `wlr-output-power-management-unstable-v1` | DPMS on/off CLI |
| `wp-viewporter` | Fractional scaling support (color picker/screenshot UIs) |
| `keyboard-shortcuts-inhibit-unstable-v1` | Inhibit compositor shortcuts during color picker/screenshot |
| `ext-data-control-v1` | Clipboard history and persistence |
| `ext-workspace-v1` | Workspace integration |
| `dwl-ipc-unstable-v2` | dwl/MangoWC IPC for tags, outputs, etc. |
### DBus Interfaces
**Client (consuming external services):**
| Interface | Purpose |
| -------------------------------- | --------------------------------------------- |
| `org.bluez` | Bluetooth management with pairing agent |
| `org.freedesktop.NetworkManager` | Network management |
| `net.connman.iwd` | iwd Wi-Fi backend |
| `org.freedesktop.network1` | systemd-networkd integration |
| `org.freedesktop.login1` | Session control, sleep inhibitors, brightness |
| `org.freedesktop.Accounts` | User account information |
| `org.freedesktop.portal.Desktop` | Desktop appearance settings (color scheme) |
| CUPS via IPP + D-Bus | Printer management with job notifications |
**Server (implementing interfaces):**
| Interface | Purpose |
| ----------------------------- | -------------------------------------- |
| `org.freedesktop.ScreenSaver` | Screensaver inhibit for video playback |
Custom IPC via unix socket (JSON API) for shell communication.
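A minimal sketch of what a raw request could look like, assuming a newline-delimited JSON framing and a hypothetical socket path (the `dms` CLI normally wraps this; `socat` is used here purely for illustration):
```bash
# Hypothetical socket path and framing; the method name mirrors the request
# format used by the CLI elsewhere in this diff (ID + Method + Params).
printf '{"id":1,"method":"clipboard.getHistory"}\n' \
  | socat - UNIX-CONNECT:"$XDG_RUNTIME_DIR/dms/dms.sock"
```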
### Hardware Control
| Subsystem | Method | Purpose |
| --------- | ------------------- | ---------------------------------- |
| DDC/CI | I2C direct | External monitor brightness |
| Backlight | logind or sysfs | Internal display brightness |
| evdev | `/dev/input/event*` | Keyboard state (caps lock LED) |
| udev | netlink monitor | Backlight device updates (for OSD) |
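As a rough illustration, the backlight fallback in the table uses the standard kernel sysfs interface; a minimal sketch, assuming a hypothetical device name and appropriate permissions for the write:
```bash
# Read the maximum, then set an absolute brightness value (hypothetical device name)
cat /sys/class/backlight/intel_backlight/max_brightness
echo 400 | sudo tee /sys/class/backlight/intel_backlight/brightness
```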
### Plugin System
**Hardware Control**
- DDC/CI protocol - External monitor brightness control (like `ddcutil`)
- Backlight control - Internal display brightness via `login1` or sysfs
- LED control - Keyboard/device LED management
- evdev input monitoring - Keyboard state tracking (caps lock, etc.)
**Plugin System**
- Plugin registry integration
- Plugin lifecycle management
- Settings persistence
## CLI Commands
- `dms run [-d]` - Start shell (optionally as daemon)
- `dms restart` / `dms kill` - Manage running processes
- `dms ipc <command>` - Send IPC commands (toggle launcher, notifications, etc.)
@@ -99,7 +70,6 @@ The on-screen preview displays the selected format. JSON output includes hex, RG
Requires Go 1.24+
**Development build:**
```bash
make # Build dms CLI
make dankinstall # Build installer
@@ -107,7 +77,6 @@ make test # Run tests
```
**Distribution build:**
```bash
make dist # Build without update/greeter features
```
@@ -115,7 +84,6 @@ make dist # Build without update/greeter features
Produces `bin/dms-linux-amd64` and `bin/dms-linux-arm64`
**Installation:**
```bash
sudo make install # Install to /usr/local/bin/dms
```
@@ -123,7 +91,6 @@ sudo make install # Install to /usr/local/bin/dms
## Development
**Setup pre-commit hooks:**
```bash
git config core.hooksPath .githooks
```
@@ -131,7 +98,6 @@ git config core.hooksPath .githooks
This runs gofmt, golangci-lint, tests, and builds before each commit when `core/` files are staged.
**Regenerating Wayland Protocol Bindings:**
```bash
go install github.com/rajveermalviya/go-wayland/cmd/go-wayland-scanner@latest
go-wayland-scanner -i internal/proto/xml/wlr-gamma-control-unstable-v1.xml \
@@ -139,7 +105,6 @@ go-wayland-scanner -i internal/proto/xml/wlr-gamma-control-unstable-v1.xml \
```
**Module Structure:**
- `cmd/` - Binary entrypoints (dms, dankinstall)
- `internal/distros/` - Distribution-specific installation logic
- `internal/proto/` - Wayland protocol bindings
@@ -174,4 +139,4 @@ Most packages available in standard repos. Minimal building required.
**Gentoo**
Uses Portage with GURU overlay. Automatically configures USE flags. Variable success depending on system configuration.
See installer output for distribution-specific details during installation.
See installer output for distribution-specific details during installation.


@@ -8,7 +8,7 @@
<rect x="0" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="20" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="0" y="37" width="24" height="8" fill="#CCBEFF"/>
<!-- A -->
<rect x="36" y="5" width="20" height="8" fill="#CCBEFF"/>
<rect x="32" y="13" width="8" height="8" fill="#CCBEFF"/>
@@ -18,7 +18,7 @@
<rect x="52" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="32" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="52" y="37" width="8" height="8" fill="#CCBEFF"/>
<!-- N -->
<rect x="64" y="5" width="12" height="8" fill="#CCBEFF"/>
<rect x="92" y="5" width="8" height="8" fill="#CCBEFF"/>
@@ -32,7 +32,7 @@
<rect x="92" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="64" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="84" y="37" width="16" height="8" fill="#CCBEFF"/>
<!-- K -->
<rect x="104" y="5" width="8" height="8" fill="#CCBEFF"/>
<rect x="124" y="5" width="8" height="8" fill="#CCBEFF"/>
@@ -43,4 +43,4 @@
<rect x="120" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="104" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="124" y="37" width="8" height="8" fill="#CCBEFF"/>
</svg>
</svg>



@@ -179,7 +179,7 @@ func runBrightnessList(cmd *cobra.Command, args []string) {
fmt.Printf("%-*s %-12s %-*s %s\n", idPad, "Device", "Class", namePad, "Name", "Brightness")
sepLen := idPad + 2 + 12 + 2 + namePad + 2 + 15
for range sepLen {
for i := 0; i < sepLen; i++ {
fmt.Print("─")
}
fmt.Println()


@@ -1,608 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"os/exec"
"os/signal"
"strconv"
"syscall"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/spf13/cobra"
)
var clipboardCmd = &cobra.Command{
Use: "clipboard",
Aliases: []string{"cl"},
Short: "Manage clipboard",
Long: "Interact with the clipboard manager",
}
var clipCopyCmd = &cobra.Command{
Use: "copy [text]",
Short: "Copy text to clipboard",
Long: "Copy text to clipboard. If no text provided, reads from stdin. Works without server.",
Run: runClipCopy,
}
var (
clipCopyForeground bool
clipCopyPasteOnce bool
clipCopyType string
clipJSONOutput bool
)
var clipPasteCmd = &cobra.Command{
Use: "paste",
Short: "Paste text from clipboard",
Long: "Paste text from clipboard to stdout. Works without server.",
Run: runClipPaste,
}
var clipWatchCmd = &cobra.Command{
Use: "watch [command]",
Short: "Watch clipboard for changes",
Long: `Watch clipboard for changes and optionally execute a command.
Works like wl-paste --watch. Does not require server.
If a command is provided, it will be executed each time the clipboard changes,
with the clipboard content piped to its stdin.
Examples:
dms cl watch # Print clipboard changes to stdout
dms cl watch cat # Same as above
dms cl watch notify-send # Send notification on clipboard change`,
Run: runClipWatch,
}
var clipHistoryCmd = &cobra.Command{
Use: "history",
Short: "Show clipboard history",
Long: "Show clipboard history with previews (requires server)",
Run: runClipHistory,
}
var clipGetCmd = &cobra.Command{
Use: "get <id>",
Short: "Get clipboard entry by ID",
Long: "Get full clipboard entry data by ID (requires server). Use --copy to copy it to clipboard.",
Args: cobra.ExactArgs(1),
Run: runClipGet,
}
var clipGetCopy bool
var clipDeleteCmd = &cobra.Command{
Use: "delete <id>",
Short: "Delete clipboard entry",
Long: "Delete a clipboard history entry by ID (requires server)",
Args: cobra.ExactArgs(1),
Run: runClipDelete,
}
var clipClearCmd = &cobra.Command{
Use: "clear",
Short: "Clear clipboard history",
Long: "Clear all clipboard history (requires server)",
Run: runClipClear,
}
var clipWatchStore bool
var clipSearchCmd = &cobra.Command{
Use: "search [query]",
Short: "Search clipboard history",
Long: "Search clipboard history with filters (requires server)",
Run: runClipSearch,
}
var (
clipSearchLimit int
clipSearchOffset int
clipSearchMimeType string
clipSearchImages bool
clipSearchText bool
)
var clipConfigCmd = &cobra.Command{
Use: "config",
Short: "Manage clipboard config",
Long: "Get or set clipboard configuration (requires server)",
}
var clipConfigGetCmd = &cobra.Command{
Use: "get",
Short: "Get clipboard config",
Run: runClipConfigGet,
}
var clipConfigSetCmd = &cobra.Command{
Use: "set",
Short: "Set clipboard config",
Long: `Set clipboard configuration options.
Examples:
dms cl config set --max-history 200
dms cl config set --auto-clear-days 7
dms cl config set --clear-at-startup`,
Run: runClipConfigSet,
}
var (
clipConfigMaxHistory int
clipConfigAutoClearDays int
clipConfigClearAtStartup bool
clipConfigNoClearStartup bool
clipConfigDisabled bool
clipConfigEnabled bool
)
func init() {
clipCopyCmd.Flags().BoolVarP(&clipCopyForeground, "foreground", "f", false, "Stay in foreground instead of forking")
clipCopyCmd.Flags().BoolVarP(&clipCopyPasteOnce, "paste-once", "o", false, "Exit after first paste")
clipCopyCmd.Flags().StringVarP(&clipCopyType, "type", "t", "text/plain;charset=utf-8", "MIME type")
clipWatchCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipHistoryCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipGetCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipGetCmd.Flags().BoolVarP(&clipGetCopy, "copy", "c", false, "Copy entry to clipboard")
clipSearchCmd.Flags().IntVarP(&clipSearchLimit, "limit", "l", 50, "Max results")
clipSearchCmd.Flags().IntVarP(&clipSearchOffset, "offset", "o", 0, "Result offset")
clipSearchCmd.Flags().StringVarP(&clipSearchMimeType, "mime", "m", "", "Filter by MIME type")
clipSearchCmd.Flags().BoolVar(&clipSearchImages, "images", false, "Only images")
clipSearchCmd.Flags().BoolVar(&clipSearchText, "text", false, "Only text")
clipSearchCmd.Flags().BoolVar(&clipJSONOutput, "json", false, "Output as JSON")
clipConfigSetCmd.Flags().IntVar(&clipConfigMaxHistory, "max-history", 0, "Max history entries")
clipConfigSetCmd.Flags().IntVar(&clipConfigAutoClearDays, "auto-clear-days", -1, "Auto-clear entries older than N days (0 to disable)")
clipConfigSetCmd.Flags().BoolVar(&clipConfigClearAtStartup, "clear-at-startup", false, "Clear history on startup")
clipConfigSetCmd.Flags().BoolVar(&clipConfigNoClearStartup, "no-clear-at-startup", false, "Don't clear history on startup")
clipConfigSetCmd.Flags().BoolVar(&clipConfigDisabled, "disable", false, "Disable clipboard tracking")
clipConfigSetCmd.Flags().BoolVar(&clipConfigEnabled, "enable", false, "Enable clipboard tracking")
clipWatchCmd.Flags().BoolVarP(&clipWatchStore, "store", "s", false, "Store clipboard changes to history (no server required)")
clipConfigCmd.AddCommand(clipConfigGetCmd, clipConfigSetCmd)
clipboardCmd.AddCommand(clipCopyCmd, clipPasteCmd, clipWatchCmd, clipHistoryCmd, clipGetCmd, clipDeleteCmd, clipClearCmd, clipSearchCmd, clipConfigCmd)
}
func runClipCopy(cmd *cobra.Command, args []string) {
var data []byte
if len(args) > 0 {
data = []byte(args[0])
} else {
var err error
data, err = io.ReadAll(os.Stdin)
if err != nil {
log.Fatalf("read stdin: %v", err)
}
}
if err := clipboard.CopyOpts(data, clipCopyType, clipCopyForeground, clipCopyPasteOnce); err != nil {
log.Fatalf("copy: %v", err)
}
}
func runClipPaste(cmd *cobra.Command, args []string) {
data, _, err := clipboard.Paste()
if err != nil {
log.Fatalf("paste: %v", err)
}
os.Stdout.Write(data)
}
func runClipWatch(cmd *cobra.Command, args []string) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
sigCh := make(chan os.Signal, 1)
signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM)
go func() {
<-sigCh
cancel()
}()
switch {
case len(args) > 0:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
runCommand(args, data)
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
case clipWatchStore:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
if err := clipboard.Store(data, mimeType); err != nil {
log.Errorf("store: %v", err)
}
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
case clipJSONOutput:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
out := map[string]any{
"data": string(data),
"mimeType": mimeType,
"timestamp": time.Now().Format(time.RFC3339),
"size": len(data),
}
j, _ := json.Marshal(out)
fmt.Println(string(j))
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
default:
if err := clipboard.Watch(ctx, func(data []byte, mimeType string) {
os.Stdout.Write(data)
os.Stdout.WriteString("\n")
}); err != nil && err != context.Canceled {
log.Fatalf("Watch error: %v", err)
}
}
}
func runCommand(args []string, stdin []byte) {
cmd := exec.Command(args[0], args[1:]...)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if len(stdin) == 0 {
cmd.Run()
return
}
r, w, err := os.Pipe()
if err != nil {
cmd.Run()
return
}
cmd.Stdin = r
go func() {
w.Write(stdin)
w.Close()
}()
cmd.Run()
}
func runClipHistory(cmd *cobra.Command, args []string) {
req := models.Request{
ID: 1,
Method: "clipboard.getHistory",
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to get clipboard history: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
if clipJSONOutput {
fmt.Println("[]")
} else {
fmt.Println("No clipboard history")
}
return
}
historyList, ok := (*resp.Result).([]any)
if !ok {
log.Fatal("Invalid response format")
}
if clipJSONOutput {
out, _ := json.MarshalIndent(historyList, "", " ")
fmt.Println(string(out))
return
}
if len(historyList) == 0 {
fmt.Println("No clipboard history")
return
}
fmt.Println("Clipboard History:")
fmt.Println()
for _, item := range historyList {
entry, ok := item.(map[string]any)
if !ok {
continue
}
id := uint64(entry["id"].(float64))
preview := entry["preview"].(string)
timestamp := entry["timestamp"].(string)
isImage := entry["isImage"].(bool)
typeStr := "text"
if isImage {
typeStr = "image"
}
fmt.Printf("ID: %d | %s | %s\n", id, typeStr, timestamp)
fmt.Printf(" %s\n", preview)
fmt.Println()
}
}
func runClipGet(cmd *cobra.Command, args []string) {
id, err := strconv.ParseUint(args[0], 10, 64)
if err != nil {
log.Fatalf("Invalid ID: %v", err)
}
if clipGetCopy {
req := models.Request{
ID: 1,
Method: "clipboard.copyEntry",
Params: map[string]any{
"id": id,
},
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to copy clipboard entry: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Printf("Copied entry %d to clipboard\n", id)
return
}
req := models.Request{
ID: 1,
Method: "clipboard.getEntry",
Params: map[string]any{
"id": id,
},
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to get clipboard entry: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
log.Fatal("Entry not found")
}
entry, ok := (*resp.Result).(map[string]any)
if !ok {
log.Fatal("Invalid response format")
}
switch {
case clipJSONOutput:
output, _ := json.MarshalIndent(entry, "", " ")
fmt.Println(string(output))
default:
if data, ok := entry["data"].(string); ok {
fmt.Print(data)
} else {
output, _ := json.MarshalIndent(entry, "", " ")
fmt.Println(string(output))
}
}
}
func runClipDelete(cmd *cobra.Command, args []string) {
id, err := strconv.ParseUint(args[0], 10, 64)
if err != nil {
log.Fatalf("Invalid ID: %v", err)
}
req := models.Request{
ID: 1,
Method: "clipboard.deleteEntry",
Params: map[string]any{
"id": id,
},
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to delete clipboard entry: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Printf("Deleted entry %d\n", id)
}
func runClipClear(cmd *cobra.Command, args []string) {
req := models.Request{
ID: 1,
Method: "clipboard.clearHistory",
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to clear clipboard history: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Println("Clipboard history cleared")
}
func runClipSearch(cmd *cobra.Command, args []string) {
params := map[string]any{
"limit": clipSearchLimit,
"offset": clipSearchOffset,
}
if len(args) > 0 {
params["query"] = args[0]
}
if clipSearchMimeType != "" {
params["mimeType"] = clipSearchMimeType
}
if clipSearchImages {
params["isImage"] = true
} else if clipSearchText {
params["isImage"] = false
}
req := models.Request{
ID: 1,
Method: "clipboard.search",
Params: params,
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to search clipboard: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
log.Fatal("No results")
}
result, ok := (*resp.Result).(map[string]any)
if !ok {
log.Fatal("Invalid response format")
}
if clipJSONOutput {
out, _ := json.MarshalIndent(result, "", " ")
fmt.Println(string(out))
return
}
entries, _ := result["entries"].([]any)
total := int(result["total"].(float64))
hasMore := result["hasMore"].(bool)
if len(entries) == 0 {
fmt.Println("No results found")
return
}
fmt.Printf("Results: %d of %d\n\n", len(entries), total)
for _, item := range entries {
entry, ok := item.(map[string]any)
if !ok {
continue
}
id := uint64(entry["id"].(float64))
preview := entry["preview"].(string)
timestamp := entry["timestamp"].(string)
isImage := entry["isImage"].(bool)
typeStr := "text"
if isImage {
typeStr = "image"
}
fmt.Printf("ID: %d | %s | %s\n", id, typeStr, timestamp)
fmt.Printf(" %s\n\n", preview)
}
if hasMore {
fmt.Printf("Use --offset %d to see more results\n", clipSearchOffset+clipSearchLimit)
}
}
func runClipConfigGet(cmd *cobra.Command, args []string) {
req := models.Request{
ID: 1,
Method: "clipboard.getConfig",
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to get config: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
if resp.Result == nil {
log.Fatal("No config returned")
}
cfg, ok := (*resp.Result).(map[string]any)
if !ok {
log.Fatal("Invalid response format")
}
output, _ := json.MarshalIndent(cfg, "", " ")
fmt.Println(string(output))
}
func runClipConfigSet(cmd *cobra.Command, args []string) {
params := map[string]any{}
if cmd.Flags().Changed("max-history") {
params["maxHistory"] = clipConfigMaxHistory
}
if cmd.Flags().Changed("auto-clear-days") {
params["autoClearDays"] = clipConfigAutoClearDays
}
if clipConfigClearAtStartup {
params["clearAtStartup"] = true
}
if clipConfigNoClearStartup {
params["clearAtStartup"] = false
}
if clipConfigDisabled {
params["disabled"] = true
}
if clipConfigEnabled {
params["disabled"] = false
}
if len(params) == 0 {
fmt.Println("No config options specified")
return
}
req := models.Request{
ID: 1,
Method: "clipboard.setConfig",
Params: params,
}
resp, err := sendServerRequest(req)
if err != nil {
log.Fatalf("Failed to set config: %v", err)
}
if resp.Error != "" {
log.Fatalf("Error: %s", resp.Error)
}
fmt.Println("Config updated")
}


@@ -3,8 +3,8 @@ package main
import (
"fmt"
"os"
"os/exec"
"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard"
"github.com/AvengeMedia/DankMaterialShell/core/internal/colorpicker"
"github.com/spf13/cobra"
)
@@ -121,7 +121,13 @@ func runColorPick(cmd *cobra.Command, args []string) {
}
func copyToClipboard(text string) {
if err := clipboard.CopyText(text); err != nil {
fmt.Fprintln(os.Stderr, "clipboard copy failed:", err)
var cmd *exec.Cmd
if _, err := exec.LookPath("wl-copy"); err == nil {
cmd = exec.Command("wl-copy", text)
} else {
fmt.Fprintln(os.Stderr, "wl-copy not found, cannot copy to clipboard")
return
}
_ = cmd.Run()
}


@@ -171,6 +171,7 @@ var pluginsUpdateCmd = &cobra.Command{
}
func runVersion(cmd *cobra.Command, args []string) {
printASCII()
fmt.Printf("%s\n", formatVersion(Version))
}
@@ -219,7 +220,7 @@ func getBaseVersion() string {
}
// Fallback
return "1.0.2"
return "0.6.2"
}
func startDebugServer() error {
@@ -512,7 +513,5 @@ func getCommonCommands() []*cobra.Command {
screenshotCmd,
notifyActionCmd,
matugenCmd,
clipboardCmd,
doctorCmd,
}
}


@@ -22,7 +22,6 @@ func init() {
dank16Cmd.Flags().Bool("json", false, "Output in JSON format")
dank16Cmd.Flags().Bool("kitty", false, "Output in Kitty terminal format")
dank16Cmd.Flags().Bool("foot", false, "Output in Foot terminal format")
dank16Cmd.Flags().Bool("neovim", false, "Output in Neovim plugin format")
dank16Cmd.Flags().Bool("alacritty", false, "Output in Alacritty terminal format")
dank16Cmd.Flags().Bool("ghostty", false, "Output in Ghostty terminal format")
dank16Cmd.Flags().Bool("wezterm", false, "Output in Wezterm terminal format")
@@ -41,7 +40,6 @@ func runDank16(cmd *cobra.Command, args []string) {
isJson, _ := cmd.Flags().GetBool("json")
isKitty, _ := cmd.Flags().GetBool("kitty")
isFoot, _ := cmd.Flags().GetBool("foot")
isNeovim, _ := cmd.Flags().GetBool("neovim")
isAlacritty, _ := cmd.Flags().GetBool("alacritty")
isGhostty, _ := cmd.Flags().GetBool("ghostty")
isWezterm, _ := cmd.Flags().GetBool("wezterm")
@@ -118,8 +116,6 @@ func runDank16(cmd *cobra.Command, args []string) {
fmt.Print(dank16.GenerateGhosttyTheme(colors))
} else if isWezterm {
fmt.Print(dank16.GenerateWeztermTheme(colors))
} else if isNeovim {
fmt.Print(dank16.GenerateNeovimTheme(colors))
} else {
fmt.Print(dank16.GenerateGhosttyTheme(colors))
}


@@ -1,919 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"slices"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/config"
"github.com/AvengeMedia/DankMaterialShell/core/internal/distros"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/brightness"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/network"
"github.com/AvengeMedia/DankMaterialShell/core/internal/tui"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/AvengeMedia/DankMaterialShell/core/internal/version"
"github.com/charmbracelet/lipgloss"
"github.com/spf13/cobra"
)
type status string
const (
statusOK status = "ok"
statusWarn status = "warn"
statusError status = "error"
statusInfo status = "info"
)
func (s status) IconStyle(styles tui.Styles) (string, lipgloss.Style) {
switch s {
case statusOK:
return "●", styles.Success
case statusWarn:
return "●", styles.Warning
case statusError:
return "●", styles.Error
default:
return "○", styles.Subtle
}
}
type DoctorStatus struct {
Errors []checkResult
Warnings []checkResult
OK []checkResult
Info []checkResult
}
func (ds *DoctorStatus) Add(r checkResult) {
switch r.status {
case statusError:
ds.Errors = append(ds.Errors, r)
case statusWarn:
ds.Warnings = append(ds.Warnings, r)
case statusOK:
ds.OK = append(ds.OK, r)
case statusInfo:
ds.Info = append(ds.Info, r)
}
}
func (ds *DoctorStatus) HasIssues() bool {
return len(ds.Errors) > 0 || len(ds.Warnings) > 0
}
func (ds *DoctorStatus) ErrorCount() int {
return len(ds.Errors)
}
func (ds *DoctorStatus) WarningCount() int {
return len(ds.Warnings)
}
func (ds *DoctorStatus) OKCount() int {
return len(ds.OK)
}
var (
quickshellVersionRegex = regexp.MustCompile(`quickshell (\d+\.\d+\.\d+)`)
hyprlandVersionRegex = regexp.MustCompile(`v?(\d+\.\d+\.\d+)`)
niriVersionRegex = regexp.MustCompile(`niri (\d+\.\d+)`)
swayVersionRegex = regexp.MustCompile(`sway version (\d+\.\d+)`)
riverVersionRegex = regexp.MustCompile(`river (\d+\.\d+)`)
wayfireVersionRegex = regexp.MustCompile(`wayfire (\d+\.\d+)`)
)
var doctorCmd = &cobra.Command{
Use: "doctor",
Short: "Diagnose DMS installation and dependencies",
Long: "Check system health, verify dependencies, and diagnose configuration issues for DMS",
Run: runDoctor,
}
var (
doctorVerbose bool
doctorJSON bool
)
func init() {
doctorCmd.Flags().BoolVarP(&doctorVerbose, "verbose", "v", false, "Show detailed output including paths and versions")
doctorCmd.Flags().BoolVarP(&doctorJSON, "json", "j", false, "Output results in JSON format")
}
type category int
const (
catSystem category = iota
catVersions
catInstallation
catCompositor
catQuickshellFeatures
catOptionalFeatures
catConfigFiles
catServices
catEnvironment
)
func (c category) String() string {
switch c {
case catSystem:
return "System"
case catVersions:
return "Versions"
case catInstallation:
return "Installation"
case catCompositor:
return "Compositor"
case catQuickshellFeatures:
return "Quickshell Features"
case catOptionalFeatures:
return "Optional Features"
case catConfigFiles:
return "Config Files"
case catServices:
return "Services"
case catEnvironment:
return "Environment"
default:
return "Unknown"
}
}
const (
checkNameMaxLength = 21
)
type checkResult struct {
category category
name string
status status
message string
details string
}
type checkResultJSON struct {
Category string `json:"category"`
Name string `json:"name"`
Status string `json:"status"`
Message string `json:"message"`
Details string `json:"details,omitempty"`
}
type doctorOutputJSON struct {
Summary struct {
Errors int `json:"errors"`
Warnings int `json:"warnings"`
OK int `json:"ok"`
Info int `json:"info"`
} `json:"summary"`
Results []checkResultJSON `json:"results"`
}
func (r checkResult) toJSON() checkResultJSON {
return checkResultJSON{
Category: r.category.String(),
Name: r.name,
Status: string(r.status),
Message: r.message,
Details: r.details,
}
}
func runDoctor(cmd *cobra.Command, args []string) {
if !doctorJSON {
printDoctorHeader()
}
qsFeatures, qsMissingFeatures := checkQuickshellFeatures()
results := slices.Concat(
checkSystemInfo(),
checkVersions(qsMissingFeatures),
checkDMSInstallation(),
checkWindowManagers(),
qsFeatures,
checkOptionalDependencies(),
checkConfigurationFiles(),
checkSystemdServices(),
checkEnvironmentVars(),
)
if doctorJSON {
printResultsJSON(results)
} else {
printResults(results)
printSummary(results, qsMissingFeatures)
}
}
func printDoctorHeader() {
theme := tui.TerminalTheme()
styles := tui.NewStyles(theme)
fmt.Println(getThemedASCII())
fmt.Println(styles.Title.Render("System Health Check"))
fmt.Println(styles.Subtle.Render("──────────────────────────────────────"))
fmt.Println()
}
func checkSystemInfo() []checkResult {
var results []checkResult
osInfo, err := distros.GetOSInfo()
if err != nil {
status, message, details := statusWarn, fmt.Sprintf("Unknown (%v)", err), ""
if strings.Contains(err.Error(), "Unsupported distribution") {
osRelease := readOSRelease()
if osRelease["ID"] == "nixos" {
status = statusOK
message = osRelease["PRETTY_NAME"]
if message == "" {
message = fmt.Sprintf("NixOS %s", osRelease["VERSION_ID"])
}
details = "Supported for runtime (install via NixOS module or Flake)"
} else if osRelease["PRETTY_NAME"] != "" {
message = fmt.Sprintf("%s (not supported by dms setup)", osRelease["PRETTY_NAME"])
details = "DMS may work but automatic installation is not available"
}
}
results = append(results, checkResult{catSystem, "Operating System", status, message, details})
} else {
status := statusOK
message := osInfo.PrettyName
if message == "" {
message = fmt.Sprintf("%s %s", osInfo.Distribution.ID, osInfo.VersionID)
}
if distros.IsUnsupportedDistro(osInfo.Distribution.ID, osInfo.VersionID) {
status = statusWarn
message += " (version may not be fully supported)"
}
results = append(results, checkResult{
catSystem, "Operating System", status, message,
fmt.Sprintf("ID: %s, Version: %s, Arch: %s", osInfo.Distribution.ID, osInfo.VersionID, osInfo.Architecture),
})
}
arch := runtime.GOARCH
archStatus := statusOK
if arch != "amd64" && arch != "arm64" {
archStatus = statusError
}
results = append(results, checkResult{catSystem, "Architecture", archStatus, arch, ""})
waylandDisplay := os.Getenv("WAYLAND_DISPLAY")
xdgSessionType := os.Getenv("XDG_SESSION_TYPE")
switch {
case waylandDisplay != "" || xdgSessionType == "wayland":
results = append(results, checkResult{
catSystem, "Display Server", statusOK, "Wayland",
fmt.Sprintf("WAYLAND_DISPLAY=%s", waylandDisplay),
})
case xdgSessionType == "x11":
results = append(results, checkResult{catSystem, "Display Server", statusError, "X11 (DMS requires Wayland)", ""})
default:
results = append(results, checkResult{
catSystem, "Display Server", statusWarn, "Unknown (ensure you're running Wayland)",
fmt.Sprintf("XDG_SESSION_TYPE=%s", xdgSessionType),
})
}
return results
}
func checkEnvironmentVars() []checkResult {
var results []checkResult
results = append(results, checkEnvVar("QT_QPA_PLATFORMTHEME")...)
results = append(results, checkEnvVar("QS_ICON_THEME")...)
return results
}
func checkEnvVar(name string) []checkResult {
value := os.Getenv(name)
if value != "" {
return []checkResult{{catEnvironment, name, statusInfo, value, ""}}
} else if doctorVerbose {
return []checkResult{{catEnvironment, name, statusInfo, "Not set", ""}}
}
return nil
}
func readOSRelease() map[string]string {
result := make(map[string]string)
data, err := os.ReadFile("/etc/os-release")
if err != nil {
return result
}
for line := range strings.SplitSeq(string(data), "\n") {
if parts := strings.SplitN(line, "=", 2); len(parts) == 2 {
result[parts[0]] = strings.Trim(parts[1], "\"")
}
}
return result
}
func checkVersions(qsMissingFeatures bool) []checkResult {
dmsCliPath, _ := os.Executable()
dmsCliDetails := ""
if doctorVerbose {
dmsCliDetails = dmsCliPath
}
results := []checkResult{
{catVersions, "DMS CLI", statusOK, formatVersion(Version), dmsCliDetails},
}
qsVersion, qsStatus, qsPath := getQuickshellVersionInfo(qsMissingFeatures)
qsDetails := ""
if doctorVerbose && qsPath != "" {
qsDetails = qsPath
}
results = append(results, checkResult{catVersions, "Quickshell", qsStatus, qsVersion, qsDetails})
dmsVersion, dmsPath := getDMSShellVersion()
if dmsVersion != "" {
results = append(results, checkResult{catVersions, "DMS Shell", statusOK, dmsVersion, dmsPath})
} else {
results = append(results, checkResult{catVersions, "DMS Shell", statusError, "Not installed or not detected", "Run 'dms setup' to install"})
}
return results
}
func getDMSShellVersion() (version, path string) {
if err := findConfig(nil, nil); err == nil && configPath != "" {
versionFile := filepath.Join(configPath, "VERSION")
if data, err := os.ReadFile(versionFile); err == nil {
return strings.TrimSpace(string(data)), configPath
}
return "installed", configPath
}
if dmsPath, err := config.LocateDMSConfig(); err == nil {
versionFile := filepath.Join(dmsPath, "VERSION")
if data, err := os.ReadFile(versionFile); err == nil {
return strings.TrimSpace(string(data)), dmsPath
}
return "installed", dmsPath
}
return "", ""
}
func getQuickshellVersionInfo(missingFeatures bool) (string, status, string) {
if !utils.CommandExists("qs") {
return "Not installed", statusError, ""
}
qsPath, _ := exec.LookPath("qs")
output, err := exec.Command("qs", "--version").Output()
if err != nil {
return "Installed (version check failed)", statusWarn, qsPath
}
fullVersion := strings.TrimSpace(string(output))
if matches := quickshellVersionRegex.FindStringSubmatch(fullVersion); len(matches) >= 2 {
if version.CompareVersions(matches[1], "0.2.0") < 0 {
return fmt.Sprintf("%s (needs >= 0.2.0)", fullVersion), statusError, qsPath
}
if missingFeatures {
return fullVersion, statusWarn, qsPath
}
return fullVersion, statusOK, qsPath
}
return fullVersion, statusWarn, qsPath
}
func checkDMSInstallation() []checkResult {
var results []checkResult
dmsPath := ""
if err := findConfig(nil, nil); err == nil && configPath != "" {
dmsPath = configPath
} else if path, err := config.LocateDMSConfig(); err == nil {
dmsPath = path
}
if dmsPath == "" {
return []checkResult{{catInstallation, "DMS Configuration", statusError, "Not found", "shell.qml not found in any config path"}}
}
results = append(results, checkResult{catInstallation, "DMS Configuration", statusOK, "Found", dmsPath})
shellQml := filepath.Join(dmsPath, "shell.qml")
if _, err := os.Stat(shellQml); err != nil {
results = append(results, checkResult{catInstallation, "shell.qml", statusError, "Missing", shellQml})
} else {
results = append(results, checkResult{catInstallation, "shell.qml", statusOK, "Present", shellQml})
}
if doctorVerbose {
installType := "Unknown"
switch {
case strings.Contains(dmsPath, "/nix/store"):
installType = "Nix store"
case strings.Contains(dmsPath, ".local/share") || strings.Contains(dmsPath, "/usr/share"):
installType = "System package"
case strings.Contains(dmsPath, ".config"):
installType = "User config"
}
results = append(results, checkResult{catInstallation, "Install Type", statusInfo, installType, dmsPath})
}
return results
}
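// checkWindowManagers reports every supported compositor found on PATH with
// its detected version, errors when none is installed, and notes which one is
// currently running based on environment variables.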
func checkWindowManagers() []checkResult {
compositors := []struct {
name, versionCmd, versionArg string
versionRegex *regexp.Regexp
commands []string
}{
{"Hyprland", "hyprctl", "version", hyprlandVersionRegex, []string{"hyprland", "Hyprland"}},
{"niri", "niri", "--version", niriVersionRegex, []string{"niri"}},
{"Sway", "sway", "--version", swayVersionRegex, []string{"sway"}},
{"River", "river", "-version", riverVersionRegex, []string{"river"}},
{"Wayfire", "wayfire", "--version", wayfireVersionRegex, []string{"wayfire"}},
}
var results []checkResult
foundAny := false
for _, c := range compositors {
if slices.ContainsFunc(c.commands, utils.CommandExists) {
foundAny = true
var compositorPath string
for _, cmd := range c.commands {
if path, err := exec.LookPath(cmd); err == nil {
compositorPath = path
break
}
}
details := ""
if doctorVerbose && compositorPath != "" {
details = compositorPath
}
results = append(results, checkResult{
catCompositor, c.name, statusOK,
getVersionFromCommand(c.versionCmd, c.versionArg, c.versionRegex), details,
})
}
}
if !foundAny {
results = append(results, checkResult{
catCompositor, "Compositor", statusError,
"No supported Wayland compositor found",
"Install Hyprland, niri, Sway, River, or Wayfire",
})
}
if wm := detectRunningWM(); wm != "" {
results = append(results, checkResult{catCompositor, "Active", statusInfo, wm, ""})
}
return results
}
func getVersionFromCommand(cmd, arg string, regex *regexp.Regexp) string {
output, err := exec.Command(cmd, arg).Output()
if err != nil {
return "installed"
}
outStr := string(output)
if matches := regex.FindStringSubmatch(outStr); len(matches) > 1 {
ver := matches[1]
if strings.Contains(outStr, "git") || strings.Contains(outStr, "dirty") {
return ver + " (git)"
}
return ver
}
return strings.TrimSpace(outStr)
}
func detectRunningWM() string {
switch {
case os.Getenv("HYPRLAND_INSTANCE_SIGNATURE") != "":
return "Hyprland"
case os.Getenv("NIRI_SOCKET") != "":
return "niri"
case os.Getenv("XDG_CURRENT_DESKTOP") != "":
return os.Getenv("XDG_CURRENT_DESKTOP")
}
return ""
}
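// checkQuickshellFeatures probes optional Quickshell modules by running a
// throwaway QML shell that attempts to instantiate each type and logs
// FEATURE:<name>:OK or FEATURE:<name>:UNAVAILABLE markers, then parses that
// output. The second return value reports whether any feature is missing.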
func checkQuickshellFeatures() ([]checkResult, bool) {
if !utils.CommandExists("qs") {
return nil, false
}
tmpDir := os.TempDir()
testScript := filepath.Join(tmpDir, "qs-feature-test.qml")
defer os.Remove(testScript)
qmlContent := `
import QtQuick
import Quickshell
ShellRoot {
id: root
property bool polkitAvailable: false
property bool idleMonitorAvailable: false
property bool idleInhibitorAvailable: false
property bool shortcutInhibitorAvailable: false
Timer {
interval: 50
running: true
repeat: false
onTriggered: {
try {
var polkitTest = Qt.createQmlObject(
'import Quickshell.Services.Polkit; import QtQuick; Item {}',
root
)
root.polkitAvailable = true
polkitTest.destroy()
} catch (e) {}
try {
var testItem = Qt.createQmlObject(
'import Quickshell.Wayland; import QtQuick; QtObject { ' +
'readonly property bool hasIdleMonitor: typeof IdleMonitor !== "undefined"; ' +
'readonly property bool hasIdleInhibitor: typeof IdleInhibitor !== "undefined"; ' +
'readonly property bool hasShortcutInhibitor: typeof ShortcutInhibitor !== "undefined" ' +
'}',
root
)
root.idleMonitorAvailable = testItem.hasIdleMonitor
root.idleInhibitorAvailable = testItem.hasIdleInhibitor
root.shortcutInhibitorAvailable = testItem.hasShortcutInhibitor
testItem.destroy()
} catch (e) {}
console.warn(root.polkitAvailable ? "FEATURE:Polkit:OK" : "FEATURE:Polkit:UNAVAILABLE")
console.warn(root.idleMonitorAvailable ? "FEATURE:IdleMonitor:OK" : "FEATURE:IdleMonitor:UNAVAILABLE")
console.warn(root.idleInhibitorAvailable ? "FEATURE:IdleInhibitor:OK" : "FEATURE:IdleInhibitor:UNAVAILABLE")
console.warn(root.shortcutInhibitorAvailable ? "FEATURE:ShortcutInhibitor:OK" : "FEATURE:ShortcutInhibitor:UNAVAILABLE")
Quickshell.execDetached(["kill", "-TERM", String(Quickshell.processId)])
}
}
}
`
if err := os.WriteFile(testScript, []byte(qmlContent), 0644); err != nil {
return nil, false
}
cmd := exec.Command("qs", "-p", testScript)
cmd.Env = append(os.Environ(), "NO_COLOR=1")
output, _ := cmd.CombinedOutput()
outputStr := string(output)
features := []struct{ name, desc string }{
{"Polkit", "Authentication prompts"},
{"IdleMonitor", "Idle detection"},
{"IdleInhibitor", "Prevent idle/sleep"},
{"ShortcutInhibitor", "Allow shortcut management (niri)"},
}
var results []checkResult
missingFeatures := false
for _, f := range features {
available := strings.Contains(outputStr, fmt.Sprintf("FEATURE:%s:OK", f.name))
status, message := statusOK, "Available"
if !available {
status, message = statusInfo, "Not available"
missingFeatures = true
}
results = append(results, checkResult{catQuickshellFeatures, f.name, status, message, f.desc})
}
return results, missingFeatures
}
func checkI2CAvailability() checkResult {
ddc, err := brightness.NewDDCBackend()
if err != nil {
return checkResult{catOptionalFeatures, "I2C/DDC", statusInfo, "Not available", "External monitor brightness control"}
}
defer ddc.Close()
devices, err := ddc.GetDevices()
if err != nil || len(devices) == 0 {
return checkResult{catOptionalFeatures, "I2C/DDC", statusInfo, "No monitors detected", "External monitor brightness control"}
}
return checkResult{catOptionalFeatures, "I2C/DDC", statusOK, fmt.Sprintf("%d monitor(s) detected", len(devices)), "External monitor brightness control"}
}
func detectNetworkBackend() string {
result, err := network.DetectNetworkStack()
if err != nil {
return ""
}
switch result.Backend {
case network.BackendNetworkManager:
return "NetworkManager"
case network.BackendIwd:
return "iwd"
case network.BackendNetworkd:
if result.HasIwd {
return "iwd + systemd-networkd"
}
return "systemd-networkd"
case network.BackendConnMan:
return "ConnMan"
default:
return ""
}
}
func checkOptionalDependencies() []checkResult {
var results []checkResult
if utils.IsServiceActive("accounts-daemon", false) {
results = append(results, checkResult{catOptionalFeatures, "accountsservice", statusOK, "Running", "User accounts"})
} else {
results = append(results, checkResult{catOptionalFeatures, "accountsservice", statusWarn, "Not running", "User accounts"})
}
if utils.IsServiceActive("power-profiles-daemon", false) {
results = append(results, checkResult{catOptionalFeatures, "power-profiles-daemon", statusOK, "Running", "Power profile management"})
} else {
results = append(results, checkResult{catOptionalFeatures, "power-profiles-daemon", statusInfo, "Not running", "Power profile management"})
}
i2cStatus := checkI2CAvailability()
results = append(results, i2cStatus)
terminals := []string{"ghostty", "kitty", "alacritty", "foot", "wezterm"}
if idx := slices.IndexFunc(terminals, utils.CommandExists); idx >= 0 {
results = append(results, checkResult{catOptionalFeatures, "Terminal", statusOK, terminals[idx], ""})
} else {
results = append(results, checkResult{catOptionalFeatures, "Terminal", statusWarn, "None found", "Install ghostty, kitty, or alacritty"})
}
deps := []struct {
name, cmd, altCmd, desc string
important bool
}{
{"matugen", "matugen", "", "Dynamic theming", true},
{"dgop", "dgop", "", "System monitoring", true},
{"cava", "cava", "", "Audio waveform", false},
{"khal", "khal", "", "Calendar events", false},
{"Network", "nmcli", "iwctl", "Network management", false},
{"danksearch", "dsearch", "", "File search", false},
{"loginctl", "loginctl", "", "Session management", false},
{"fprintd", "fprintd-list", "", "Fingerprint auth", false},
}
for _, d := range deps {
found, foundCmd := utils.CommandExists(d.cmd), d.cmd
if !found && d.altCmd != "" {
if utils.CommandExists(d.altCmd) {
found, foundCmd = true, d.altCmd
}
}
if found {
message := "Installed"
details := d.desc
if d.name == "Network" {
result, err := network.DetectNetworkStack()
if err == nil && result.Backend != network.BackendNone {
message = detectNetworkBackend() + " (active)"
if doctorVerbose {
details = result.ChosenReason
}
} else {
switch foundCmd {
case "nmcli":
message = "NetworkManager (installed)"
case "iwctl":
message = "iwd (installed)"
}
}
}
results = append(results, checkResult{catOptionalFeatures, d.name, statusOK, message, details})
} else if d.important {
results = append(results, checkResult{catOptionalFeatures, d.name, statusWarn, "Missing", d.desc})
} else {
results = append(results, checkResult{catOptionalFeatures, d.name, statusInfo, "Not installed", d.desc})
}
}
return results
}
func checkConfigurationFiles() []checkResult {
configDir, _ := os.UserConfigDir()
cacheDir, _ := os.UserCacheDir()
dmsDir := "DankMaterialShell"
configFiles := []struct{ name, path string }{
{"settings.json", filepath.Join(configDir, dmsDir, "settings.json")},
{"clsettings.json", filepath.Join(configDir, dmsDir, "clsettings.json")},
{"plugin_settings.json", filepath.Join(configDir, dmsDir, "plugin_settings.json")},
{"session.json", filepath.Join(utils.XDGStateHome(), dmsDir, "session.json")},
{"dms-colors.json", filepath.Join(cacheDir, dmsDir, "dms-colors.json")},
}
var results []checkResult
for _, cf := range configFiles {
info, err := os.Stat(cf.path)
if err == nil {
status := statusOK
message := "Present"
if info.Mode().Perm()&0200 == 0 {
status = statusWarn
message += " (read-only)"
}
results = append(results, checkResult{catConfigFiles, cf.name, status, message, cf.path})
} else {
results = append(results, checkResult{catConfigFiles, cf.name, statusInfo, "Not yet created", cf.path})
}
}
return results
}
func checkSystemdServices() []checkResult {
if !utils.CommandExists("systemctl") {
return nil
}
var results []checkResult
dmsState := getServiceState("dms", true)
if !dmsState.exists {
results = append(results, checkResult{catServices, "dms.service", statusInfo, "Not installed", "Optional user service"})
} else {
status, message := statusOK, dmsState.enabled
if dmsState.active != "" {
message = fmt.Sprintf("%s, %s", dmsState.enabled, dmsState.active)
}
if dmsState.enabled == "disabled" {
status, message = statusWarn, "Disabled"
} else if dmsState.active == "failed" || dmsState.active == "inactive" {
status = statusError
}
results = append(results, checkResult{catServices, "dms.service", status, message, ""})
}
greetdState := getServiceState("greetd", false)
if greetdState.exists {
status := statusOK
if greetdState.enabled == "disabled" {
status = statusInfo
}
results = append(results, checkResult{catServices, "greetd", status, greetdState.enabled, ""})
} else if doctorVerbose {
results = append(results, checkResult{catServices, "greetd", statusInfo, "Not installed", "Optional greeter service"})
}
return results
}
type serviceState struct {
exists bool
enabled string
active string
}
func getServiceState(name string, userService bool) serviceState {
args := []string{"is-enabled", name}
if userService {
args = []string{"--user", "is-enabled", name}
}
output, _ := exec.Command("systemctl", args...).Output()
enabled := strings.TrimSpace(string(output))
if enabled == "" || enabled == "not-found" {
return serviceState{}
}
state := serviceState{exists: true, enabled: enabled}
if userService {
output, _ = exec.Command("systemctl", "--user", "is-active", name).Output()
if active := strings.TrimSpace(string(output)); active != "" && active != "unknown" {
state.active = active
}
}
return state
}
func printResults(results []checkResult) {
theme := tui.TerminalTheme()
styles := tui.NewStyles(theme)
currentCategory := category(-1)
for _, r := range results {
if r.category != currentCategory {
if currentCategory != -1 {
fmt.Println()
}
fmt.Printf(" %s\n", styles.Bold.Render(r.category.String()))
currentCategory = r.category
}
printResultLine(r, styles)
}
}
func printResultsJSON(results []checkResult) {
var ds DoctorStatus
for _, r := range results {
ds.Add(r)
}
output := doctorOutputJSON{}
output.Summary.Errors = ds.ErrorCount()
output.Summary.Warnings = ds.WarningCount()
output.Summary.OK = ds.OKCount()
output.Summary.Info = len(ds.Info)
output.Results = make([]checkResultJSON, 0, len(results))
for _, r := range results {
output.Results = append(output.Results, r.toJSON())
}
encoder := json.NewEncoder(os.Stdout)
encoder.SetIndent("", " ")
if err := encoder.Encode(output); err != nil {
fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err)
os.Exit(1)
}
}
func printResultLine(r checkResult, styles tui.Styles) {
icon, style := r.status.IconStyle(styles)
name := r.name
nameLen := len(name)
if nameLen > checkNameMaxLength {
name = name[:checkNameMaxLength-1] + "…"
nameLen = checkNameMaxLength
}
dots := strings.Repeat("·", checkNameMaxLength-nameLen)
fmt.Printf(" %s %s %s %s\n", style.Render(icon), name, styles.Subtle.Render(dots), r.message)
if doctorVerbose && r.details != "" {
fmt.Printf(" %s\n", styles.Subtle.Render("└─ "+r.details))
}
}
func printSummary(results []checkResult, qsMissingFeatures bool) {
theme := tui.TerminalTheme()
styles := tui.NewStyles(theme)
var ds DoctorStatus
for _, r := range results {
ds.Add(r)
}
fmt.Println()
fmt.Printf(" %s\n", styles.Subtle.Render("──────────────────────────────────────"))
if !ds.HasIssues() {
fmt.Printf(" %s\n", styles.Success.Render("✓ All checks passed!"))
} else {
var parts []string
if ds.ErrorCount() > 0 {
parts = append(parts, styles.Error.Render(fmt.Sprintf("%d error(s)", ds.ErrorCount())))
}
if ds.WarningCount() > 0 {
parts = append(parts, styles.Warning.Render(fmt.Sprintf("%d warning(s)", ds.WarningCount())))
}
parts = append(parts, styles.Success.Render(fmt.Sprintf("%d ok", ds.OKCount())))
fmt.Printf(" %s\n", strings.Join(parts, ", "))
if qsMissingFeatures {
fmt.Println()
fmt.Printf(" %s\n", styles.Subtle.Render("→ Consider using quickshell-git for full feature support"))
}
}
fmt.Println()
}
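For downstream tooling, a minimal consumer sketch (not part of the repository, assuming the CLI is installed as dms as elsewhere in this compare): run dms doctor --json and decode the summary block using the field names defined by doctorOutputJSON above.

package main

import (
    "encoding/json"
    "fmt"
    "os/exec"
)

type doctorSummary struct {
    Summary struct {
        Errors   int `json:"errors"`
        Warnings int `json:"warnings"`
        OK       int `json:"ok"`
        Info     int `json:"info"`
    } `json:"summary"`
}

func main() {
    out, err := exec.Command("dms", "doctor", "--json").Output()
    if err != nil {
        panic(err)
    }
    var d doctorSummary
    if err := json.Unmarshal(out, &d); err != nil {
        panic(err)
    }
    fmt.Printf("errors=%d warnings=%d ok=%d info=%d\n",
        d.Summary.Errors, d.Summary.Warnings, d.Summary.OK, d.Summary.Info)
}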


@@ -377,7 +377,7 @@ func updateDMSBinary() error {
}
version := ""
for line := range strings.SplitSeq(string(output), "\n") {
for _, line := range strings.Split(string(output), "\n") {
if strings.Contains(line, "\"tag_name\"") {
parts := strings.Split(line, "\"")
if len(parts) >= 4 {
@@ -443,7 +443,7 @@ func updateDMSBinary() error {
decompressedPath := filepath.Join(tempDir, "dms")
if err := os.Chmod(decompressedPath, 0o755); err != nil {
if err := os.Chmod(decompressedPath, 0755); err != nil {
return fmt.Errorf("failed to make binary executable: %w", err)
}


@@ -211,8 +211,8 @@ func checkGroupExists(groupName string) bool {
return false
}
lines := strings.SplitSeq(string(data), "\n")
for line := range lines {
lines := strings.Split(string(data), "\n")
for _, line := range lines {
if strings.HasPrefix(line, groupName+":") {
return true
}
@@ -521,7 +521,7 @@ func enableGreeter() error {
newConfig := strings.Join(finalLines, "\n")
tmpFile := "/tmp/greetd-config.toml"
if err := os.WriteFile(tmpFile, []byte(newConfig), 0o644); err != nil {
if err := os.WriteFile(tmpFile, []byte(newConfig), 0644); err != nil {
return fmt.Errorf("failed to write temp config: %w", err)
}
@@ -592,8 +592,8 @@ func checkGreeterStatus() error {
if data, err := os.ReadFile(configPath); err == nil {
configContent := string(data)
if strings.Contains(configContent, "dms-greeter") {
lines := strings.SplitSeq(configContent, "\n")
for line := range lines {
lines := strings.Split(configContent, "\n")
for _, line := range lines {
trimmed := strings.TrimSpace(line)
if strings.HasPrefix(trimmed, "command =") || strings.HasPrefix(trimmed, "command=") {
parts := strings.SplitN(trimmed, "=", 2)


@@ -2,12 +2,15 @@ package main
import (
"context"
"encoding/json"
"fmt"
"net"
"os"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/matugen"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server"
"github.com/spf13/cobra"
)
@@ -46,7 +49,6 @@ func init() {
cmd.Flags().String("stock-colors", "", "Stock theme colors JSON")
cmd.Flags().Bool("sync-mode-with-portal", false, "Sync color scheme with GNOME portal")
cmd.Flags().Bool("terminals-always-dark", false, "Force terminal themes to dark variant")
cmd.Flags().String("skip-templates", "", "Comma-separated list of templates to skip")
}
matugenQueueCmd.Flags().Bool("wait", true, "Wait for completion")
@@ -66,7 +68,6 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
stockColors, _ := cmd.Flags().GetString("stock-colors")
syncModeWithPortal, _ := cmd.Flags().GetBool("sync-mode-with-portal")
terminalsAlwaysDark, _ := cmd.Flags().GetBool("terminals-always-dark")
skipTemplates, _ := cmd.Flags().GetString("skip-templates")
return matugen.Options{
StateDir: stateDir,
@@ -74,14 +75,13 @@ func buildMatugenOptions(cmd *cobra.Command) matugen.Options {
ConfigDir: configDir,
Kind: kind,
Value: value,
Mode: matugen.ColorMode(mode),
Mode: mode,
IconTheme: iconTheme,
MatugenType: matugenType,
RunUserTemplates: runUserTemplates,
StockColors: stockColors,
SyncModeWithPortal: syncModeWithPortal,
TerminalsAlwaysDark: terminalsAlwaysDark,
SkipTemplates: skipTemplates,
}
}
@@ -97,10 +97,33 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
wait, _ := cmd.Flags().GetBool("wait")
timeout, _ := cmd.Flags().GetDuration("timeout")
request := models.Request{
ID: 1,
Method: "matugen.queue",
Params: map[string]any{
socketPath := os.Getenv("DMS_SOCKET")
if socketPath == "" {
var err error
socketPath, err = server.FindSocket()
if err != nil {
log.Info("No socket available, running synchronously")
if err := matugen.Run(opts); err != nil {
log.Fatalf("Theme generation failed: %v", err)
}
return
}
}
conn, err := net.Dial("unix", socketPath)
if err != nil {
log.Info("Socket connection failed, running synchronously")
if err := matugen.Run(opts); err != nil {
log.Fatalf("Theme generation failed: %v", err)
}
return
}
defer conn.Close()
request := map[string]any{
"id": 1,
"method": "matugen.queue",
"params": map[string]any{
"stateDir": opts.StateDir,
"shellDir": opts.ShellDir,
"configDir": opts.ConfigDir,
@@ -113,19 +136,15 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
"stockColors": opts.StockColors,
"syncModeWithPortal": opts.SyncModeWithPortal,
"terminalsAlwaysDark": opts.TerminalsAlwaysDark,
"skipTemplates": opts.SkipTemplates,
"wait": wait,
},
}
if err := json.NewEncoder(conn).Encode(request); err != nil {
log.Fatalf("Failed to send request: %v", err)
}
if !wait {
if err := sendServerRequestFireAndForget(request); err != nil {
log.Info("Server unavailable, running synchronously")
if err := matugen.Run(opts); err != nil {
log.Fatalf("Theme generation failed: %v", err)
}
return
}
fmt.Println("Theme generation queued")
return
}
@@ -135,18 +154,17 @@ func runMatugenQueue(cmd *cobra.Command, args []string) {
resultCh := make(chan error, 1)
go func() {
resp, ok := tryServerRequest(request)
if !ok {
log.Info("Server unavailable, running synchronously")
if err := matugen.Run(opts); err != nil {
resultCh <- err
return
}
resultCh <- nil
var response struct {
ID int `json:"id"`
Result any `json:"result"`
Error string `json:"error"`
}
if err := json.NewDecoder(conn).Decode(&response); err != nil {
resultCh <- fmt.Errorf("failed to read response: %w", err)
return
}
if resp.Error != "" {
resultCh <- fmt.Errorf("server error: %s", resp.Error)
if response.Error != "" {
resultCh <- fmt.Errorf("server error: %s", response.Error)
return
}
resultCh <- nil


@@ -1,14 +1,17 @@
package main
import (
"encoding/json"
"fmt"
"mime"
"net"
"net/url"
"os"
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/spf13/cobra"
)
@@ -90,6 +93,32 @@ func mimeTypeToCategories(mimeType string) []string {
}
func runOpen(target string) {
socketPath, err := server.FindSocket()
if err != nil {
log.Warnf("DMS socket not found: %v", err)
fmt.Println("DMS is not running. Please start DMS first.")
os.Exit(1)
}
conn, err := net.Dial("unix", socketPath)
if err != nil {
log.Warnf("DMS socket connection failed: %v", err)
fmt.Println("DMS is not running. Please start DMS first.")
os.Exit(1)
}
defer conn.Close()
buf := make([]byte, 1)
for {
_, err := conn.Read(buf)
if err != nil {
return
}
if buf[0] == '\n' {
break
}
}
// Parse file:// URIs to extract the actual file path
actualTarget := target
detectedMimeType := openMimeType
@@ -131,12 +160,6 @@ func runOpen(target string) {
detectedRequestType = "url"
}
log.Infof("Detected HTTP(S) URL")
} else if strings.HasPrefix(target, "dms://") {
// Handle DMS internal URLs (theme/plugin install, etc.)
if detectedRequestType == "" {
detectedRequestType = "url"
}
log.Infof("Detected DMS internal URL")
} else if _, err := os.Stat(target); err == nil {
// Handle local file paths directly (not file:// URIs)
// Convert to absolute path
@@ -183,7 +206,7 @@ func runOpen(target string) {
}
method := "apppicker.open"
if detectedMimeType == "" && len(detectedCategories) == 0 && (strings.HasPrefix(target, "http://") || strings.HasPrefix(target, "https://") || strings.HasPrefix(target, "dms://")) {
if detectedMimeType == "" && len(detectedCategories) == 0 && (strings.HasPrefix(target, "http://") || strings.HasPrefix(target, "https://")) {
method = "browser.open"
params["url"] = target
}
@@ -196,9 +219,8 @@ func runOpen(target string) {
log.Infof("Sending request - Method: %s, Params: %+v", method, params)
if err := sendServerRequestFireAndForget(req); err != nil {
fmt.Println("DMS is not running. Please start DMS first.")
os.Exit(1)
if err := json.NewEncoder(conn).Encode(req); err != nil {
log.Fatalf("Failed to send request: %v", err)
}
log.Infof("Request sent successfully")


@@ -50,18 +50,15 @@ func findConfig(cmd *cobra.Command, args []string) error {
configStateFile := filepath.Join(getRuntimeDir(), "danklinux.path")
if data, readErr := os.ReadFile(configStateFile); readErr == nil {
if len(getAllDMSPIDs()) == 0 {
os.Remove(configStateFile)
} else {
statePath := strings.TrimSpace(string(data))
shellPath := filepath.Join(statePath, "shell.qml")
statePath := strings.TrimSpace(string(data))
shellPath := filepath.Join(statePath, "shell.qml")
if info, statErr := os.Stat(shellPath); statErr == nil && !info.IsDir() {
log.Debug("Using config from active session state file: %s", statePath)
configPath = statePath
log.Debug("Using config from: %s", configPath)
return nil
}
if info, statErr := os.Stat(shellPath); statErr == nil && !info.IsDir() {
log.Debug("Using config from active session state file: %s", statePath)
configPath = statePath
log.Debug("Using config from: %s", configPath)
return nil
} else {
os.Remove(configStateFile)
}
}


@@ -4,10 +4,10 @@ import (
"bytes"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard"
"github.com/AvengeMedia/DankMaterialShell/core/internal/screenshot"
"github.com/spf13/cobra"
)
@@ -257,7 +257,9 @@ func copyImageToClipboard(buf *screenshot.ShmBuffer, format screenshot.Format, q
}
}
return clipboard.Copy(data.Bytes(), mimeType)
cmd := exec.Command("wl-copy", "--type", mimeType)
cmd.Stdin = &data
return cmd.Run()
}
func writeImageToStdout(buf *screenshot.ShmBuffer, format screenshot.Format, quality int, pixelFormat uint32) error {


@@ -87,14 +87,20 @@ func newDPMSClient() (*dpmsClient, error) {
switch e.Interface {
case wlr_output_power.ZwlrOutputPowerManagerV1InterfaceName:
powerMgr := wlr_output_power.NewZwlrOutputPowerManagerV1(c.ctx)
version := min(e.Version, 1)
version := e.Version
if version > 1 {
version = 1
}
if err := registry.Bind(e.Name, e.Interface, version, powerMgr); err == nil {
c.powerMgr = powerMgr
}
case "wl_output":
output := wlclient.NewOutput(c.ctx)
version := min(e.Version, 4)
version := e.Version
if version > 4 {
version = 4
}
if err := registry.Bind(e.Name, e.Interface, version, output); err == nil {
outputID := fmt.Sprintf("output-%d", output.ID())
state := &outputState{


@@ -1,114 +0,0 @@
package main
import (
"bufio"
"encoding/json"
"fmt"
"net"
"os"
"path/filepath"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
)
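// sendServerRequest opens a connection to the DMS server socket, discards the
// initial capabilities line, writes one newline-terminated JSON request, and
// decodes a single JSON response line.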
func sendServerRequest(req models.Request) (*models.Response[any], error) {
socketPath := getServerSocketPath()
conn, err := net.Dial("unix", socketPath)
if err != nil {
return nil, fmt.Errorf("failed to connect to server (is it running?): %w", err)
}
defer conn.Close()
scanner := bufio.NewScanner(conn)
scanner.Scan() // discard initial capabilities message
reqData, err := json.Marshal(req)
if err != nil {
return nil, fmt.Errorf("failed to marshal request: %w", err)
}
if _, err := conn.Write(reqData); err != nil {
return nil, fmt.Errorf("failed to write request: %w", err)
}
if _, err := conn.Write([]byte("\n")); err != nil {
return nil, fmt.Errorf("failed to write newline: %w", err)
}
if !scanner.Scan() {
return nil, fmt.Errorf("failed to read response")
}
var resp models.Response[any]
if err := json.Unmarshal(scanner.Bytes(), &resp); err != nil {
return nil, fmt.Errorf("failed to unmarshal response: %w", err)
}
return &resp, nil
}
// sendServerRequestFireAndForget sends a request without waiting for a response.
// Useful for commands that trigger UI or async operations.
func sendServerRequestFireAndForget(req models.Request) error {
socketPath := getServerSocketPath()
conn, err := net.Dial("unix", socketPath)
if err != nil {
return fmt.Errorf("failed to connect to server (is it running?): %w", err)
}
defer conn.Close()
scanner := bufio.NewScanner(conn)
scanner.Scan() // discard initial capabilities message
reqData, err := json.Marshal(req)
if err != nil {
return fmt.Errorf("failed to marshal request: %w", err)
}
if _, err := conn.Write(reqData); err != nil {
return fmt.Errorf("failed to write request: %w", err)
}
if _, err := conn.Write([]byte("\n")); err != nil {
return fmt.Errorf("failed to write newline: %w", err)
}
return nil
}
// tryServerRequest attempts to send a request but returns false if server unavailable.
// Does not log errors - caller can decide what to do on failure.
func tryServerRequest(req models.Request) (*models.Response[any], bool) {
resp, err := sendServerRequest(req)
if err != nil {
return nil, false
}
return resp, true
}
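// getServerSocketPath looks for danklinux.sock (or a danklinux-*.sock variant)
// in XDG_RUNTIME_DIR and falls back to the server package's default path.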
func getServerSocketPath() string {
runtimeDir := os.Getenv("XDG_RUNTIME_DIR")
if runtimeDir == "" {
runtimeDir = os.TempDir()
}
entries, err := os.ReadDir(runtimeDir)
if err != nil {
return filepath.Join(runtimeDir, "danklinux.sock")
}
for _, entry := range entries {
name := entry.Name()
if name == "danklinux.sock" {
return filepath.Join(runtimeDir, name)
}
if len(name) > 10 && name[:10] == "danklinux-" && filepath.Ext(name) == ".sock" {
return filepath.Join(runtimeDir, name)
}
}
return server.GetSocketPath()
}
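A self-contained sketch of the wire protocol these helpers implement: connect to the unix socket, discard the capabilities greeting, send one newline-terminated JSON request, then read a single JSON response line. The socket path resolution is simplified here, and clipboard.getConfig stands in for any method used elsewhere in this compare.

package main

import (
    "bufio"
    "encoding/json"
    "fmt"
    "net"
    "os"
    "path/filepath"
)

func main() {
    // Simplified path; the helper above also scans for danklinux-*.sock variants.
    sock := filepath.Join(os.Getenv("XDG_RUNTIME_DIR"), "danklinux.sock")
    conn, err := net.Dial("unix", sock)
    if err != nil {
        panic(err)
    }
    defer conn.Close()

    lines := bufio.NewScanner(conn)
    lines.Scan() // discard the capabilities greeting sent on connect

    req, _ := json.Marshal(map[string]any{"id": 1, "method": "clipboard.getConfig"})
    if _, err := conn.Write(append(req, '\n')); err != nil {
        panic(err)
    }

    if lines.Scan() {
        fmt.Println(lines.Text()) // raw JSON response line
    }
}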


@@ -7,10 +7,8 @@ import (
"os/exec"
"os/signal"
"path/filepath"
"slices"
"strconv"
"strings"
"sync"
"syscall"
"time"
@@ -20,25 +18,6 @@ import (
type ipcTargets map[string]map[string][]string
// getProcessExitCode returns the exit code from a ProcessState.
// For normal exits, returns the exit code directly.
// For signal termination, returns 128 + signal number (Unix convention).
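// For example, a child terminated by SIGTERM (15) is reported as 143 and one
// killed by SIGKILL (9) as 137.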
func getProcessExitCode(state *os.ProcessState) int {
if state == nil {
return 1
}
if code := state.ExitCode(); code != -1 {
return code
}
// Process was killed by signal - extract signal number
if status, ok := state.Sys().(syscall.WaitStatus); ok {
if status.Signaled() {
return 128 + int(status.Signal())
}
}
return 1
}
var isSessionManaged bool
func execDetachedRestart(targetPID int) {
@@ -186,10 +165,8 @@ func runShellInteractive(session bool) {
cmd := exec.CommandContext(ctx, "qs", "-p", configPath)
cmd.Env = append(os.Environ(), "DMS_SOCKET="+socketPath)
if os.Getenv("QT_LOGGING_RULES") == "" {
if qtRules := log.GetQtLoggingRules(); qtRules != "" {
cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
}
if qtRules := log.GetQtLoggingRules(); qtRules != "" {
cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
}
if isSessionManaged && hasSystemdRun() {
@@ -203,16 +180,6 @@ func runShellInteractive(session bool) {
}
}
if os.Getenv("QT_QPA_PLATFORMTHEME") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME=gtk3")
}
if os.Getenv("QT_QPA_PLATFORMTHEME_QT6") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME_QT6=gtk3")
}
if os.Getenv("QT_QPA_PLATFORM") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORM=wayland")
}
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
@@ -247,28 +214,14 @@ func runShellInteractive(session bool) {
for {
select {
case sig := <-sigChan:
if sig == syscall.SIGUSR1 {
if isSessionManaged {
log.Infof("Received SIGUSR1, exiting for systemd restart...")
cancel()
cmd.Process.Signal(syscall.SIGTERM)
os.Remove(socketPath)
os.Exit(1)
}
// Handle SIGUSR1 restart for non-session managed processes
if sig == syscall.SIGUSR1 && !isSessionManaged {
log.Infof("Received SIGUSR1, spawning detached restart process...")
execDetachedRestart(os.Getpid())
// Exit immediately to avoid race conditions with detached restart
return
}
// Check if qs already crashed before we got SIGTERM (systemd sends SIGTERM when D-Bus name is released)
select {
case <-errChan:
cancel()
os.Remove(socketPath)
os.Exit(getProcessExitCode(cmd.ProcessState))
case <-time.After(500 * time.Millisecond):
}
log.Infof("\nReceived signal %v, shutting down...", sig)
cancel()
cmd.Process.Signal(syscall.SIGTERM)
@@ -282,7 +235,7 @@ func runShellInteractive(session bool) {
cmd.Process.Signal(syscall.SIGTERM)
}
os.Remove(socketPath)
os.Exit(getProcessExitCode(cmd.ProcessState))
os.Exit(1)
}
}
}
@@ -375,7 +328,13 @@ func killShell() {
func runShellDaemon(session bool) {
isSessionManaged = session
isDaemonChild := slices.Contains(os.Args, "--daemon-child")
isDaemonChild := false
for _, arg := range os.Args {
if arg == "--daemon-child" {
isDaemonChild = true
break
}
}
if !isDaemonChild {
fmt.Fprintf(os.Stderr, "dms %s\n", Version)
@@ -426,10 +385,8 @@ func runShellDaemon(session bool) {
cmd := exec.CommandContext(ctx, "qs", "-p", configPath)
cmd.Env = append(os.Environ(), "DMS_SOCKET="+socketPath)
if os.Getenv("QT_LOGGING_RULES") == "" {
if qtRules := log.GetQtLoggingRules(); qtRules != "" {
cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
}
if qtRules := log.GetQtLoggingRules(); qtRules != "" {
cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
}
if isSessionManaged && hasSystemdRun() {
@@ -443,16 +400,6 @@ func runShellDaemon(session bool) {
}
}
if os.Getenv("QT_QPA_PLATFORMTHEME") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME=gtk3")
}
if os.Getenv("QT_QPA_PLATFORMTHEME_QT6") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORMTHEME_QT6=gtk3")
}
if os.Getenv("QT_QPA_PLATFORM") == "" {
cmd.Env = append(cmd.Env, "QT_QPA_PLATFORM=wayland")
}
devNull, err := os.OpenFile("/dev/null", os.O_RDWR, 0)
if err != nil {
log.Fatalf("Error opening /dev/null: %v", err)
@@ -493,28 +440,15 @@ func runShellDaemon(session bool) {
for {
select {
case sig := <-sigChan:
if sig == syscall.SIGUSR1 {
if isSessionManaged {
log.Infof("Received SIGUSR1, exiting for systemd restart...")
cancel()
cmd.Process.Signal(syscall.SIGTERM)
os.Remove(socketPath)
os.Exit(1)
}
// Handle SIGUSR1 restart for non-session managed processes
if sig == syscall.SIGUSR1 && !isSessionManaged {
log.Infof("Received SIGUSR1, spawning detached restart process...")
execDetachedRestart(os.Getpid())
// Exit immediately to avoid race conditions with detached restart
return
}
// Check if qs already crashed before we got SIGTERM (systemd sends SIGTERM when D-Bus name is released)
select {
case <-errChan:
cancel()
os.Remove(socketPath)
os.Exit(getProcessExitCode(cmd.ProcessState))
case <-time.After(500 * time.Millisecond):
}
// All other signals: clean shutdown
cancel()
cmd.Process.Signal(syscall.SIGTERM)
os.Remove(socketPath)
@@ -526,25 +460,17 @@ func runShellDaemon(session bool) {
cmd.Process.Signal(syscall.SIGTERM)
}
os.Remove(socketPath)
os.Exit(getProcessExitCode(cmd.ProcessState))
os.Exit(1)
}
}
}
var qsHasAnyDisplay = sync.OnceValue(func() bool {
out, err := exec.Command("qs", "ipc", "--help").Output()
if err != nil {
return false
}
return strings.Contains(string(out), "--any-display")
})
func parseTargetsFromIPCShowOutput(output string) ipcTargets {
targets := make(ipcTargets)
var currentTarget string
for line := range strings.SplitSeq(output, "\n") {
if after, ok := strings.CutPrefix(line, "target "); ok {
currentTarget = strings.TrimSpace(after)
for _, line := range strings.Split(output, "\n") {
if strings.HasPrefix(line, "target ") {
currentTarget = strings.TrimSpace(strings.TrimPrefix(line, "target "))
targets[currentTarget] = make(map[string][]string)
}
if strings.HasPrefix(line, " function") && currentTarget != "" {
@@ -569,11 +495,7 @@ func parseTargetsFromIPCShowOutput(output string) ipcTargets {
}
func getShellIPCCompletions(args []string, _ string) []string {
cmdArgs := []string{"ipc"}
if qsHasAnyDisplay() {
cmdArgs = append(cmdArgs, "--any-display")
}
cmdArgs = append(cmdArgs, "-p", configPath, "show")
cmdArgs := []string{"-p", configPath, "ipc", "show"}
cmd := exec.Command("qs", cmdArgs...)
var targets ipcTargets
@@ -627,12 +549,7 @@ func runShellIPCCommand(args []string) {
args = append([]string{"call"}, args...)
}
cmdArgs := []string{"ipc"}
if qsHasAnyDisplay() {
cmdArgs = append(cmdArgs, "--any-display")
}
cmdArgs = append(cmdArgs, "-p", configPath)
cmdArgs = append(cmdArgs, args...)
cmdArgs := append([]string{"-p", configPath, "ipc"}, args...)
cmd := exec.Command("qs", cmdArgs...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout


@@ -17,8 +17,8 @@ func getThemedASCII() string {
logo := `
██████╗ █████╗ ███╗ ██╗██╗ ██╗
██╔══██╗██╔══██╗████╗ ██║██║ ██╔╝
██║ ██║███████║██╔██╗ ██║█████╔╝
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
██║ ██║███████║██╔██╗ ██║█████╔╝
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
██████╔╝██║ ██║██║ ╚████║██║ ██╗
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝`


@@ -3,7 +3,6 @@ package main
import (
"fmt"
"os/exec"
"slices"
"strings"
)
@@ -37,7 +36,13 @@ func checkSystemdServiceEnabled(serviceName string) (string, bool, error) {
if err != nil {
knownStates := []string{"disabled", "masked", "masked-runtime", "not-found", "enabled", "enabled-runtime", "static", "indirect", "alias"}
isKnownState := slices.Contains(knownStates, stateStr)
isKnownState := false
for _, known := range knownStates {
if stateStr == known {
isKnownState = true
break
}
}
if !isKnownState {
return stateStr, false, fmt.Errorf("systemctl is-enabled failed: %w (output: %s)", err, stateStr)


@@ -15,9 +15,7 @@ require (
github.com/sblinch/kdl-go v0.0.0-20250930225324-bf4099d4614a
github.com/spf13/cobra v1.10.1
github.com/stretchr/testify v1.11.1
go.etcd.io/bbolt v1.4.3
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39
golang.org/x/image v0.34.0
)
require (
@@ -67,6 +65,6 @@ require (
github.com/spf13/pflag v1.0.10 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
golang.org/x/sys v0.38.0
golang.org/x/text v0.32.0
golang.org/x/text v0.31.0
gopkg.in/yaml.v3 v3.0.1 // indirect
)


@@ -131,26 +131,20 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo=
go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39 h1:DHNhtq3sNNzrvduZZIiFyXWOL9IWaDPHqTnLJp+rCBY=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=


@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/sh
set -e
@@ -9,8 +9,8 @@ NC='\033[0m' # No Color
# Check for root privileges
if [ "$(id -u)" == "0" ]; then
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
exit 1
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
exit 1
fi
# Check if running on Linux
@@ -22,17 +22,17 @@ fi
# Detect architecture
ARCH=$(uname -m)
case "$ARCH" in
x86_64)
ARCH="amd64"
;;
aarch64)
ARCH="arm64"
;;
*)
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
exit 1
;;
x86_64)
ARCH="amd64"
;;
aarch64)
ARCH="arm64"
;;
*)
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
exit 1
;;
esac
# Get the latest release version
@@ -55,7 +55,7 @@ curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LAT
curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LATEST_VERSION/dankinstall-$ARCH.gz.sha256" -o "expected.sha256"
# Get the expected checksum
EXPECTED_CHECKSUM=$(awk '{print $1}' expected.sha256)
EXPECTED_CHECKSUM=$(cat expected.sha256 | awk '{print $1}')
# Calculate actual checksum
printf "%bVerifying checksum...%b\n" "$GREEN" "$NC"
@@ -67,7 +67,7 @@ if [ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]; then
printf "Expected: %s\n" "$EXPECTED_CHECKSUM"
printf "Got: %s\n" "$ACTUAL_CHECKSUM"
printf "The downloaded file may be corrupted or tampered with\n"
cd - >/dev/null
cd - > /dev/null
rm -rf "$TEMP_DIR"
exit 1
fi
@@ -82,5 +82,5 @@ printf "%bRunning installer...%b\n" "$GREEN" "$NC"
./installer
# Cleanup
cd - >/dev/null
rm -rf "$TEMP_DIR"
cd - > /dev/null
rm -rf "$TEMP_DIR"


@@ -1,332 +0,0 @@
package clipboard
import (
"fmt"
"io"
"os"
"os/exec"
"syscall"
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/ext_data_control"
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
)
func Copy(data []byte, mimeType string) error {
return CopyOpts(data, mimeType, false, false)
}
func CopyOpts(data []byte, mimeType string, foreground, pasteOnce bool) error {
if !foreground {
return copyFork(data, mimeType, pasteOnce)
}
return copyServe(data, mimeType, pasteOnce)
}
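// copyFork re-executes the current binary as "cl copy --foreground" in a new
// session and streams the data over the child's stdin, so the selection keeps
// being served after this process exits.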
func copyFork(data []byte, mimeType string, pasteOnce bool) error {
args := []string{os.Args[0], "cl", "copy", "--foreground"}
if pasteOnce {
args = append(args, "--paste-once")
}
args = append(args, "--type", mimeType)
cmd := exec.Command(args[0], args[1:]...)
cmd.Stdin = nil
cmd.Stdout = nil
cmd.Stderr = nil
cmd.SysProcAttr = &syscall.SysProcAttr{Setsid: true}
stdin, err := cmd.StdinPipe()
if err != nil {
return fmt.Errorf("stdin pipe: %w", err)
}
if err := cmd.Start(); err != nil {
return fmt.Errorf("start: %w", err)
}
if _, err := stdin.Write(data); err != nil {
stdin.Close()
return fmt.Errorf("write stdin: %w", err)
}
stdin.Close()
return nil
}
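// copyServe owns the selection directly: it binds ext_data_control_manager_v1
// and a seat, offers the mime type (plus plain-text aliases), sets the
// selection, and then serves paste requests until the selection is cancelled
// (or after the first paste when pasteOnce is set).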
func copyServe(data []byte, mimeType string, pasteOnce bool) error {
display, err := wlclient.Connect("")
if err != nil {
return fmt.Errorf("wayland connect: %w", err)
}
defer display.Destroy()
ctx := display.Context()
registry, err := display.GetRegistry()
if err != nil {
return fmt.Errorf("get registry: %w", err)
}
defer registry.Destroy()
var dataControlMgr *ext_data_control.ExtDataControlManagerV1
var seat *wlclient.Seat
var bindErr error
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
switch e.Interface {
case "ext_data_control_manager_v1":
dataControlMgr = ext_data_control.NewExtDataControlManagerV1(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, dataControlMgr); err != nil {
bindErr = err
}
case "wl_seat":
if seat != nil {
return
}
seat = wlclient.NewSeat(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, seat); err != nil {
bindErr = err
}
}
})
display.Roundtrip()
display.Roundtrip()
if bindErr != nil {
return fmt.Errorf("registry bind: %w", bindErr)
}
if dataControlMgr == nil {
return fmt.Errorf("compositor does not support ext_data_control_manager_v1")
}
defer dataControlMgr.Destroy()
if seat == nil {
return fmt.Errorf("no seat available")
}
device, err := dataControlMgr.GetDataDevice(seat)
if err != nil {
return fmt.Errorf("get data device: %w", err)
}
defer device.Destroy()
source, err := dataControlMgr.CreateDataSource()
if err != nil {
return fmt.Errorf("create data source: %w", err)
}
if err := source.Offer(mimeType); err != nil {
return fmt.Errorf("offer mime type: %w", err)
}
if mimeType == "text/plain;charset=utf-8" || mimeType == "text/plain" {
if err := source.Offer("text/plain"); err != nil {
return fmt.Errorf("offer text/plain: %w", err)
}
if err := source.Offer("text/plain;charset=utf-8"); err != nil {
return fmt.Errorf("offer text/plain;charset=utf-8: %w", err)
}
if err := source.Offer("UTF8_STRING"); err != nil {
return fmt.Errorf("offer UTF8_STRING: %w", err)
}
if err := source.Offer("STRING"); err != nil {
return fmt.Errorf("offer STRING: %w", err)
}
if err := source.Offer("TEXT"); err != nil {
return fmt.Errorf("offer TEXT: %w", err)
}
}
cancelled := make(chan struct{})
pasted := make(chan struct{}, 1)
source.SetSendHandler(func(e ext_data_control.ExtDataControlSourceV1SendEvent) {
defer syscall.Close(e.Fd)
file := os.NewFile(uintptr(e.Fd), "pipe")
defer file.Close()
file.Write(data)
select {
case pasted <- struct{}{}:
default:
}
})
source.SetCancelledHandler(func(e ext_data_control.ExtDataControlSourceV1CancelledEvent) {
close(cancelled)
})
if err := device.SetSelection(source); err != nil {
return fmt.Errorf("set selection: %w", err)
}
display.Roundtrip()
for {
select {
case <-cancelled:
return nil
case <-pasted:
if pasteOnce {
return nil
}
default:
if err := ctx.Dispatch(); err != nil {
return nil
}
}
}
}
func CopyText(text string) error {
return Copy([]byte(text), "text/plain;charset=utf-8")
}
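// Paste reads the current selection: it binds the data-control manager and a
// seat, waits for the selection offer, picks a preferred mime type, and reads
// the data through a pipe handed to Receive.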
func Paste() ([]byte, string, error) {
display, err := wlclient.Connect("")
if err != nil {
return nil, "", fmt.Errorf("wayland connect: %w", err)
}
defer display.Destroy()
ctx := display.Context()
registry, err := display.GetRegistry()
if err != nil {
return nil, "", fmt.Errorf("get registry: %w", err)
}
defer registry.Destroy()
var dataControlMgr *ext_data_control.ExtDataControlManagerV1
var seat *wlclient.Seat
var bindErr error
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
switch e.Interface {
case "ext_data_control_manager_v1":
dataControlMgr = ext_data_control.NewExtDataControlManagerV1(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, dataControlMgr); err != nil {
bindErr = err
}
case "wl_seat":
if seat != nil {
return
}
seat = wlclient.NewSeat(ctx)
if err := registry.Bind(e.Name, e.Interface, e.Version, seat); err != nil {
bindErr = err
}
}
})
display.Roundtrip()
display.Roundtrip()
if bindErr != nil {
return nil, "", fmt.Errorf("registry bind: %w", bindErr)
}
if dataControlMgr == nil {
return nil, "", fmt.Errorf("compositor does not support ext_data_control_manager_v1")
}
defer dataControlMgr.Destroy()
if seat == nil {
return nil, "", fmt.Errorf("no seat available")
}
device, err := dataControlMgr.GetDataDevice(seat)
if err != nil {
return nil, "", fmt.Errorf("get data device: %w", err)
}
defer device.Destroy()
offerMimeTypes := make(map[*ext_data_control.ExtDataControlOfferV1][]string)
device.SetDataOfferHandler(func(e ext_data_control.ExtDataControlDeviceV1DataOfferEvent) {
if e.Id == nil {
return
}
offerMimeTypes[e.Id] = nil
e.Id.SetOfferHandler(func(me ext_data_control.ExtDataControlOfferV1OfferEvent) {
offerMimeTypes[e.Id] = append(offerMimeTypes[e.Id], me.MimeType)
})
})
var selectionOffer *ext_data_control.ExtDataControlOfferV1
gotSelection := false
device.SetSelectionHandler(func(e ext_data_control.ExtDataControlDeviceV1SelectionEvent) {
selectionOffer = e.Id
gotSelection = true
})
display.Roundtrip()
display.Roundtrip()
if !gotSelection || selectionOffer == nil {
return nil, "", fmt.Errorf("no clipboard data")
}
mimeTypes := offerMimeTypes[selectionOffer]
selectedMime := selectPreferredMimeType(mimeTypes)
if selectedMime == "" {
return nil, "", fmt.Errorf("no supported mime type")
}
r, w, err := os.Pipe()
if err != nil {
return nil, "", fmt.Errorf("create pipe: %w", err)
}
defer r.Close()
if err := selectionOffer.Receive(selectedMime, int(w.Fd())); err != nil {
w.Close()
return nil, "", fmt.Errorf("receive: %w", err)
}
w.Close()
display.Roundtrip()
data, err := io.ReadAll(r)
if err != nil {
return nil, "", fmt.Errorf("read: %w", err)
}
return data, selectedMime, nil
}
func PasteText() (string, error) {
data, _, err := Paste()
if err != nil {
return "", err
}
return string(data), nil
}
func selectPreferredMimeType(mimes []string) string {
preferred := []string{
"text/plain;charset=utf-8",
"text/plain",
"UTF8_STRING",
"STRING",
"TEXT",
"image/png",
"image/jpeg",
}
for _, pref := range preferred {
for _, mime := range mimes {
if mime == pref {
return mime
}
}
}
if len(mimes) > 0 {
return mimes[0]
}
return ""
}
func IsImageMimeType(mime string) bool {
return len(mime) > 6 && mime[:6] == "image/"
}
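
For orientation, a minimal usage sketch of the exported helpers above (CopyText, Paste, IsImageMimeType). The import path is an assumption for illustration only; the package lives under core/internal, so real callers would sit inside the same module.

package main

import (
	"fmt"
	"log"

	"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard" // assumed path
)

func main() {
	// Place UTF-8 text on the clipboard via the ext_data_control protocol.
	if err := clipboard.CopyText("hello from dms"); err != nil {
		log.Fatalf("copy: %v", err)
	}
	// Read the current selection back; Paste negotiates the preferred MIME type.
	data, mime, err := clipboard.Paste()
	if err != nil {
		log.Fatalf("paste: %v", err)
	}
	if clipboard.IsImageMimeType(mime) {
		fmt.Printf("clipboard holds an image (%s, %d bytes)\n", mime, len(data))
		return
	}
	fmt.Printf("clipboard text: %s\n", data)
}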

View File

@@ -1,229 +0,0 @@
package clipboard
import (
"bytes"
"encoding/binary"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"os"
"path/filepath"
"strings"
"time"
_ "golang.org/x/image/bmp"
_ "golang.org/x/image/tiff"
"hash/fnv"
bolt "go.etcd.io/bbolt"
)
type StoreConfig struct {
MaxHistory int
MaxEntrySize int64
}
func DefaultStoreConfig() StoreConfig {
return StoreConfig{
MaxHistory: 100,
MaxEntrySize: 5 * 1024 * 1024,
}
}
type Entry struct {
ID uint64
Data []byte
MimeType string
Preview string
Size int
Timestamp time.Time
IsImage bool
Hash uint64
}
func Store(data []byte, mimeType string) error {
return StoreWithConfig(data, mimeType, DefaultStoreConfig())
}
func StoreWithConfig(data []byte, mimeType string, cfg StoreConfig) error {
if len(data) == 0 {
return nil
}
if int64(len(data)) > cfg.MaxEntrySize {
return fmt.Errorf("data too large: %d > %d", len(data), cfg.MaxEntrySize)
}
dbPath, err := getDBPath()
if err != nil {
return fmt.Errorf("get db path: %w", err)
}
db, err := bolt.Open(dbPath, 0644, &bolt.Options{Timeout: 1 * time.Second})
if err != nil {
return fmt.Errorf("open db: %w", err)
}
defer db.Close()
entry := Entry{
Data: data,
MimeType: mimeType,
Size: len(data),
Timestamp: time.Now(),
IsImage: IsImageMimeType(mimeType),
Hash: computeHash(data),
}
switch {
case entry.IsImage:
entry.Preview = imagePreview(data, mimeType)
default:
entry.Preview = textPreview(data)
}
return db.Update(func(tx *bolt.Tx) error {
b, err := tx.CreateBucketIfNotExists([]byte("clipboard"))
if err != nil {
return err
}
if err := deduplicateInTx(b, entry.Hash); err != nil {
return err
}
id, err := b.NextSequence()
if err != nil {
return err
}
entry.ID = id
encoded, err := encodeEntry(entry)
if err != nil {
return err
}
if err := b.Put(itob(id), encoded); err != nil {
return err
}
return trimLengthInTx(b, cfg.MaxHistory)
})
}
func getDBPath() (string, error) {
cacheDir, err := os.UserCacheDir()
if err != nil {
homeDir, err := os.UserHomeDir()
if err != nil {
return "", err
}
cacheDir = filepath.Join(homeDir, ".cache")
}
dbDir := filepath.Join(cacheDir, "dms-clipboard")
if err := os.MkdirAll(dbDir, 0700); err != nil {
return "", err
}
return filepath.Join(dbDir, "db"), nil
}
func deduplicateInTx(b *bolt.Bucket, hash uint64) error {
c := b.Cursor()
for k, v := c.Last(); k != nil; k, v = c.Prev() {
if extractHash(v) != hash {
continue
}
if err := b.Delete(k); err != nil {
return err
}
}
return nil
}
func trimLengthInTx(b *bolt.Bucket, maxHistory int) error {
c := b.Cursor()
var count int
for k, _ := c.Last(); k != nil; k, _ = c.Prev() {
if count < maxHistory {
count++
continue
}
if err := b.Delete(k); err != nil {
return err
}
}
return nil
}
func encodeEntry(e Entry) ([]byte, error) {
buf := new(bytes.Buffer)
binary.Write(buf, binary.BigEndian, e.ID)
binary.Write(buf, binary.BigEndian, uint32(len(e.Data)))
buf.Write(e.Data)
binary.Write(buf, binary.BigEndian, uint32(len(e.MimeType)))
buf.WriteString(e.MimeType)
binary.Write(buf, binary.BigEndian, uint32(len(e.Preview)))
buf.WriteString(e.Preview)
binary.Write(buf, binary.BigEndian, int32(e.Size))
binary.Write(buf, binary.BigEndian, e.Timestamp.Unix())
if e.IsImage {
buf.WriteByte(1)
} else {
buf.WriteByte(0)
}
binary.Write(buf, binary.BigEndian, e.Hash)
return buf.Bytes(), nil
}
func itob(v uint64) []byte {
b := make([]byte, 8)
binary.BigEndian.PutUint64(b, v)
return b
}
func computeHash(data []byte) uint64 {
h := fnv.New64a()
h.Write(data)
return h.Sum64()
}
func extractHash(data []byte) uint64 {
if len(data) < 8 {
return 0
}
return binary.BigEndian.Uint64(data[len(data)-8:])
}
func textPreview(data []byte) string {
text := string(data)
text = strings.TrimSpace(text)
text = strings.Join(strings.Fields(text), " ")
if len(text) > 100 {
return text[:100] + "…"
}
return text
}
func imagePreview(data []byte, format string) string {
config, imgFmt, err := image.DecodeConfig(bytes.NewReader(data))
if err != nil {
return fmt.Sprintf("[[ image %s %s ]]", sizeStr(len(data)), format)
}
return fmt.Sprintf("[[ image %s %s %dx%d ]]", sizeStr(len(data)), imgFmt, config.Width, config.Height)
}
func sizeStr(size int) string {
units := []string{"B", "KiB", "MiB"}
var i int
fsize := float64(size)
for fsize >= 1024 && i < len(units)-1 {
fsize /= 1024
i++
}
return fmt.Sprintf("%.0f %s", fsize, units[i])
}
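
The file above only writes the binary record; as a sketch of the on-disk layout implied by encodeEntry, a matching reader could look like the following. It assumes the same package and Entry type, plus an extra io import that the original file does not have; it is not part of the diff.

// decodeEntry reverses encodeEntry: ID, length-prefixed Data/MimeType/Preview,
// Size, Unix timestamp, IsImage flag, then the trailing FNV hash.
func decodeEntry(raw []byte) (Entry, error) {
	var e Entry
	buf := bytes.NewReader(raw)
	if err := binary.Read(buf, binary.BigEndian, &e.ID); err != nil {
		return e, err
	}
	readBytes := func() ([]byte, error) {
		var n uint32
		if err := binary.Read(buf, binary.BigEndian, &n); err != nil {
			return nil, err
		}
		b := make([]byte, n)
		if _, err := io.ReadFull(buf, b); err != nil {
			return nil, err
		}
		return b, nil
	}
	data, err := readBytes()
	if err != nil {
		return e, err
	}
	e.Data = data
	mime, err := readBytes()
	if err != nil {
		return e, err
	}
	e.MimeType = string(mime)
	preview, err := readBytes()
	if err != nil {
		return e, err
	}
	e.Preview = string(preview)
	var size int32
	if err := binary.Read(buf, binary.BigEndian, &size); err != nil {
		return e, err
	}
	e.Size = int(size)
	var unix int64
	if err := binary.Read(buf, binary.BigEndian, &unix); err != nil {
		return e, err
	}
	e.Timestamp = time.Unix(unix, 0)
	isImage, err := buf.ReadByte()
	if err != nil {
		return e, err
	}
	e.IsImage = isImage == 1
	if err := binary.Read(buf, binary.BigEndian, &e.Hash); err != nil {
		return e, err
	}
	return e, nil
}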

View File

@@ -1,160 +0,0 @@
package clipboard
import (
"context"
"fmt"
"io"
"os"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/proto/ext_data_control"
wlclient "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
)
type ClipboardChange struct {
Data []byte
MimeType string
}
func Watch(ctx context.Context, callback func(data []byte, mimeType string)) error {
display, err := wlclient.Connect("")
if err != nil {
return fmt.Errorf("wayland connect: %w", err)
}
defer display.Destroy()
wlCtx := display.Context()
registry, err := display.GetRegistry()
if err != nil {
return fmt.Errorf("get registry: %w", err)
}
defer registry.Destroy()
var dataControlMgr *ext_data_control.ExtDataControlManagerV1
var seat *wlclient.Seat
var bindErr error
registry.SetGlobalHandler(func(e wlclient.RegistryGlobalEvent) {
switch e.Interface {
case "ext_data_control_manager_v1":
dataControlMgr = ext_data_control.NewExtDataControlManagerV1(wlCtx)
if err := registry.Bind(e.Name, e.Interface, e.Version, dataControlMgr); err != nil {
bindErr = err
}
case "wl_seat":
if seat != nil {
return
}
seat = wlclient.NewSeat(wlCtx)
if err := registry.Bind(e.Name, e.Interface, e.Version, seat); err != nil {
bindErr = err
}
}
})
display.Roundtrip()
display.Roundtrip()
if bindErr != nil {
return fmt.Errorf("registry bind: %w", bindErr)
}
if dataControlMgr == nil {
return fmt.Errorf("compositor does not support ext_data_control_manager_v1")
}
defer dataControlMgr.Destroy()
if seat == nil {
return fmt.Errorf("no seat available")
}
device, err := dataControlMgr.GetDataDevice(seat)
if err != nil {
return fmt.Errorf("get data device: %w", err)
}
defer device.Destroy()
offerMimeTypes := make(map[*ext_data_control.ExtDataControlOfferV1][]string)
device.SetDataOfferHandler(func(e ext_data_control.ExtDataControlDeviceV1DataOfferEvent) {
if e.Id == nil {
return
}
offerMimeTypes[e.Id] = nil
e.Id.SetOfferHandler(func(me ext_data_control.ExtDataControlOfferV1OfferEvent) {
offerMimeTypes[e.Id] = append(offerMimeTypes[e.Id], me.MimeType)
})
})
device.SetSelectionHandler(func(e ext_data_control.ExtDataControlDeviceV1SelectionEvent) {
if e.Id == nil {
return
}
mimes := offerMimeTypes[e.Id]
selectedMime := selectPreferredMimeType(mimes)
if selectedMime == "" {
return
}
r, w, err := os.Pipe()
if err != nil {
return
}
if err := e.Id.Receive(selectedMime, int(w.Fd())); err != nil {
w.Close()
r.Close()
return
}
w.Close()
go func() {
defer r.Close()
data, err := io.ReadAll(r)
if err != nil || len(data) == 0 {
return
}
callback(data, selectedMime)
}()
})
display.Roundtrip()
display.Roundtrip()
for {
select {
case <-ctx.Done():
return ctx.Err()
default:
if err := wlCtx.SetReadDeadline(time.Now().Add(100 * time.Millisecond)); err != nil {
return fmt.Errorf("set read deadline: %w", err)
}
if err := wlCtx.Dispatch(); err != nil && err != os.ErrDeadlineExceeded {
return fmt.Errorf("dispatch: %w", err)
}
}
}
}
func WatchChan(ctx context.Context) (<-chan ClipboardChange, <-chan error) {
ch := make(chan ClipboardChange, 16)
errCh := make(chan error, 1)
go func() {
defer close(ch)
err := Watch(ctx, func(data []byte, mimeType string) {
select {
case ch <- ClipboardChange{Data: data, MimeType: mimeType}:
default:
}
})
if err != nil && err != context.Canceled {
errCh <- err
}
close(errCh)
}()
time.Sleep(50 * time.Millisecond)
return ch, errCh
}
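
A small consumer sketch for WatchChan above, showing the intended shutdown path via context cancellation. The import path is assumed, as before; everything else uses only the signatures shown in this file.

package main

import (
	"context"
	"log"
	"time"

	"github.com/AvengeMedia/DankMaterialShell/core/internal/clipboard" // assumed path
)

func main() {
	// Watch the Wayland clipboard for ten seconds, then stop via context timeout.
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	changes, errs := clipboard.WatchChan(ctx)
	for {
		select {
		case c, ok := <-changes:
			if !ok {
				return // watcher finished and closed the channel
			}
			log.Printf("clipboard changed: %s (%d bytes)", c.MimeType, len(c.Data))
		case err := <-errs:
			if err != nil && err != context.DeadlineExceeded {
				log.Printf("watch error: %v", err)
			}
			return
		}
	}
}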

View File

@@ -221,7 +221,10 @@ func (p *Picker) handleGlobal(e client.RegistryGlobalEvent) {
case client.OutputInterfaceName:
output := client.NewOutput(p.ctx)
version := min(e.Version, 4)
version := e.Version
if version > 4 {
version = 4
}
if err := p.registry.Bind(e.Name, e.Interface, version, output); err == nil {
p.outputsMu.Lock()
p.outputs[e.Name] = &Output{
@@ -236,14 +239,20 @@ func (p *Picker) handleGlobal(e client.RegistryGlobalEvent) {
case wlr_layer_shell.ZwlrLayerShellV1InterfaceName:
layerShell := wlr_layer_shell.NewZwlrLayerShellV1(p.ctx)
version := min(e.Version, 4)
version := e.Version
if version > 4 {
version = 4
}
if err := p.registry.Bind(e.Name, e.Interface, version, layerShell); err == nil {
p.layerShell = layerShell
}
case wlr_screencopy.ZwlrScreencopyManagerV1InterfaceName:
screencopy := wlr_screencopy.NewZwlrScreencopyManagerV1(p.ctx)
version := min(e.Version, 3)
version := e.Version
if version > 3 {
version = 3
}
if err := p.registry.Bind(e.Name, e.Interface, version, screencopy); err == nil {
p.screencopy = screencopy
}

View File

@@ -1157,7 +1157,7 @@ func drawGlyph(data []byte, stride, width, height, x, y int, r rune, col Color,
rOff, bOff = 2, 0
}
for row := range fontH {
for row := 0; row < fontH; row++ {
yy := y + row
if yy < 0 || yy >= height {
continue
@@ -1165,7 +1165,7 @@ func drawGlyph(data []byte, stride, width, height, x, y int, r rune, col Color,
rowPattern := g[row]
dstRowOff := yy * stride
for colIdx := range fontW {
for colIdx := 0; colIdx < fontW; colIdx++ {
if (rowPattern & (1 << (fontW - 1 - colIdx))) == 0 {
continue
}
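
The hunks in this file and in the picker file above swap Go 1.21/1.22 conveniences (the min builtin, range over an integer) for explicit forms; the same pattern recurs in the test hunks that follow. The motivation is not stated in the diff — presumably compatibility with older toolchains. A standalone sketch of the equivalences:

package main

import "fmt"

func main() {
	const fontH = 4
	version := uint32(7)

	// Go 1.21+ builtin min, as used before the patch.
	clampedA := min(version, 4)

	// Explicit clamp, as used after the patch; builds on older toolchains.
	clampedB := version
	if clampedB > 4 {
		clampedB = 4
	}
	fmt.Println(clampedA, clampedB) // both print 4

	// Go 1.22+ range-over-int form and its classic equivalent.
	for row := range fontH {
		fmt.Println("range form:", row)
	}
	for row := 0; row < fontH; row++ {
		fmt.Println("classic form:", row)
	}
}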

View File

@@ -14,11 +14,11 @@ func TestSurfaceState_ConcurrentPointerMotion(t *testing.T) {
const goroutines = 50
const iterations = 100
for i := range goroutines {
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
for j := range iterations {
for j := 0; j < iterations; j++ {
s.OnPointerMotion(float64(id*10+j), float64(id*10+j))
}
}(i)
@@ -34,21 +34,21 @@ func TestSurfaceState_ConcurrentScaleAccess(t *testing.T) {
const goroutines = 30
const iterations = 100
for i := range goroutines / 2 {
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
s.SetScale(int32(id%3 + 1))
}
}(i)
}
for range goroutines / 2 {
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
scale := s.Scale()
assert.GreaterOrEqual(t, scale, int32(1))
}
@@ -65,21 +65,21 @@ func TestSurfaceState_ConcurrentLogicalSize(t *testing.T) {
const goroutines = 20
const iterations = 100
for i := range goroutines / 2 {
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
for j := range iterations {
for j := 0; j < iterations; j++ {
_ = s.OnLayerConfigure(1920+id, 1080+j)
}
}(i)
}
for range goroutines / 2 {
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
w, h := s.LogicalSize()
_ = w
_ = h
@@ -97,31 +97,31 @@ func TestSurfaceState_ConcurrentIsDone(t *testing.T) {
const goroutines = 30
const iterations = 100
for range goroutines / 3 {
for i := 0; i < goroutines/3; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
s.OnPointerButton(0x110, 1)
}
}()
}
for range goroutines / 3 {
for i := 0; i < goroutines/3; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
s.OnKey(1, 1)
}
}()
}
for range goroutines / 3 {
for i := 0; i < goroutines/3; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
picked, cancelled := s.IsDone()
_ = picked
_ = cancelled
@@ -139,11 +139,11 @@ func TestSurfaceState_ConcurrentIsReady(t *testing.T) {
const goroutines = 20
const iterations = 100
for range goroutines {
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
_ = s.IsReady()
}
}()
@@ -159,11 +159,11 @@ func TestSurfaceState_ConcurrentSwapBuffers(t *testing.T) {
const goroutines = 20
const iterations = 100
for range goroutines {
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for range iterations {
for j := 0; j < iterations; j++ {
s.SwapBuffers()
}
}()

View File

@@ -213,11 +213,6 @@ func (cd *ConfigDeployer) deployNiriDmsConfigs(dmsDir, terminalCommand string) e
for _, cfg := range configs {
path := filepath.Join(dmsDir, cfg.name)
// Skip if file already exists to preserve user modifications
if _, err := os.Stat(path); err == nil {
cd.log(fmt.Sprintf("Skipping %s (already exists)", cfg.name))
continue
}
if err := os.WriteFile(path, []byte(cfg.content), 0644); err != nil {
return fmt.Errorf("failed to write %s: %w", cfg.name, err)
}
@@ -270,13 +265,7 @@ func (cd *ConfigDeployer) deployGhosttyConfig() ([]DeploymentResult, error) {
colorResult := DeploymentResult{
ConfigType: "Ghostty Colors",
Path: filepath.Join(os.Getenv("HOME"), ".config", "ghostty", "themes", "dankcolors"),
}
themesDir := filepath.Dir(colorResult.Path)
if err := os.MkdirAll(themesDir, 0755); err != nil {
mainResult.Error = fmt.Errorf("failed to create themes directory: %w", err)
return []DeploymentResult{mainResult}, mainResult.Error
Path: filepath.Join(os.Getenv("HOME"), ".config", "ghostty", "config-dankcolors"),
}
if err := os.WriteFile(colorResult.Path, []byte(GhosttyColorConfig), 0644); err != nil {
@@ -626,11 +615,10 @@ func (cd *ConfigDeployer) transformNiriConfigForNonSystemd(config, terminalComma
spawnDms := `spawn-at-startup "dms" "run"`
if !strings.Contains(config, spawnDms) {
// Insert spawn-at-startup for dms after the environment block
envBlockEnd := regexp.MustCompile(`environment \{[^}]*\}`)
if loc := envBlockEnd.FindStringIndex(config); loc != nil {
config = config[:loc[1]] + "\n" + spawnDms + config[loc[1]:]
}
config = strings.Replace(config,
`spawn-at-startup "bash" "-c" "wl-paste --watch cliphist store &"`,
`spawn-at-startup "bash" "-c" "wl-paste --watch cliphist store &"`+"\n"+spawnDms,
1)
}
return config

View File

@@ -462,13 +462,13 @@ func TestHyprlandConfigStructure(t *testing.T) {
assert.Contains(t, HyprlandConfig, "# KEYBINDINGS")
assert.Contains(t, HyprlandConfig, "bind = $mod, T, exec, {{TERMINAL_COMMAND}}")
assert.Contains(t, HyprlandConfig, "bind = $mod, space, exec, dms ipc call spotlight toggle")
assert.Contains(t, HyprlandConfig, "windowrule = border_size 0, match:class ^(com\\.mitchellh\\.ghostty)$")
assert.Contains(t, HyprlandConfig, "windowrulev2 = noborder, class:^(com\\.mitchellh\\.ghostty)$")
}
func TestGhosttyConfigStructure(t *testing.T) {
assert.Contains(t, GhosttyConfig, "window-decoration = false")
assert.Contains(t, GhosttyConfig, "background-opacity = 1.0")
assert.Contains(t, GhosttyConfig, "theme = dankcolors")
assert.Contains(t, GhosttyConfig, "config-file = ./config-dankcolors")
}
func TestGhosttyColorConfigStructure(t *testing.T) {

View File

@@ -5,13 +5,15 @@ import (
"os"
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
)
func LocateDMSConfig() (string, error) {
var primaryPaths []string
configHome, err := os.UserConfigDir()
if err == nil && configHome != "" {
configHome := utils.XDGConfigHome()
if configHome != "" {
primaryPaths = append(primaryPaths, filepath.Join(configHome, "quickshell", "dms"))
}
@@ -21,7 +23,7 @@ func LocateDMSConfig() (string, error) {
dataDirs = "/usr/local/share:/usr/share"
}
for dir := range strings.SplitSeq(dataDirs, ":") {
for _, dir := range strings.Split(dataDirs, ":") {
if dir != "" {
primaryPaths = append(primaryPaths, filepath.Join(dir, "quickshell", "dms"))
}
@@ -33,7 +35,7 @@ func LocateDMSConfig() (string, error) {
configDirs = "/etc/xdg"
}
for dir := range strings.SplitSeq(configDirs, ":") {
for _, dir := range strings.Split(configDirs, ":") {
if dir != "" {
primaryPaths = append(primaryPaths, filepath.Join(dir, "quickshell", "dms"))
}

View File

@@ -48,4 +48,4 @@ keybind = shift+enter=text:\n
gtk-single-instance = true
# Dank color generation
theme = dankcolors
config-file = ./config-dankcolors

View File

@@ -12,6 +12,7 @@ monitor = , preferred,auto,auto
# ==================
exec-once = dbus-update-activation-environment --systemd --all
exec-once = systemctl --user start hyprland-session.target
exec-once = bash -c "wl-paste --watch cliphist store &"
# ==================
# INPUT CONFIG
@@ -90,36 +91,36 @@ misc {
# ==================
# WINDOW RULES
# ==================
windowrule = tile on, match:class ^(org\.wezfurlong\.wezterm)$
windowrulev2 = tile, class:^(org\.wezfurlong\.wezterm)$
windowrule = rounding 12, match:class ^(org\.gnome\.)
windowrule = border_size 0, match:class ^(org\.gnome\.)
windowrulev2 = rounding 12, class:^(org\.gnome\.)
windowrulev2 = noborder, class:^(org\.gnome\.)
windowrule = tile on, match:class ^(gnome-control-center)$
windowrule = tile on, match:class ^(pavucontrol)$
windowrule = tile on, match:class ^(nm-connection-editor)$
windowrulev2 = tile, class:^(gnome-control-center)$
windowrulev2 = tile, class:^(pavucontrol)$
windowrulev2 = tile, class:^(nm-connection-editor)$
windowrule = float on, match:class ^(gnome-calculator)$
windowrule = float on, match:class ^(galculator)$
windowrule = float on, match:class ^(blueman-manager)$
windowrule = float on, match:class ^(org\.gnome\.Nautilus)$
windowrule = float on, match:class ^(steam)$
windowrule = float on, match:class ^(xdg-desktop-portal)$
windowrulev2 = float, class:^(gnome-calculator)$
windowrulev2 = float, class:^(galculator)$
windowrulev2 = float, class:^(blueman-manager)$
windowrulev2 = float, class:^(org\.gnome\.Nautilus)$
windowrulev2 = float, class:^(steam)$
windowrulev2 = float, class:^(xdg-desktop-portal)$
windowrule = border_size 0, match:class ^(org\.wezfurlong\.wezterm)$
windowrule = border_size 0, match:class ^(Alacritty)$
windowrule = border_size 0, match:class ^(zen)$
windowrule = border_size 0, match:class ^(com\.mitchellh\.ghostty)$
windowrule = border_size 0, match:class ^(kitty)$
windowrulev2 = noborder, class:^(org\.wezfurlong\.wezterm)$
windowrulev2 = noborder, class:^(Alacritty)$
windowrulev2 = noborder, class:^(zen)$
windowrulev2 = noborder, class:^(com\.mitchellh\.ghostty)$
windowrulev2 = noborder, class:^(kitty)$
windowrule = float on, match:class ^(firefox)$, match:title ^(Picture-in-Picture)$
windowrule = float on, match:class ^(zoom)$
windowrulev2 = float, class:^(firefox)$, title:^(Picture-in-Picture)$
windowrulev2 = float, class:^(zoom)$
# DMS windows floating by default
windowrule = float on, match:class ^(org.quickshell)$
windowrule = opacity 0.9 0.9, match:float false, match:focus false
windowrulev2 = float, class:^(org.quickshell)$
windowrulev2 = opacity 0.9 0.9, floating:0, focus:0
layerrule = no_anim on, match:namespace ^(quickshell)$
layerrule = noanim, ^(quickshell)$
# ==================
# KEYBINDINGS
@@ -150,10 +151,6 @@ bindel = , XF86AudioRaiseVolume, exec, dms ipc call audio increment 3
bindel = , XF86AudioLowerVolume, exec, dms ipc call audio decrement 3
bindl = , XF86AudioMute, exec, dms ipc call audio mute
bindl = , XF86AudioMicMute, exec, dms ipc call audio micmute
bindl = , XF86AudioPause, exec, dms ipc call mpris playPause
bindl = , XF86AudioPlay, exec, dms ipc call mpris playPause
bindl = , XF86AudioPrev, exec, dms ipc call mpris previous
bindl = , XF86AudioNext, exec, dms ipc call mpris next
# === Brightness Controls ===
bindel = , XF86MonBrightnessUp, exec, dms ipc call brightness increment 5 ""

View File

@@ -1,8 +1,3 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
recent-windows {
highlight {
corner-radius 12

View File

@@ -1,8 +1,3 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
binds {
// === System & Overview ===
Mod+D repeat=false { toggle-overview; }
@@ -51,18 +46,6 @@ binds {
XF86AudioMicMute allow-when-locked=true {
spawn "dms" "ipc" "call" "audio" "micmute";
}
XF86AudioPause allow-when-locked=true {
spawn "dms" "ipc" "call" "mpris" "playPause";
}
XF86AudioPlay allow-when-locked=true {
spawn "dms" "ipc" "call" "mpris" "playPause";
}
XF86AudioPrev allow-when-locked=true {
spawn "dms" "ipc" "call" "mpris" "previous";
}
XF86AudioNext allow-when-locked=true {
spawn "dms" "ipc" "call" "mpris" "next";
}
// === Brightness Controls ===
XF86MonBrightnessUp allow-when-locked=true {
@@ -209,4 +192,4 @@ binds {
// === System Controls ===
Mod+Escape allow-inhibiting=false { toggle-keyboard-shortcuts-inhibit; }
Mod+Shift+P { power-off-monitors; }
}
}

View File

@@ -1,8 +1,3 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
layout {
background-color "transparent"
@@ -38,4 +33,4 @@ recent-windows {
active-color "#124a73"
urgent-color "#ffb4ab"
}
}
}

View File

@@ -1,8 +1,3 @@
// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
layout {
gaps 4

View File

@@ -18,64 +18,15 @@ gestures {
input {
keyboard {
xkb {
// You can set rules, model, layout, variant and options.
// For more information, see xkeyboard-config(7).
// For example:
// layout "us,ru"
// options "grp:win_space_toggle,compose:ralt,ctrl:nocaps"
// If this section is empty, niri will fetch xkb settings
// from org.freedesktop.locale1. You can control these using
// localectl set-x11-keymap.
}
// Enable numlock on startup, omitting this setting disables it.
numlock
}
// Next sections include libinput settings.
// Omitting settings disables them, or leaves them at their default values.
// All commented-out settings here are examples, not defaults.
touchpad {
// off
tap
// dwt
// dwtp
// drag false
// drag-lock
natural-scroll
// accel-speed 0.2
// accel-profile "flat"
// scroll-method "two-finger"
// disabled-on-external-mouse
}
mouse {
// off
// natural-scroll
// accel-speed 0.2
// accel-profile "flat"
// scroll-method "no-scroll"
}
trackpoint {
// off
// natural-scroll
// accel-speed 0.2
// accel-profile "flat"
// scroll-method "on-button-down"
// scroll-button 273
// scroll-button-lock
// middle-emulation
}
// Uncomment this to make the mouse warp to the center of newly focused windows.
// warp-mouse-to-focus
// Focus windows and outputs automatically when moving the mouse into them.
// Setting max-scroll-amount="0%" makes it work only on windows already fully on screen.
// focus-follows-mouse max-scroll-amount="0%"
}
// You can configure outputs by their name, which you can find
// by running `niri msg outputs` while inside a niri instance.
@@ -158,6 +109,7 @@ overview {
// which may be more convenient to use.
// See the binds section below for more spawn examples.
// This line starts waybar, a commonly used bar for Wayland compositors.
spawn-at-startup "bash" "-c" "wl-paste --watch cliphist store &"
environment {
XDG_CURRENT_DESKTOP "niri"
}

View File

@@ -345,7 +345,7 @@ func EnsureContrastDPSLstar(hexColor, hexBg string, minLc float64, isLightMode b
}
step := 0.5
for range 120 {
for i := 0; i < 120; i++ {
Lf = math.Max(0, math.Min(100, Lf+dir*step))
cand := labToHex(Lf, af, bf)
if DeltaPhiStarContrast(cand, hexBg, isLightMode) >= minLc {

View File

@@ -658,7 +658,7 @@ func TestContrastAlgorithmComparison(t *testing.T) {
}
differentCount := 0
for i := range 16 {
for i := 0; i < 16; i++ {
if wcagColors[i].Hex != dpsColors[i].Hex {
differentCount++
}

View File

@@ -112,24 +112,3 @@ func GenerateWeztermTheme(p Palette) string {
p.Color12.Hex, p.Color13.Hex, p.Color14.Hex, p.Color15.Hex)
return result.String()
}
func GenerateNeovimTheme(p Palette) string {
var result strings.Builder
fmt.Fprintf(&result, "vim.g.terminal_color_0 = \"%s\"\n", p.Color0.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_1 = \"%s\"\n", p.Color1.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_2 = \"%s\"\n", p.Color2.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_3 = \"%s\"\n", p.Color3.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_4 = \"%s\"\n", p.Color4.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_5 = \"%s\"\n", p.Color5.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_6 = \"%s\"\n", p.Color6.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_7 = \"%s\"\n", p.Color7.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_8 = \"%s\"\n", p.Color8.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_9 = \"%s\"\n", p.Color9.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_10 = \"%s\"\n", p.Color10.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_11 = \"%s\"\n", p.Color11.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_12 = \"%s\"\n", p.Color12.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_13 = \"%s\"\n", p.Color13.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_14 = \"%s\"\n", p.Color14.Hex)
fmt.Fprintf(&result, "vim.g.terminal_color_15 = \"%s\"\n", p.Color15.Hex)
return result.String()
}

View File

@@ -7,7 +7,6 @@ import (
"os/exec"
"path/filepath"
"runtime"
"slices"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
@@ -104,8 +103,10 @@ func (a *ArchDistribution) DetectDependenciesWithTerminal(ctx context.Context, w
dependencies = append(dependencies, a.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, a.detectMatugen())
dependencies = append(dependencies, a.detectDgop())
dependencies = append(dependencies, a.detectClipboardTools()...)
return dependencies, nil
}
@@ -138,6 +139,8 @@ func (a *ArchDistribution) GetPackageMappingWithVariants(wm deps.WindowManager,
"ghostty": {Name: "ghostty", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"cliphist": {Name: "cliphist", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
}
@@ -515,9 +518,12 @@ func (a *ArchDistribution) reorderAURPackages(packages []string) []string {
dmsShell = append(dmsShell, pkg)
} else {
isDep := false
if slices.Contains(dmsDepencies, pkg) {
deps = append(deps, pkg)
isDep = true
for _, dep := range dmsDepencies {
if pkg == dep {
deps = append(deps, pkg)
isDep = true
break
}
}
if !isDep {
others = append(others, pkg)
@@ -543,7 +549,7 @@ func (a *ArchDistribution) installSingleAURPackage(ctx context.Context, pkg, sud
a.log(fmt.Sprintf("Warning: failed to clean existing cache for %s: %v", pkg, err))
}
if err := os.MkdirAll(buildDir, 0o755); err != nil {
if err := os.MkdirAll(buildDir, 0755); err != nil {
return fmt.Errorf("failed to create build directory: %w", err)
}
defer func() {

View File

@@ -14,6 +14,7 @@ import (
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/AvengeMedia/DankMaterialShell/core/internal/version"
)
@@ -185,6 +186,37 @@ func (b *BaseDistribution) detectSpecificTerminal(terminal deps.Terminal) deps.D
}
}
func (b *BaseDistribution) detectClipboardTools() []deps.Dependency {
var dependencies []deps.Dependency
cliphist := deps.StatusMissing
if b.commandExists("cliphist") {
cliphist = deps.StatusInstalled
}
wlClipboard := deps.StatusMissing
if b.commandExists("wl-copy") && b.commandExists("wl-paste") {
wlClipboard = deps.StatusInstalled
}
dependencies = append(dependencies,
deps.Dependency{
Name: "cliphist",
Status: cliphist,
Description: "Wayland clipboard manager",
Required: true,
},
deps.Dependency{
Name: "wl-clipboard",
Status: wlClipboard,
Description: "Wayland clipboard utilities",
Required: true,
},
)
return dependencies
}
func (b *BaseDistribution) detectHyprlandTools() []deps.Dependency {
var dependencies []deps.Dependency
@@ -550,9 +582,20 @@ func (b *BaseDistribution) WriteEnvironmentConfig(terminal deps.Terminal) error
terminalCmd = "ghostty"
}
content := fmt.Sprintf(`ELECTRON_OZONE_PLATFORM_HINT=auto
// ! This deviates from master branch so it doesn't need a hotfix
var content string
if utils.CommandExists("plasmashell") || utils.CommandExists("plasma-session") || utils.CommandExists("plasma_session") {
content = fmt.Sprintf(`ELECTRON_OZONE_PLATFORM_HINT=auto
TERMINAL=%s
`, terminalCmd)
} else {
content = fmt.Sprintf(`QT_QPA_PLATFORM=wayland
ELECTRON_OZONE_PLATFORM_HINT=auto
QT_QPA_PLATFORMTHEME=gtk3
QT_QPA_PLATFORMTHEME_QT6=gtk3
TERMINAL=%s
`, terminalCmd)
}
envFile := filepath.Join(envDir, "90-dms.conf")
if err := os.WriteFile(envFile, []byte(content), 0644); err != nil {

View File

@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"os/exec"
"runtime"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/deps"
@@ -70,6 +69,7 @@ func (d *DebianDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, d.detectMatugen())
dependencies = append(dependencies, d.detectDgop())
dependencies = append(dependencies, d.detectClipboardTools()...)
return dependencies, nil
}
@@ -102,6 +102,7 @@ func (d *DebianDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"git": {Name: "git", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
@@ -110,6 +111,7 @@ func (d *DebianDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"quickshell": d.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "matugen", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
"dgop": {Name: "dgop", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
"cliphist": {Name: "cliphist", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
"ghostty": {Name: "ghostty", Repository: RepoTypeOBS, RepoURL: "home:AvengeMedia:danklinux"},
}
@@ -385,8 +387,6 @@ func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Packa
debianVersion := "Debian_13"
if osInfo.VersionID == "testing" {
debianVersion = "Debian_Testing"
} else if osInfo.VersionCodename == "sid" || osInfo.VersionID == "sid" || strings.Contains(strings.ToLower(osInfo.PrettyName), "sid") || strings.Contains(strings.ToLower(osInfo.PrettyName), "unstable") {
debianVersion = "Debian_Unstable"
}
for _, pkg := range obsPkgs {
@@ -430,7 +430,7 @@ func (d *DebianDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Packa
}
// Add repository
repoLine := fmt.Sprintf("deb [signed-by=%s, arch=%s] %s/ /", keyringPath, runtime.GOARCH, baseURL)
repoLine := fmt.Sprintf("deb [signed-by=%s] %s/ /", keyringPath, baseURL)
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
@@ -549,7 +549,7 @@ func (d *DebianDistribution) installBuildDependencies(ctx context.Context, manua
if err := d.installRust(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Rust: %w", err)
}
case "dgop":
case "cliphist", "dgop":
if err := d.installGo(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Go: %w", err)
}

View File

@@ -88,8 +88,10 @@ func (f *FedoraDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, f.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, f.detectMatugen())
dependencies = append(dependencies, f.detectDgop())
dependencies = append(dependencies, f.detectClipboardTools()...)
return dependencies, nil
}
@@ -115,12 +117,14 @@ func (f *FedoraDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"ghostty": {Name: "ghostty", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
// COPR packages
"quickshell": f.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "matugen", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
"cliphist": {Name: "cliphist", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
"dms (DankMaterialShell)": f.getDmsMapping(variants["dms (DankMaterialShell)"]),
"dgop": {Name: "dgop", Repository: RepoTypeCOPR, RepoURL: "avengemedia/danklinux"},
}

View File

@@ -107,6 +107,7 @@ func (g *GentooDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, g.detectMatugen())
dependencies = append(dependencies, g.detectDgop())
dependencies = append(dependencies, g.detectClipboardTools()...)
return dependencies, nil
}
@@ -139,6 +140,7 @@ func (g *GentooDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"git": {Name: "dev-vcs/git", Repository: RepoTypeSystem},
"kitty": {Name: "x11-terms/kitty", Repository: RepoTypeSystem, UseFlags: "X wayland"},
"alacritty": {Name: "x11-terms/alacritty", Repository: RepoTypeSystem, UseFlags: "X wayland"},
"wl-clipboard": {Name: "gui-apps/wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "sys-apps/xdg-desktop-portal-gtk", Repository: RepoTypeSystem, UseFlags: "wayland X"},
"accountsservice": {Name: "sys-apps/accountsservice", Repository: RepoTypeSystem},
@@ -149,6 +151,7 @@ func (g *GentooDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"quickshell": g.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "x11-misc/matugen", Repository: RepoTypeGURU, AcceptKeywords: archKeyword},
"cliphist": {Name: "app-misc/cliphist", Repository: RepoTypeGURU, AcceptKeywords: archKeyword},
"dms (DankMaterialShell)": g.getDmsMapping(variants["dms (DankMaterialShell)"]),
"dgop": {Name: "dgop", Repository: RepoTypeManual, BuildFunc: "installDgop"},
}

View File

@@ -18,8 +18,8 @@ type ManualPackageInstaller struct {
// parseLatestTagFromGitOutput parses git ls-remote output and returns the latest tag
func (m *ManualPackageInstaller) parseLatestTagFromGitOutput(output string) string {
lines := strings.SplitSeq(output, "\n")
for line := range lines {
lines := strings.Split(output, "\n")
for _, line := range lines {
if strings.Contains(line, "refs/tags/") && !strings.Contains(line, "^{}") {
parts := strings.Split(line, "refs/tags/")
if len(parts) > 1 {
@@ -74,6 +74,10 @@ func (m *ManualPackageInstaller) InstallManualPackages(ctx context.Context, pack
if err := m.installHyprland(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install hyprland: %w", err)
}
case "hyprpicker":
if err := m.installHyprpicker(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install hyprpicker: %w", err)
}
case "ghostty":
if err := m.installGhostty(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install ghostty: %w", err)
@@ -82,6 +86,10 @@ func (m *ManualPackageInstaller) InstallManualPackages(ctx context.Context, pack
if err := m.installMatugen(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install matugen: %w", err)
}
case "cliphist":
if err := m.installCliphist(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install cliphist: %w", err)
}
case "xwayland-satellite":
if err := m.installXwaylandSatellite(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install xwayland-satellite: %w", err)
@@ -103,12 +111,12 @@ func (m *ManualPackageInstaller) installDgop(ctx context.Context, sudoPassword s
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
if err := os.MkdirAll(cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "dgop-build")
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -160,10 +168,10 @@ func (m *ManualPackageInstaller) installNiri(ctx context.Context, sudoPassword s
homeDir, _ := os.UserHomeDir()
buildDir := filepath.Join(homeDir, ".cache", "dankinstall", "niri-build")
tmpDir := filepath.Join(homeDir, ".cache", "dankinstall", "tmp")
if err := os.MkdirAll(buildDir, 0o755); err != nil {
if err := os.MkdirAll(buildDir, 0755); err != nil {
return fmt.Errorf("failed to create build directory: %w", err)
}
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer func() {
@@ -237,12 +245,12 @@ func (m *ManualPackageInstaller) installQuickshell(ctx context.Context, variant
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
if err := os.MkdirAll(cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "quickshell-build")
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -273,7 +281,7 @@ func (m *ManualPackageInstaller) installQuickshell(ctx context.Context, variant
}
buildDir := tmpDir + "/build"
if err := os.MkdirAll(buildDir, 0o755); err != nil {
if err := os.MkdirAll(buildDir, 0755); err != nil {
return fmt.Errorf("failed to create build directory: %w", err)
}
@@ -343,12 +351,12 @@ func (m *ManualPackageInstaller) installHyprland(ctx context.Context, sudoPasswo
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
if err := os.MkdirAll(cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "hyprland-build")
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -397,6 +405,184 @@ func (m *ManualPackageInstaller) installHyprland(ctx context.Context, sudoPasswo
return nil
}
func (m *ManualPackageInstaller) installHyprpicker(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing hyprpicker from source...")
homeDir := os.Getenv("HOME")
if homeDir == "" {
return fmt.Errorf("HOME environment variable not set")
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
// Install hyprutils first
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.05,
Step: "Building hyprutils dependency...",
IsComplete: false,
CommandInfo: "git clone https://github.com/hyprwm/hyprutils.git",
}
hyprutilsDir := filepath.Join(cacheDir, "hyprutils-build")
if err := os.MkdirAll(hyprutilsDir, 0755); err != nil {
return fmt.Errorf("failed to create hyprutils directory: %w", err)
}
defer os.RemoveAll(hyprutilsDir)
cloneUtilsCmd := exec.CommandContext(ctx, "git", "clone", "https://github.com/hyprwm/hyprutils.git", hyprutilsDir)
if err := cloneUtilsCmd.Run(); err != nil {
return fmt.Errorf("failed to clone hyprutils: %w", err)
}
configureUtilsCmd := exec.CommandContext(ctx, "cmake",
"--no-warn-unused-cli",
"-DCMAKE_BUILD_TYPE:STRING=Release",
"-DCMAKE_INSTALL_PREFIX:PATH=/usr",
"-DBUILD_TESTING=off",
"-S", ".",
"-B", "./build")
configureUtilsCmd.Dir = hyprutilsDir
configureUtilsCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(configureUtilsCmd, progressChan, PhaseSystemPackages, 0.05, 0.1, "Configuring hyprutils..."); err != nil {
return fmt.Errorf("failed to configure hyprutils: %w", err)
}
buildUtilsCmd := exec.CommandContext(ctx, "cmake", "--build", "./build", "--config", "Release", "--target", "all")
buildUtilsCmd.Dir = hyprutilsDir
buildUtilsCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(buildUtilsCmd, progressChan, PhaseSystemPackages, 0.1, 0.2, "Building hyprutils..."); err != nil {
return fmt.Errorf("failed to build hyprutils: %w", err)
}
installUtilsCmd := ExecSudoCommand(ctx, sudoPassword, "cmake --install ./build")
installUtilsCmd.Dir = hyprutilsDir
if err := installUtilsCmd.Run(); err != nil {
return fmt.Errorf("failed to install hyprutils: %w", err)
}
// Install hyprwayland-scanner
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.2,
Step: "Building hyprwayland-scanner dependency...",
IsComplete: false,
CommandInfo: "git clone https://github.com/hyprwm/hyprwayland-scanner.git",
}
scannerDir := filepath.Join(cacheDir, "hyprwayland-scanner-build")
if err := os.MkdirAll(scannerDir, 0755); err != nil {
return fmt.Errorf("failed to create scanner directory: %w", err)
}
defer os.RemoveAll(scannerDir)
cloneScannerCmd := exec.CommandContext(ctx, "git", "clone", "https://github.com/hyprwm/hyprwayland-scanner.git", scannerDir)
if err := cloneScannerCmd.Run(); err != nil {
return fmt.Errorf("failed to clone hyprwayland-scanner: %w", err)
}
configureScannerCmd := exec.CommandContext(ctx, "cmake",
"-DCMAKE_INSTALL_PREFIX=/usr",
"-B", "build")
configureScannerCmd.Dir = scannerDir
configureScannerCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(configureScannerCmd, progressChan, PhaseSystemPackages, 0.2, 0.25, "Configuring hyprwayland-scanner..."); err != nil {
return fmt.Errorf("failed to configure hyprwayland-scanner: %w", err)
}
buildScannerCmd := exec.CommandContext(ctx, "cmake", "--build", "build", "-j")
buildScannerCmd.Dir = scannerDir
buildScannerCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(buildScannerCmd, progressChan, PhaseSystemPackages, 0.25, 0.35, "Building hyprwayland-scanner..."); err != nil {
return fmt.Errorf("failed to build hyprwayland-scanner: %w", err)
}
installScannerCmd := ExecSudoCommand(ctx, sudoPassword, "cmake --install build")
installScannerCmd.Dir = scannerDir
if err := installScannerCmd.Run(); err != nil {
return fmt.Errorf("failed to install hyprwayland-scanner: %w", err)
}
// Now build hyprpicker
tmpDir := filepath.Join(cacheDir, "hyprpicker-build")
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.35,
Step: "Cloning hyprpicker repository...",
IsComplete: false,
CommandInfo: "git clone https://github.com/hyprwm/hyprpicker.git",
}
cloneCmd := exec.CommandContext(ctx, "git", "clone", "https://github.com/hyprwm/hyprpicker.git", tmpDir)
if err := cloneCmd.Run(); err != nil {
return fmt.Errorf("failed to clone hyprpicker: %w", err)
}
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.45,
Step: "Configuring hyprpicker build...",
IsComplete: false,
CommandInfo: "cmake -B build -S . -DCMAKE_BUILD_TYPE=Release",
}
configureCmd := exec.CommandContext(ctx, "cmake",
"--no-warn-unused-cli",
"-DCMAKE_BUILD_TYPE:STRING=Release",
"-DCMAKE_INSTALL_PREFIX:PATH=/usr",
"-S", ".",
"-B", "./build")
configureCmd.Dir = tmpDir
configureCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
output, err := configureCmd.CombinedOutput()
if err != nil {
m.log(fmt.Sprintf("cmake configure failed. Output:\n%s", string(output)))
return fmt.Errorf("failed to configure hyprpicker: %w\nCMake output:\n%s", err, string(output))
}
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.55,
Step: "Building hyprpicker...",
IsComplete: false,
CommandInfo: "cmake --build build --target hyprpicker",
}
buildCmd := exec.CommandContext(ctx, "cmake", "--build", "./build", "--config", "Release", "--target", "hyprpicker")
buildCmd.Dir = tmpDir
buildCmd.Env = append(os.Environ(), "TMPDIR="+cacheDir)
if err := m.runWithProgressStep(buildCmd, progressChan, PhaseSystemPackages, 0.55, 0.8, "Building hyprpicker..."); err != nil {
return fmt.Errorf("failed to build hyprpicker: %w", err)
}
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.8,
Step: "Installing hyprpicker...",
IsComplete: false,
NeedsSudo: true,
CommandInfo: "sudo cmake --install build",
}
installCmd := ExecSudoCommand(ctx, sudoPassword, "cmake --install ./build")
installCmd.Dir = tmpDir
if err := installCmd.Run(); err != nil {
return fmt.Errorf("failed to install hyprpicker: %w", err)
}
m.log("hyprpicker installed successfully from source")
return nil
}
func (m *ManualPackageInstaller) installGhostty(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing Ghostty from source...")
@@ -406,12 +592,12 @@ func (m *ManualPackageInstaller) installGhostty(ctx context.Context, sudoPasswor
}
cacheDir := filepath.Join(homeDir, ".cache", "dankinstall")
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
if err := os.MkdirAll(cacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
tmpDir := filepath.Join(cacheDir, "ghostty-build")
if err := os.MkdirAll(tmpDir, 0o755); err != nil {
if err := os.MkdirAll(tmpDir, 0755); err != nil {
return fmt.Errorf("failed to create temp directory: %w", err)
}
defer os.RemoveAll(tmpDir)
@@ -528,7 +714,7 @@ func (m *ManualPackageInstaller) installDankMaterialShell(ctx context.Context, v
}
configDir := filepath.Dir(dmsPath)
if err := os.MkdirAll(configDir, 0o755); err != nil {
if err := os.MkdirAll(configDir, 0755); err != nil {
return fmt.Errorf("failed to create quickshell config directory: %w", err)
}
@@ -617,6 +803,52 @@ func (m *ManualPackageInstaller) installDankMaterialShell(ctx context.Context, v
return nil
}
func (m *ManualPackageInstaller) installCliphist(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing cliphist from source...")
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.1,
Step: "Installing cliphist via go install...",
IsComplete: false,
CommandInfo: "go install go.senan.xyz/cliphist@latest",
}
installCmd := exec.CommandContext(ctx, "go", "install", "go.senan.xyz/cliphist@latest")
if err := m.runWithProgressStep(installCmd, progressChan, PhaseSystemPackages, 0.1, 0.7, "Building cliphist..."); err != nil {
return fmt.Errorf("failed to install cliphist: %w", err)
}
homeDir := os.Getenv("HOME")
sourcePath := filepath.Join(homeDir, "go", "bin", "cliphist")
targetPath := "/usr/local/bin/cliphist"
progressChan <- InstallProgressMsg{
Phase: PhaseSystemPackages,
Progress: 0.7,
Step: "Installing cliphist binary to system...",
IsComplete: false,
NeedsSudo: true,
CommandInfo: fmt.Sprintf("sudo cp %s %s", sourcePath, targetPath),
}
copyCmd := exec.CommandContext(ctx, "sudo", "-S", "cp", sourcePath, targetPath)
copyCmd.Stdin = strings.NewReader(sudoPassword + "\n")
if err := copyCmd.Run(); err != nil {
return fmt.Errorf("failed to copy cliphist to /usr/local/bin: %w", err)
}
// Make it executable
chmodCmd := exec.CommandContext(ctx, "sudo", "-S", "chmod", "+x", targetPath)
chmodCmd.Stdin = strings.NewReader(sudoPassword + "\n")
if err := chmodCmd.Run(); err != nil {
return fmt.Errorf("failed to make cliphist executable: %w", err)
}
m.log("cliphist installed successfully from source")
return nil
}
func (m *ManualPackageInstaller) installXwaylandSatellite(ctx context.Context, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
m.log("Installing xwayland-satellite from source...")

View File

@@ -15,12 +15,6 @@ func init() {
Register("opensuse-tumbleweed", "#73BA25", FamilySUSE, func(config DistroConfig, logChan chan<- string) Distribution {
return NewOpenSUSEDistribution(config, logChan)
})
Register("opensuse-leap", "#73BA25", FamilySUSE, func(config DistroConfig, logChan chan<- string) Distribution {
return NewOpenSUSEDistribution(config, logChan)
})
Register("opensuse-slowroll", "#73BA25", FamilySUSE, func(config DistroConfig, logChan chan<- string) Distribution {
return NewOpenSUSEDistribution(config, logChan)
})
}
type OpenSUSEDistribution struct {
@@ -84,8 +78,10 @@ func (o *OpenSUSEDistribution) DetectDependenciesWithTerminal(ctx context.Contex
dependencies = append(dependencies, o.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, o.detectMatugen())
dependencies = append(dependencies, o.detectDgop())
dependencies = append(dependencies, o.detectClipboardTools()...)
return dependencies, nil
}
@@ -111,8 +107,10 @@ func (o *OpenSUSEDistribution) GetPackageMappingWithVariants(wm deps.WindowManag
"ghostty": {Name: "ghostty", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
"cliphist": {Name: "cliphist", Repository: RepoTypeSystem},
// DMS packages from OBS
"dms (DankMaterialShell)": o.getDmsMapping(variants["dms (DankMaterialShell)"]),
@@ -440,19 +438,6 @@ func (o *OpenSUSEDistribution) extractPackageNames(packages []PackageMapping) []
func (o *OpenSUSEDistribution) enableOBSRepos(ctx context.Context, obsPkgs []PackageMapping, sudoPassword string, progressChan chan<- InstallProgressMsg) error {
enabledRepos := make(map[string]bool)
osInfo, err := GetOSInfo()
if err != nil {
return fmt.Errorf("failed to get OS info: %w", err)
}
obsDistroVersion := "openSUSE_Tumbleweed"
switch osInfo.Distribution.ID {
case "opensuse-leap":
obsDistroVersion = fmt.Sprintf("openSUSE_Leap_%s", osInfo.VersionID)
case "opensuse-slowroll":
obsDistroVersion = "openSUSE_Slowroll"
}
for _, pkg := range obsPkgs {
if pkg.RepoURL != "" && !enabledRepos[pkg.RepoURL] {
o.log(fmt.Sprintf("Enabling OBS repository: %s", pkg.RepoURL))
@@ -460,8 +445,8 @@ func (o *OpenSUSEDistribution) enableOBSRepos(ctx context.Context, obsPkgs []Pac
// RepoURL format: "home:AvengeMedia:danklinux"
repoPath := strings.ReplaceAll(pkg.RepoURL, ":", ":/")
repoName := strings.ReplaceAll(pkg.RepoURL, ":", "-")
repoURL := fmt.Sprintf("https://download.opensuse.org/repositories/%s/%s/%s.repo",
repoPath, obsDistroVersion, pkg.RepoURL)
repoURL := fmt.Sprintf("https://download.opensuse.org/repositories/%s/openSUSE_Tumbleweed/%s.repo",
repoPath, pkg.RepoURL)
checkCmd := exec.CommandContext(ctx, "zypper", "repos", repoName)
if checkCmd.Run() == nil {

View File

@@ -19,12 +19,11 @@ type DistroInfo struct {
// OSInfo contains complete OS information
type OSInfo struct {
Distribution DistroInfo
Version string
VersionID string
VersionCodename string
PrettyName string
Architecture string
Distribution DistroInfo
Version string
VersionID string
PrettyName string
Architecture string
}
// GetOSInfo detects the current OS and returns information about it
@@ -73,8 +72,6 @@ func GetOSInfo() (*OSInfo, error) {
info.VersionID = value
case "VERSION":
info.Version = value
case "VERSION_CODENAME":
info.VersionCodename = value
case "PRETTY_NAME":
info.PrettyName = value
}
@@ -103,10 +100,6 @@ func IsUnsupportedDistro(distroID, versionID string) bool {
}
if distroID == "debian" {
// unstable/sid support
if versionID == "sid" {
return false
}
if versionID == "" {
// debian testing/sid have no version ID
return false

View File

@@ -76,8 +76,10 @@ func (u *UbuntuDistribution) DetectDependenciesWithTerminal(ctx context.Context,
dependencies = append(dependencies, u.detectXwaylandSatellite())
}
// Base detections (common across distros)
dependencies = append(dependencies, u.detectMatugen())
dependencies = append(dependencies, u.detectDgop())
dependencies = append(dependencies, u.detectClipboardTools()...)
return dependencies, nil
}
@@ -110,6 +112,7 @@ func (u *UbuntuDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"git": {Name: "git", Repository: RepoTypeSystem},
"kitty": {Name: "kitty", Repository: RepoTypeSystem},
"alacritty": {Name: "alacritty", Repository: RepoTypeSystem},
"wl-clipboard": {Name: "wl-clipboard", Repository: RepoTypeSystem},
"xdg-desktop-portal-gtk": {Name: "xdg-desktop-portal-gtk", Repository: RepoTypeSystem},
"accountsservice": {Name: "accountsservice", Repository: RepoTypeSystem},
@@ -118,6 +121,7 @@ func (u *UbuntuDistribution) GetPackageMappingWithVariants(wm deps.WindowManager
"quickshell": u.getQuickshellMapping(variants["quickshell"]),
"matugen": {Name: "matugen", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
"dgop": {Name: "dgop", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
"cliphist": {Name: "cliphist", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
"ghostty": {Name: "ghostty", Repository: RepoTypePPA, RepoURL: "ppa:avengemedia/danklinux"},
}
@@ -535,6 +539,8 @@ func (u *UbuntuDistribution) installBuildDependencies(ctx context.Context, manua
buildDeps["libpam0g-dev"] = true
case "matugen":
buildDeps["curl"] = true
case "cliphist":
// Go will be installed separately with PPA
}
}
@@ -544,7 +550,7 @@ func (u *UbuntuDistribution) installBuildDependencies(ctx context.Context, manua
if err := u.installRust(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Rust: %w", err)
}
case "dgop":
case "cliphist", "dgop":
if err := u.installGo(ctx, sudoPassword, progressChan); err != nil {
return fmt.Errorf("failed to install Go: %w", err)
}

View File

@@ -518,7 +518,7 @@ func (m Model) categorizeDependencies() map[string][]DependencyInfo {
categories["Hyprland Components"] = append(categories["Hyprland Components"], dep)
case "niri":
categories["Niri Components"] = append(categories["Niri Components"], dep)
case "kitty", "alacritty", "ghostty":
case "kitty", "alacritty", "ghostty", "hyprpicker":
categories["Shared Components"] = append(categories["Shared Components"], dep)
default:
categories["Shared Components"] = append(categories["Shared Components"], dep)

View File

@@ -16,14 +16,14 @@ type DiscoveryConfig struct {
func DefaultDiscoveryConfig() *DiscoveryConfig {
var searchPaths []string
configDir, err := os.UserConfigDir()
if err == nil && configDir != "" {
searchPaths = append(searchPaths, filepath.Join(configDir, "DankMaterialShell", "cheatsheets"))
configHome := utils.XDGConfigHome()
if configHome != "" {
searchPaths = append(searchPaths, filepath.Join(configHome, "DankMaterialShell", "cheatsheets"))
}
configDirs := os.Getenv("XDG_CONFIG_DIRS")
if configDirs != "" {
for dir := range strings.SplitSeq(configDirs, ":") {
for _, dir := range strings.Split(configDirs, ":") {
if dir != "" {
searchPaths = append(searchPaths, filepath.Join(dir, "DankMaterialShell", "cheatsheets"))
}
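One variant above resolves the search path through os.UserConfigDir, the other through a utils.XDGConfigHome helper that is not shown in this hunk. A plausible sketch of such a helper, honouring $XDG_CONFIG_HOME with a ~/.config fallback per the XDG base directory spec; the real implementation may differ:

package utils

import (
    "os"
    "path/filepath"
)

// XDGConfigHome returns $XDG_CONFIG_HOME if set, otherwise ~/.config.
// An empty string is returned only when the home directory is unknown.
func XDGConfigHome() string {
    if dir := os.Getenv("XDG_CONFIG_HOME"); dir != "" {
        return dir
    }
    home, err := os.UserHomeDir()
    if err != nil {
        return ""
    }
    return filepath.Join(home, ".config")
}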

View File

@@ -12,7 +12,7 @@ func TestNewJSONFileProvider(t *testing.T) {
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "test.json")
if err := os.WriteFile(testFile, []byte("{}"), 0o644); err != nil {
if err := os.WriteFile(testFile, []byte("{}"), 0644); err != nil {
t.Fatalf("Failed to create test file: %v", err)
}
@@ -81,7 +81,7 @@ func TestJSONFileProviderGetCheatSheet(t *testing.T) {
}
}`
if err := os.WriteFile(testFile, []byte(content), 0o644); err != nil {
if err := os.WriteFile(testFile, []byte(content), 0644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
@@ -135,7 +135,7 @@ func TestJSONFileProviderGetCheatSheetNoProvider(t *testing.T) {
"binds": {}
}`
if err := os.WriteFile(testFile, []byte(content), 0o644); err != nil {
if err := os.WriteFile(testFile, []byte(content), 0644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
@@ -181,7 +181,7 @@ func TestJSONFileProviderFlatArrayBackwardsCompat(t *testing.T) {
]
}`
if err := os.WriteFile(testFile, []byte(content), 0o644); err != nil {
if err := os.WriteFile(testFile, []byte(content), 0644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
@@ -216,7 +216,7 @@ func TestJSONFileProviderInvalidJSON(t *testing.T) {
tmpDir := t.TempDir()
testFile := filepath.Join(tmpDir, "invalid.json")
if err := os.WriteFile(testFile, []byte("not valid json"), 0o644); err != nil {
if err := os.WriteFile(testFile, []byte("not valid json"), 0644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
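The only change in these tests is the spelling of the file mode: 0o644 is the explicit octal prefix introduced in Go 1.13, while 0644 is the legacy octal literal, so both write the same rw-r--r-- permissions. A quick check:

package main

import (
    "fmt"
    "os"
)

func main() {
    // Both spellings denote the same octal value.
    fmt.Println(0o644 == 0644)      // true
    fmt.Println(os.FileMode(0o644)) // -rw-r--r--
}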

View File

@@ -10,6 +10,7 @@ import (
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/keybinds"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/sblinch/kdl-go"
"github.com/sblinch/kdl-go/document"
)
@@ -30,11 +31,7 @@ func NewNiriProvider(configDir string) *NiriProvider {
}
func defaultNiriConfigDir() string {
configDir, err := os.UserConfigDir()
if err != nil {
return ""
}
return filepath.Join(configDir, "niri")
return filepath.Join(utils.XDGConfigHome(), "niri")
}
func (n *NiriProvider) Name() string {
@@ -461,16 +458,9 @@ func (n *NiriProvider) getBindSortPriority(action string) int {
}
}
const dmsWarningHeader = `// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
`
func (n *NiriProvider) generateBindsContent(binds map[string]*overrideBind) string {
if len(binds) == 0 {
return dmsWarningHeader + "binds {}\n"
return "binds {}\n"
}
var regularBinds, recentWindowsBinds []*overrideBind
@@ -497,7 +487,6 @@ func (n *NiriProvider) generateBindsContent(binds map[string]*overrideBind) stri
var sb strings.Builder
sb.WriteString(dmsWarningHeader)
sb.WriteString("binds {\n")
for _, bind := range regularBinds {
n.writeBindNode(&sb, bind, " ")
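generateBindsContent above emits a KDL binds {} block, in one variant prefixed with dmsWarningHeader. Since overrideBind and writeBindNode are not shown here, the following standalone sketch only reproduces the output shape for simple spawn binds, matching the expectations in the test file below (Mod+T hotkey-overlay-title="Open Terminal" { spawn "kitty"; }):

package main

import (
    "fmt"
    "sort"
    "strings"
)

const dmsWarningHeader = `// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
`

// renderBinds mimics the generated KDL: one spawn bind per key, each with a
// hotkey-overlay-title, wrapped in a binds {} block behind the warning header.
func renderBinds(binds map[string][]string, titles map[string]string) string {
    keys := make([]string, 0, len(binds))
    for k := range binds {
        keys = append(keys, k)
    }
    sort.Strings(keys)

    var sb strings.Builder
    sb.WriteString(dmsWarningHeader)
    sb.WriteString("binds {\n")
    for _, key := range keys {
        args := make([]string, len(binds[key]))
        for i, a := range binds[key] {
            args[i] = fmt.Sprintf("%q", a)
        }
        sb.WriteString(fmt.Sprintf("    %s hotkey-overlay-title=%q { spawn %s; }\n",
            key, titles[key], strings.Join(args, " ")))
    }
    sb.WriteString("}\n")
    return sb.String()
}

func main() {
    fmt.Print(renderBinds(
        map[string][]string{"Mod+T": {"kitty"}},
        map[string]string{"Mod+T": "Open Terminal"},
    ))
}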

View File

@@ -6,13 +6,6 @@ import (
"testing"
)
const testHeader = `// ! DO NOT EDIT !
// ! AUTO-GENERATED BY DMS !
// ! CHANGES WILL BE OVERWRITTEN !
// ! PLACE YOUR CUSTOM CONFIGURATION ELSEWHERE !
`
func TestNiriProviderName(t *testing.T) {
provider := NewNiriProvider("")
if provider.Name() != "niri" {
@@ -204,7 +197,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
{
name: "empty binds",
binds: map[string]*overrideBind{},
expected: testHeader + "binds {}\n",
expected: "binds {}\n",
},
{
name: "simple spawn bind",
@@ -215,7 +208,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Description: "Open Terminal",
},
},
expected: testHeader + `binds {
expected: `binds {
Mod+T hotkey-overlay-title="Open Terminal" { spawn "kitty"; }
}
`,
@@ -229,7 +222,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Description: "Application Launcher",
},
},
expected: testHeader + `binds {
expected: `binds {
Mod+Space hotkey-overlay-title="Application Launcher" { spawn "dms" "ipc" "call" "spotlight" "toggle"; }
}
`,
@@ -243,7 +236,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Options: map[string]any{"allow-when-locked": true},
},
},
expected: testHeader + `binds {
expected: `binds {
XF86AudioMute allow-when-locked=true { spawn "dms" "ipc" "call" "audio" "mute"; }
}
`,
@@ -257,7 +250,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Description: "Close Window",
},
},
expected: testHeader + `binds {
expected: `binds {
Mod+Q hotkey-overlay-title="Close Window" { close-window; }
}
`,
@@ -270,7 +263,7 @@ func TestNiriGenerateBindsContent(t *testing.T) {
Action: "next-window",
},
},
expected: testHeader + `binds {
expected: `binds {
}
recent-windows {
@@ -422,7 +415,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Focus Workspace 1",
},
},
expected: testHeader + `binds {
expected: `binds {
Mod+1 hotkey-overlay-title="Focus Workspace 1" { focus-workspace 1; }
}
`,
@@ -436,7 +429,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Focus Workspace 10",
},
},
expected: testHeader + `binds {
expected: `binds {
Mod+0 hotkey-overlay-title="Focus Workspace 10" { focus-workspace 10; }
}
`,
@@ -450,7 +443,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Adjust Column Width -10%",
},
},
expected: testHeader + `binds {
expected: `binds {
Super+Minus hotkey-overlay-title="Adjust Column Width -10%" { set-column-width "-10%"; }
}
`,
@@ -464,7 +457,7 @@ func TestNiriGenerateBindsContentNumericArgs(t *testing.T) {
Description: "Adjust Column Width +10%",
},
},
expected: testHeader + `binds {
expected: `binds {
Super+Equal hotkey-overlay-title="Adjust Column Width +10%" { set-column-width "+10%"; }
}
`,
@@ -493,7 +486,7 @@ func TestNiriGenerateActionWithUnquotedPercentArg(t *testing.T) {
}
content := provider.generateBindsContent(binds)
expected := testHeader + `binds {
expected := `binds {
Super+Equal hotkey-overlay-title="Adjust Window Height +10%" { set-window-height "+10%"; }
}
`
@@ -514,7 +507,7 @@ func TestNiriGenerateSpawnWithNumericArgs(t *testing.T) {
}
content := provider.generateBindsContent(binds)
expected := testHeader + `binds {
expected := `binds {
XF86AudioLowerVolume allow-when-locked=true { spawn "dms" "ipc" "call" "audio" "decrement" "3"; }
}
`
@@ -535,7 +528,7 @@ func TestNiriGenerateSpawnNumericArgFromCLI(t *testing.T) {
}
content := provider.generateBindsContent(binds)
expected := testHeader + `binds {
expected := `binds {
XF86AudioLowerVolume allow-when-locked=true { spawn "dms" "ipc" "call" "audio" "decrement" "3"; }
}
`

View File

@@ -16,22 +16,6 @@ import (
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
)
type ColorMode string
const (
ColorModeDark ColorMode = "dark"
ColorModeLight ColorMode = "light"
)
func (c *ColorMode) GTKTheme() string {
switch *c {
case ColorModeDark:
return "adw-gtk3-dark"
default:
return "adw-gtk3"
}
}
var (
matugenVersionOnce sync.Once
matugenSupportsCOE bool
@@ -43,14 +27,13 @@ type Options struct {
ConfigDir string
Kind string
Value string
Mode ColorMode
Mode string
IconTheme string
MatugenType string
RunUserTemplates bool
StockColors string
SyncModeWithPortal bool
TerminalsAlwaysDark bool
SkipTemplates string
}
type ColorsOutput struct {
@@ -64,18 +47,6 @@ func (o *Options) ColorsOutput() string {
return filepath.Join(o.StateDir, "dms-colors.json")
}
func (o *Options) ShouldSkipTemplate(name string) bool {
if o.SkipTemplates == "" {
return false
}
for _, skip := range strings.Split(o.SkipTemplates, ",") {
if strings.TrimSpace(skip) == name {
return true
}
}
return false
}
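ShouldSkipTemplate above is a whitespace-tolerant membership test over a comma-separated list of template names. A small standalone demonstration of the same check (the CLI flag that feeds SkipTemplates is not shown in this hunk):

package main

import (
    "fmt"
    "strings"
)

// shouldSkip mirrors Options.ShouldSkipTemplate above: split on commas,
// trim whitespace, compare exact names.
func shouldSkip(skipTemplates, name string) bool {
    if skipTemplates == "" {
        return false
    }
    for _, skip := range strings.Split(skipTemplates, ",") {
        if strings.TrimSpace(skip) == name {
            return true
        }
    }
    return false
}

func main() {
    fmt.Println(shouldSkip("gtk, qt5ct,firefox", "qt5ct")) // true
    fmt.Println(shouldSkip("gtk, qt5ct,firefox", "kitty")) // false
}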
func Run(opts Options) error {
if opts.StateDir == "" {
return fmt.Errorf("state-dir is required")
@@ -93,7 +64,7 @@ func Run(opts Options) error {
return fmt.Errorf("value is required")
}
if opts.Mode == "" {
opts.Mode = ColorModeDark
opts.Mode = "dark"
}
if opts.MatugenType == "" {
opts.MatugenType = "scheme-tonal-spot"
@@ -161,7 +132,7 @@ func buildOnce(opts *Options) error {
importArgs = []string{"--import-json-string", importData}
log.Info("Running matugen color hex with stock color overrides")
args := []string{"color", "hex", primaryDark, "-m", string(opts.Mode), "-t", opts.MatugenType, "-c", cfgFile.Name()}
args := []string{"color", "hex", primaryDark, "-m", opts.Mode, "-t", opts.MatugenType, "-c", cfgFile.Name()}
args = append(args, importArgs...)
if err := runMatugen(args); err != nil {
return err
@@ -197,7 +168,7 @@ func buildOnce(opts *Options) error {
default:
args = []string{opts.Kind, opts.Value}
}
args = append(args, "-m", string(opts.Mode), "-t", opts.MatugenType, "-c", cfgFile.Name())
args = append(args, "-m", opts.Mode, "-t", opts.MatugenType, "-c", cfgFile.Name())
args = append(args, importArgs...)
if err := runMatugen(args); err != nil {
return err
@@ -247,74 +218,34 @@ output_path = '%s'
`, opts.ShellDir, opts.ColorsOutput())
if !opts.ShouldSkipTemplate("gtk") {
switch opts.Mode {
case "light":
appendConfig(opts, cfgFile, nil, nil, "gtk3-light.toml")
default:
appendConfig(opts, cfgFile, nil, nil, "gtk3-dark.toml")
}
switch opts.Mode {
case "light":
appendConfig(opts, cfgFile, "skip", "gtk3-light.toml")
default:
appendConfig(opts, cfgFile, "skip", "gtk3-dark.toml")
}
if !opts.ShouldSkipTemplate("niri") {
appendConfig(opts, cfgFile, []string{"niri"}, nil, "niri.toml")
}
if !opts.ShouldSkipTemplate("qt5ct") {
appendConfig(opts, cfgFile, []string{"qt5ct"}, nil, "qt5ct.toml")
}
if !opts.ShouldSkipTemplate("qt6ct") {
appendConfig(opts, cfgFile, []string{"qt6ct"}, nil, "qt6ct.toml")
}
if !opts.ShouldSkipTemplate("firefox") {
appendConfig(opts, cfgFile, []string{"firefox"}, nil, "firefox.toml")
}
if !opts.ShouldSkipTemplate("pywalfox") {
appendConfig(opts, cfgFile, []string{"pywalfox"}, nil, "pywalfox.toml")
}
if !opts.ShouldSkipTemplate("zenbrowser") {
appendConfig(opts, cfgFile, []string{"zen", "zen-browser"}, []string{"app.zen_browser.zen"}, "zenbrowser.toml")
}
if !opts.ShouldSkipTemplate("vesktop") {
appendConfig(opts, cfgFile, []string{"vesktop"}, []string{"dev.vencord.Vesktop"}, "vesktop.toml")
}
if !opts.ShouldSkipTemplate("equibop") {
appendConfig(opts, cfgFile, []string{"equibop"}, nil, "equibop.toml")
}
if !opts.ShouldSkipTemplate("ghostty") {
appendTerminalConfig(opts, cfgFile, tmpDir, []string{"ghostty"}, nil, "ghostty.toml")
}
if !opts.ShouldSkipTemplate("kitty") {
appendTerminalConfig(opts, cfgFile, tmpDir, []string{"kitty"}, nil, "kitty.toml")
}
if !opts.ShouldSkipTemplate("foot") {
appendTerminalConfig(opts, cfgFile, tmpDir, []string{"foot"}, nil, "foot.toml")
}
if !opts.ShouldSkipTemplate("alacritty") {
appendTerminalConfig(opts, cfgFile, tmpDir, []string{"alacritty"}, nil, "alacritty.toml")
}
if !opts.ShouldSkipTemplate("wezterm") {
appendTerminalConfig(opts, cfgFile, tmpDir, []string{"wezterm"}, nil, "wezterm.toml")
}
if !opts.ShouldSkipTemplate("nvim") {
appendTerminalConfig(opts, cfgFile, tmpDir, []string{"nvim"}, nil, "neovim.toml")
}
appendConfig(opts, cfgFile, "niri", "niri.toml")
appendConfig(opts, cfgFile, "qt5ct", "qt5ct.toml")
appendConfig(opts, cfgFile, "qt6ct", "qt6ct.toml")
appendConfig(opts, cfgFile, "firefox", "firefox.toml")
appendConfig(opts, cfgFile, "pywalfox", "pywalfox.toml")
appendConfig(opts, cfgFile, "vesktop", "vesktop.toml")
if !opts.ShouldSkipTemplate("dgop") {
appendConfig(opts, cfgFile, []string{"dgop"}, nil, "dgop.toml")
}
appendTerminalConfig(opts, cfgFile, tmpDir, "ghostty", "ghostty.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "kitty", "kitty.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "foot", "foot.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "alacritty", "alacritty.toml")
appendTerminalConfig(opts, cfgFile, tmpDir, "wezterm", "wezterm.toml")
if !opts.ShouldSkipTemplate("kcolorscheme") {
appendConfig(opts, cfgFile, nil, nil, "kcolorscheme.toml")
}
appendConfig(opts, cfgFile, "dgop", "dgop.toml")
if !opts.ShouldSkipTemplate("vscode") {
homeDir, _ := os.UserHomeDir()
appendVSCodeConfig(cfgFile, "vscode", filepath.Join(homeDir, ".vscode/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codium", filepath.Join(homeDir, ".vscode-oss/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codeoss", filepath.Join(homeDir, ".config/Code - OSS/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "cursor", filepath.Join(homeDir, ".cursor/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "windsurf", filepath.Join(homeDir, ".windsurf/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
}
homeDir, _ := os.UserHomeDir()
appendVSCodeConfig(cfgFile, "vscode", filepath.Join(homeDir, ".vscode/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codium", filepath.Join(homeDir, ".vscode-oss/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "codeoss", filepath.Join(homeDir, ".config/Code - OSS/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "cursor", filepath.Join(homeDir, ".cursor/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
appendVSCodeConfig(cfgFile, "windsurf", filepath.Join(homeDir, ".windsurf/extensions/local.dynamic-base16-dankshell-0.0.1"), opts.ShellDir)
if opts.RunUserTemplates {
if data, err := os.ReadFile(userConfigPath); err == nil {
@@ -342,21 +273,12 @@ output_path = '%s'
return nil
}
func appendConfig(
opts *Options,
cfgFile *os.File,
checkCmd []string,
checkFlatpaks []string,
fileName string,
) {
func appendConfig(opts *Options, cfgFile *os.File, checkCmd, fileName string) {
configPath := filepath.Join(opts.ShellDir, "matugen", "configs", fileName)
if _, err := os.Stat(configPath); err != nil {
return
}
cmdExists := checkCmd == nil || utils.AnyCommandExists(checkCmd...)
flatpakExists := checkFlatpaks == nil || utils.AnyFlatpakExists(checkFlatpaks...)
if !cmdExists && !flatpakExists {
if checkCmd != "skip" && !utils.CommandExists(checkCmd) {
return
}
data, err := os.ReadFile(configPath)
@@ -367,15 +289,12 @@ func appendConfig(
cfgFile.WriteString("\n")
}
func appendTerminalConfig(opts *Options, cfgFile *os.File, tmpDir string, checkCmd []string, checkFlatpaks []string, fileName string) {
func appendTerminalConfig(opts *Options, cfgFile *os.File, tmpDir, checkCmd, fileName string) {
configPath := filepath.Join(opts.ShellDir, "matugen", "configs", fileName)
if _, err := os.Stat(configPath); err != nil {
return
}
cmdExists := checkCmd == nil || utils.AnyCommandExists(checkCmd...)
flatpakExists := checkFlatpaks == nil || utils.AnyFlatpakExists(checkFlatpaks...)
if !cmdExists && !flatpakExists {
if checkCmd != "skip" && !utils.CommandExists(checkCmd) {
return
}
data, err := os.ReadFile(configPath)
@@ -584,19 +503,19 @@ func extractNestedColor(jsonStr, colorName, variant string) string {
return color
}
func generateDank16Variants(primaryDark, primaryLight, surface string, mode ColorMode) string {
func generateDank16Variants(primaryDark, primaryLight, surface, mode string) string {
variantOpts := dank16.VariantOptions{
PrimaryDark: primaryDark,
PrimaryLight: primaryLight,
Background: surface,
UseDPS: true,
IsLightMode: mode == ColorModeLight,
IsLightMode: mode == "light",
}
variantColors := dank16.GenerateVariantPalette(variantOpts)
return dank16.GenerateVariantJSON(variantColors)
}
func refreshGTK(configDir string, mode ColorMode) {
func refreshGTK(configDir, mode string) {
gtkCSS := filepath.Join(configDir, "gtk-3.0", "gtk.css")
info, err := os.Lstat(gtkCSS)
@@ -622,7 +541,7 @@ func refreshGTK(configDir string, mode ColorMode) {
}
exec.Command("gsettings", "set", "org.gnome.desktop.interface", "gtk-theme", "").Run()
exec.Command("gsettings", "set", "org.gnome.desktop.interface", "gtk-theme", mode.GTKTheme()).Run()
exec.Command("gsettings", "set", "org.gnome.desktop.interface", "gtk-theme", "adw-gtk3-"+mode).Run()
}
func signalTerminals() {
@@ -652,9 +571,9 @@ func signalByName(name string, sig syscall.Signal) {
}
}
func syncColorScheme(mode ColorMode) {
func syncColorScheme(mode string) {
scheme := "prefer-dark"
if mode == ColorModeLight {
if mode == "light" {
scheme = "default"
}
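syncColorScheme above maps the mode to the freedesktop color-scheme values prefer-dark/default; the write itself is truncated from this hunk. The sketch below assumes the standard org.gnome.desktop.interface color-scheme gsettings key, invoked the same way as the gtk-theme calls earlier in this file:

package main

import (
    "fmt"
    "os/exec"
)

// syncColorScheme writes the portal-facing color-scheme preference so GTK
// apps and portals follow the shell's light/dark mode. Key name assumed.
func syncColorScheme(mode string) error {
    scheme := "prefer-dark"
    if mode == "light" {
        scheme = "default"
    }
    return exec.Command("gsettings", "set",
        "org.gnome.desktop.interface", "color-scheme", scheme).Run()
}

func main() {
    if err := syncColorScheme("dark"); err != nil {
        fmt.Println("gsettings not available:", err)
    }
}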

View File

@@ -1,300 +0,0 @@
package matugen
import (
"os"
"path/filepath"
"testing"
)
func TestAppendConfigBinaryExists(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "test config content"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, []string{"sh"}, nil, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) == 0 {
t.Errorf("expected config to be written when binary exists")
}
if string(output) != testConfig+"\n" {
t.Errorf("expected %q, got %q", testConfig+"\n", string(output))
}
}
func TestAppendConfigBinaryDoesNotExist(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "test config content"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, []string{"nonexistent-binary-12345"}, []string{}, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) != 0 {
t.Errorf("expected no config when binary doesn't exist, got: %q", string(output))
}
}
func TestAppendConfigFlatpakExists(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "zen config content"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, nil, []string{"app.zen_browser.zen"}, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) == 0 {
t.Errorf("expected config to be written when flatpak exists")
}
}
func TestAppendConfigFlatpakDoesNotExist(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "test config content"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, []string{}, []string{"com.nonexistent.flatpak"}, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) != 0 {
t.Errorf("expected no config when flatpak doesn't exist, got: %q", string(output))
}
}
func TestAppendConfigBothExist(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "zen config content"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, []string{"sh"}, []string{"app.zen_browser.zen"}, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) == 0 {
t.Errorf("expected config to be written when both binary and flatpak exist")
}
}
func TestAppendConfigNeitherExists(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "test config content"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, []string{"nonexistent-binary-12345"}, []string{"com.nonexistent.flatpak"}, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) != 0 {
t.Errorf("expected no config when neither exists, got: %q", string(output))
}
}
func TestAppendConfigNoChecks(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
testConfig := "always include"
configPath := filepath.Join(configsDir, "test.toml")
if err := os.WriteFile(configPath, []byte(testConfig), 0644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, nil, nil, "test.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) == 0 {
t.Errorf("expected config to be written when no checks specified")
}
}
func TestAppendConfigFileDoesNotExist(t *testing.T) {
tempDir := t.TempDir()
shellDir := filepath.Join(tempDir, "shell")
configsDir := filepath.Join(shellDir, "matugen", "configs")
if err := os.MkdirAll(configsDir, 0755); err != nil {
t.Fatalf("failed to create configs dir: %v", err)
}
outFile := filepath.Join(tempDir, "output.toml")
cfgFile, err := os.Create(outFile)
if err != nil {
t.Fatalf("failed to create output file: %v", err)
}
defer cfgFile.Close()
opts := &Options{ShellDir: shellDir}
appendConfig(opts, cfgFile, nil, nil, "nonexistent.toml")
cfgFile.Close()
output, err := os.ReadFile(outFile)
if err != nil {
t.Fatalf("failed to read output: %v", err)
}
if len(output) != 0 {
t.Errorf("expected no config when file doesn't exist, got: %q", string(output))
}
}

View File

@@ -1,229 +0,0 @@
// Code generated by mockery v2.53.5. DO NOT EDIT.
package mocks_wlclient
import (
client "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
mock "github.com/stretchr/testify/mock"
)
// MockWaylandDisplay is an autogenerated mock type for the WaylandDisplay type
type MockWaylandDisplay struct {
mock.Mock
}
type MockWaylandDisplay_Expecter struct {
mock *mock.Mock
}
func (_m *MockWaylandDisplay) EXPECT() *MockWaylandDisplay_Expecter {
return &MockWaylandDisplay_Expecter{mock: &_m.Mock}
}
// Context provides a mock function with no fields
func (_m *MockWaylandDisplay) Context() *client.Context {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Context")
}
var r0 *client.Context
if rf, ok := ret.Get(0).(func() *client.Context); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*client.Context)
}
}
return r0
}
// MockWaylandDisplay_Context_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Context'
type MockWaylandDisplay_Context_Call struct {
*mock.Call
}
// Context is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) Context() *MockWaylandDisplay_Context_Call {
return &MockWaylandDisplay_Context_Call{Call: _e.mock.On("Context")}
}
func (_c *MockWaylandDisplay_Context_Call) Run(run func()) *MockWaylandDisplay_Context_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_Context_Call) Return(_a0 *client.Context) *MockWaylandDisplay_Context_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandDisplay_Context_Call) RunAndReturn(run func() *client.Context) *MockWaylandDisplay_Context_Call {
_c.Call.Return(run)
return _c
}
// Destroy provides a mock function with no fields
func (_m *MockWaylandDisplay) Destroy() error {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Destroy")
}
var r0 error
if rf, ok := ret.Get(0).(func() error); ok {
r0 = rf()
} else {
r0 = ret.Error(0)
}
return r0
}
// MockWaylandDisplay_Destroy_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Destroy'
type MockWaylandDisplay_Destroy_Call struct {
*mock.Call
}
// Destroy is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) Destroy() *MockWaylandDisplay_Destroy_Call {
return &MockWaylandDisplay_Destroy_Call{Call: _e.mock.On("Destroy")}
}
func (_c *MockWaylandDisplay_Destroy_Call) Run(run func()) *MockWaylandDisplay_Destroy_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_Destroy_Call) Return(_a0 error) *MockWaylandDisplay_Destroy_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandDisplay_Destroy_Call) RunAndReturn(run func() error) *MockWaylandDisplay_Destroy_Call {
_c.Call.Return(run)
return _c
}
// GetRegistry provides a mock function with no fields
func (_m *MockWaylandDisplay) GetRegistry() (*client.Registry, error) {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for GetRegistry")
}
var r0 *client.Registry
var r1 error
if rf, ok := ret.Get(0).(func() (*client.Registry, error)); ok {
return rf()
}
if rf, ok := ret.Get(0).(func() *client.Registry); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*client.Registry)
}
}
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// MockWaylandDisplay_GetRegistry_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRegistry'
type MockWaylandDisplay_GetRegistry_Call struct {
*mock.Call
}
// GetRegistry is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) GetRegistry() *MockWaylandDisplay_GetRegistry_Call {
return &MockWaylandDisplay_GetRegistry_Call{Call: _e.mock.On("GetRegistry")}
}
func (_c *MockWaylandDisplay_GetRegistry_Call) Run(run func()) *MockWaylandDisplay_GetRegistry_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_GetRegistry_Call) Return(_a0 *client.Registry, _a1 error) *MockWaylandDisplay_GetRegistry_Call {
_c.Call.Return(_a0, _a1)
return _c
}
func (_c *MockWaylandDisplay_GetRegistry_Call) RunAndReturn(run func() (*client.Registry, error)) *MockWaylandDisplay_GetRegistry_Call {
_c.Call.Return(run)
return _c
}
// Roundtrip provides a mock function with no fields
func (_m *MockWaylandDisplay) Roundtrip() error {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Roundtrip")
}
var r0 error
if rf, ok := ret.Get(0).(func() error); ok {
r0 = rf()
} else {
r0 = ret.Error(0)
}
return r0
}
// MockWaylandDisplay_Roundtrip_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Roundtrip'
type MockWaylandDisplay_Roundtrip_Call struct {
*mock.Call
}
// Roundtrip is a helper method to define mock.On call
func (_e *MockWaylandDisplay_Expecter) Roundtrip() *MockWaylandDisplay_Roundtrip_Call {
return &MockWaylandDisplay_Roundtrip_Call{Call: _e.mock.On("Roundtrip")}
}
func (_c *MockWaylandDisplay_Roundtrip_Call) Run(run func()) *MockWaylandDisplay_Roundtrip_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandDisplay_Roundtrip_Call) Return(_a0 error) *MockWaylandDisplay_Roundtrip_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandDisplay_Roundtrip_Call) RunAndReturn(run func() error) *MockWaylandDisplay_Roundtrip_Call {
_c.Call.Return(run)
return _c
}
// NewMockWaylandDisplay creates a new instance of MockWaylandDisplay. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockWaylandDisplay(t interface {
mock.TestingT
Cleanup(func())
}) *MockWaylandDisplay {
mock := &MockWaylandDisplay{}
mock.Mock.Test(t)
t.Cleanup(func() { mock.AssertExpectations(t) })
return mock
}
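The generated mock above follows mockery's expecter pattern. As a hedged usage sketch (the test is placed in the same package purely for illustration; only the mock's own methods are taken from the file above), a test stubs a call, exercises it, and relies on the registered cleanup to assert expectations:

package mocks_wlclient

import (
    "errors"
    "testing"
)

// TestRoundtripFailure sketches expecter-style usage: declare expectations up
// front, then exercise the code under test against the mock.
func TestRoundtripFailure(t *testing.T) {
    display := NewMockWaylandDisplay(t) // cleanup + AssertExpectations registered automatically

    display.EXPECT().Roundtrip().Return(errors.New("connection closed")).Once()

    if err := display.Roundtrip(); err == nil {
        t.Fatal("expected the stubbed roundtrip error")
    }
    // Unmet or unexpected calls fail the test when the registered cleanup runs.
}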

View File

@@ -1,226 +0,0 @@
// Code generated by mockery v2.53.5. DO NOT EDIT.
package mocks_wlcontext
import (
client "github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
mock "github.com/stretchr/testify/mock"
)
// MockWaylandContext is an autogenerated mock type for the WaylandContext type
type MockWaylandContext struct {
mock.Mock
}
type MockWaylandContext_Expecter struct {
mock *mock.Mock
}
func (_m *MockWaylandContext) EXPECT() *MockWaylandContext_Expecter {
return &MockWaylandContext_Expecter{mock: &_m.Mock}
}
// Close provides a mock function with no fields
func (_m *MockWaylandContext) Close() {
_m.Called()
}
// MockWaylandContext_Close_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Close'
type MockWaylandContext_Close_Call struct {
*mock.Call
}
// Close is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) Close() *MockWaylandContext_Close_Call {
return &MockWaylandContext_Close_Call{Call: _e.mock.On("Close")}
}
func (_c *MockWaylandContext_Close_Call) Run(run func()) *MockWaylandContext_Close_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_Close_Call) Return() *MockWaylandContext_Close_Call {
_c.Call.Return()
return _c
}
func (_c *MockWaylandContext_Close_Call) RunAndReturn(run func()) *MockWaylandContext_Close_Call {
_c.Run(run)
return _c
}
// Display provides a mock function with no fields
func (_m *MockWaylandContext) Display() *client.Display {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for Display")
}
var r0 *client.Display
if rf, ok := ret.Get(0).(func() *client.Display); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*client.Display)
}
}
return r0
}
// MockWaylandContext_Display_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Display'
type MockWaylandContext_Display_Call struct {
*mock.Call
}
// Display is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) Display() *MockWaylandContext_Display_Call {
return &MockWaylandContext_Display_Call{Call: _e.mock.On("Display")}
}
func (_c *MockWaylandContext_Display_Call) Run(run func()) *MockWaylandContext_Display_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_Display_Call) Return(_a0 *client.Display) *MockWaylandContext_Display_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandContext_Display_Call) RunAndReturn(run func() *client.Display) *MockWaylandContext_Display_Call {
_c.Call.Return(run)
return _c
}
// FatalError provides a mock function with no fields
func (_m *MockWaylandContext) FatalError() <-chan error {
ret := _m.Called()
if len(ret) == 0 {
panic("no return value specified for FatalError")
}
var r0 <-chan error
if rf, ok := ret.Get(0).(func() <-chan error); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(<-chan error)
}
}
return r0
}
// MockWaylandContext_FatalError_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FatalError'
type MockWaylandContext_FatalError_Call struct {
*mock.Call
}
// FatalError is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) FatalError() *MockWaylandContext_FatalError_Call {
return &MockWaylandContext_FatalError_Call{Call: _e.mock.On("FatalError")}
}
func (_c *MockWaylandContext_FatalError_Call) Run(run func()) *MockWaylandContext_FatalError_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_FatalError_Call) Return(_a0 <-chan error) *MockWaylandContext_FatalError_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockWaylandContext_FatalError_Call) RunAndReturn(run func() <-chan error) *MockWaylandContext_FatalError_Call {
_c.Call.Return(run)
return _c
}
// Post provides a mock function with given fields: fn
func (_m *MockWaylandContext) Post(fn func()) {
_m.Called(fn)
}
// MockWaylandContext_Post_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Post'
type MockWaylandContext_Post_Call struct {
*mock.Call
}
// Post is a helper method to define mock.On call
// - fn func()
func (_e *MockWaylandContext_Expecter) Post(fn interface{}) *MockWaylandContext_Post_Call {
return &MockWaylandContext_Post_Call{Call: _e.mock.On("Post", fn)}
}
func (_c *MockWaylandContext_Post_Call) Run(run func(fn func())) *MockWaylandContext_Post_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(func()))
})
return _c
}
func (_c *MockWaylandContext_Post_Call) Return() *MockWaylandContext_Post_Call {
_c.Call.Return()
return _c
}
func (_c *MockWaylandContext_Post_Call) RunAndReturn(run func(func())) *MockWaylandContext_Post_Call {
_c.Run(run)
return _c
}
// Start provides a mock function with no fields
func (_m *MockWaylandContext) Start() {
_m.Called()
}
// MockWaylandContext_Start_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Start'
type MockWaylandContext_Start_Call struct {
*mock.Call
}
// Start is a helper method to define mock.On call
func (_e *MockWaylandContext_Expecter) Start() *MockWaylandContext_Start_Call {
return &MockWaylandContext_Start_Call{Call: _e.mock.On("Start")}
}
func (_c *MockWaylandContext_Start_Call) Run(run func()) *MockWaylandContext_Start_Call {
_c.Call.Run(func(args mock.Arguments) {
run()
})
return _c
}
func (_c *MockWaylandContext_Start_Call) Return() *MockWaylandContext_Start_Call {
_c.Call.Return()
return _c
}
func (_c *MockWaylandContext_Start_Call) RunAndReturn(run func()) *MockWaylandContext_Start_Call {
_c.Run(run)
return _c
}
// NewMockWaylandContext creates a new instance of MockWaylandContext. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockWaylandContext(t interface {
mock.TestingT
Cleanup(func())
}) *MockWaylandContext {
mock := &MockWaylandContext{}
mock.Mock.Test(t)
t.Cleanup(func() { mock.AssertExpectations(t) })
return mock
}

View File

@@ -9,7 +9,7 @@ import (
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
"github.com/spf13/afero"
)
@@ -33,12 +33,7 @@ func NewManagerWithFs(fs afero.Fs) (*Manager, error) {
}
func getPluginsDir() string {
configDir, err := os.UserConfigDir()
if err != nil {
log.Error("failed to get user config dir", "err", err)
return ""
}
return filepath.Join(configDir, "DankMaterialShell", "plugins")
return filepath.Join(utils.XDGConfigHome(), "DankMaterialShell", "plugins")
}
func (m *Manager) IsInstalled(plugin Plugin) (bool, error) {

View File

@@ -1,695 +0,0 @@
// Generated by go-wayland-scanner
// https://github.com/yaslama/go-wayland/cmd/go-wayland-scanner
// XML file : internal/proto/xml/ext-data-control-v1.xml
//
// ext_data_control_v1 Protocol Copyright:
//
// Copyright © 2018 Simon Ser
// Copyright © 2019 Ivan Molodetskikh
// Copyright © 2024 Neal Gompa
//
// Permission to use, copy, modify, distribute, and sell this
// software and its documentation for any purpose is hereby granted
// without fee, provided that the above copyright notice appear in
// all copies and that both that copyright notice and this permission
// notice appear in supporting documentation, and that the name of
// the copyright holders not be used in advertising or publicity
// pertaining to distribution of the software without specific,
// written prior permission. The copyright holders make no
// representations about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
//
// THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS
// SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
// SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
// AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
// ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
// THIS SOFTWARE.
package ext_data_control
import (
"github.com/AvengeMedia/DankMaterialShell/core/pkg/go-wayland/wayland/client"
"golang.org/x/sys/unix"
)
// ExtDataControlManagerV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlManagerV1InterfaceName = "ext_data_control_manager_v1"
// ExtDataControlManagerV1 : manager to control data devices
//
// This interface is a manager that allows creating per-seat data device
// controls.
type ExtDataControlManagerV1 struct {
client.BaseProxy
}
// NewExtDataControlManagerV1 : manager to control data devices
//
// This interface is a manager that allows creating per-seat data device
// controls.
func NewExtDataControlManagerV1(ctx *client.Context) *ExtDataControlManagerV1 {
extDataControlManagerV1 := &ExtDataControlManagerV1{}
ctx.Register(extDataControlManagerV1)
return extDataControlManagerV1
}
// CreateDataSource : create a new data source
//
// Create a new data source.
func (i *ExtDataControlManagerV1) CreateDataSource() (*ExtDataControlSourceV1, error) {
id := NewExtDataControlSourceV1(i.Context())
const opcode = 0
const _reqBufLen = 8 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutUint32(_reqBuf[l:l+4], id.ID())
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return id, err
}
// GetDataDevice : get a data device for a seat
//
// Create a data device that can be used to manage a seat's selection.
func (i *ExtDataControlManagerV1) GetDataDevice(seat *client.Seat) (*ExtDataControlDeviceV1, error) {
id := NewExtDataControlDeviceV1(i.Context())
const opcode = 1
const _reqBufLen = 8 + 4 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutUint32(_reqBuf[l:l+4], id.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], seat.ID())
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return id, err
}
// GetDataDeviceWithProxy : get a data device for a seat using a pre-created proxy
//
// Like GetDataDevice, but uses a pre-created ExtDataControlDeviceV1 proxy.
// This allows setting up event handlers before the request is sent.
func (i *ExtDataControlManagerV1) GetDataDeviceWithProxy(device *ExtDataControlDeviceV1, seat *client.Seat) error {
const opcode = 1
const _reqBufLen = 8 + 4 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutUint32(_reqBuf[l:l+4], device.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], seat.ID())
l += 4
return i.Context().WriteMsg(_reqBuf[:], nil)
}
// Destroy : destroy the manager
//
// All objects created by the manager will still remain valid, until their
// appropriate destroy request has been called.
func (i *ExtDataControlManagerV1) Destroy() error {
defer i.MarkZombie()
const opcode = 2
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// ExtDataControlDeviceV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlDeviceV1InterfaceName = "ext_data_control_device_v1"
// ExtDataControlDeviceV1 : manage a data device for a seat
//
// This interface allows a client to manage a seat's selection.
//
// When the seat is destroyed, this object becomes inert.
type ExtDataControlDeviceV1 struct {
client.BaseProxy
dataOfferHandler ExtDataControlDeviceV1DataOfferHandlerFunc
selectionHandler ExtDataControlDeviceV1SelectionHandlerFunc
finishedHandler ExtDataControlDeviceV1FinishedHandlerFunc
primarySelectionHandler ExtDataControlDeviceV1PrimarySelectionHandlerFunc
}
// NewExtDataControlDeviceV1 : manage a data device for a seat
//
// This interface allows a client to manage a seat's selection.
//
// When the seat is destroyed, this object becomes inert.
func NewExtDataControlDeviceV1(ctx *client.Context) *ExtDataControlDeviceV1 {
extDataControlDeviceV1 := &ExtDataControlDeviceV1{}
ctx.Register(extDataControlDeviceV1)
return extDataControlDeviceV1
}
// SetSelection : copy data to the selection
//
// This request asks the compositor to set the selection to the data from
// the source on behalf of the client.
//
// The given source may not be used in any further set_selection or
// set_primary_selection requests. Attempting to use a previously used
// source triggers the used_source protocol error.
//
// To unset the selection, set the source to NULL.
func (i *ExtDataControlDeviceV1) SetSelection(source *ExtDataControlSourceV1) error {
const opcode = 0
const _reqBufLen = 8 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
if source == nil {
client.PutUint32(_reqBuf[l:l+4], 0)
l += 4
} else {
client.PutUint32(_reqBuf[l:l+4], source.ID())
l += 4
}
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// Destroy : destroy this data device
//
// Destroys the data device object.
func (i *ExtDataControlDeviceV1) Destroy() error {
defer i.MarkZombie()
const opcode = 1
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// SetPrimarySelection : copy data to the primary selection
//
// This request asks the compositor to set the primary selection to the
// data from the source on behalf of the client.
//
// The given source may not be used in any further set_selection or
// set_primary_selection requests. Attempting to use a previously used
// source triggers the used_source protocol error.
//
// To unset the primary selection, set the source to NULL.
//
// The compositor will ignore this request if it does not support primary
// selection.
func (i *ExtDataControlDeviceV1) SetPrimarySelection(source *ExtDataControlSourceV1) error {
const opcode = 2
const _reqBufLen = 8 + 4
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
if source == nil {
client.PutUint32(_reqBuf[l:l+4], 0)
l += 4
} else {
client.PutUint32(_reqBuf[l:l+4], source.ID())
l += 4
}
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
type ExtDataControlDeviceV1Error uint32
// ExtDataControlDeviceV1Error :
const (
// ExtDataControlDeviceV1ErrorUsedSource : source given to set_selection or set_primary_selection was already used before
ExtDataControlDeviceV1ErrorUsedSource ExtDataControlDeviceV1Error = 1
)
func (e ExtDataControlDeviceV1Error) Name() string {
switch e {
case ExtDataControlDeviceV1ErrorUsedSource:
return "used_source"
default:
return ""
}
}
func (e ExtDataControlDeviceV1Error) Value() string {
switch e {
case ExtDataControlDeviceV1ErrorUsedSource:
return "1"
default:
return ""
}
}
func (e ExtDataControlDeviceV1Error) String() string {
return e.Name() + "=" + e.Value()
}
// ExtDataControlDeviceV1DataOfferEvent : introduce a new ext_data_control_offer
//
// The data_offer event introduces a new ext_data_control_offer object,
// which will subsequently be used in either the
// ext_data_control_device.selection event (for the regular clipboard
// selections) or the ext_data_control_device.primary_selection event (for
// the primary clipboard selections). Immediately following the
// ext_data_control_device.data_offer event, the new data_offer object
// will send out ext_data_control_offer.offer events to describe the MIME
// types it offers.
type ExtDataControlDeviceV1DataOfferEvent struct {
Id *ExtDataControlOfferV1
}
type ExtDataControlDeviceV1DataOfferHandlerFunc func(ExtDataControlDeviceV1DataOfferEvent)
// SetDataOfferHandler : sets handler for ExtDataControlDeviceV1DataOfferEvent
func (i *ExtDataControlDeviceV1) SetDataOfferHandler(f ExtDataControlDeviceV1DataOfferHandlerFunc) {
i.dataOfferHandler = f
}
// ExtDataControlDeviceV1SelectionEvent : advertise new selection
//
// The selection event is sent out to notify the client of a new
// ext_data_control_offer for the selection for this device. The
// ext_data_control_device.data_offer and the ext_data_control_offer.offer
// events are sent out immediately before this event to introduce the data
// offer object. The selection event is sent to a client when a new
// selection is set. The ext_data_control_offer is valid until a new
// ext_data_control_offer or NULL is received. The client must destroy the
// previous selection ext_data_control_offer, if any, upon receiving this
// event. Regardless, the previous selection will be ignored once a new
// selection ext_data_control_offer is received.
//
// The first selection event is sent upon binding the
// ext_data_control_device object.
type ExtDataControlDeviceV1SelectionEvent struct {
Id *ExtDataControlOfferV1
OfferId uint32 // Raw object ID for external registry lookups
}
type ExtDataControlDeviceV1SelectionHandlerFunc func(ExtDataControlDeviceV1SelectionEvent)
// SetSelectionHandler : sets handler for ExtDataControlDeviceV1SelectionEvent
func (i *ExtDataControlDeviceV1) SetSelectionHandler(f ExtDataControlDeviceV1SelectionHandlerFunc) {
i.selectionHandler = f
}
// ExtDataControlDeviceV1FinishedEvent : this data control is no longer valid
//
// This data control object is no longer valid and should be destroyed by
// the client.
type ExtDataControlDeviceV1FinishedEvent struct{}
type ExtDataControlDeviceV1FinishedHandlerFunc func(ExtDataControlDeviceV1FinishedEvent)
// SetFinishedHandler : sets handler for ExtDataControlDeviceV1FinishedEvent
func (i *ExtDataControlDeviceV1) SetFinishedHandler(f ExtDataControlDeviceV1FinishedHandlerFunc) {
i.finishedHandler = f
}
// ExtDataControlDeviceV1PrimarySelectionEvent : advertise new primary selection
//
// The primary_selection event is sent out to notify the client of a new
// ext_data_control_offer for the primary selection for this device. The
// ext_data_control_device.data_offer and the ext_data_control_offer.offer
// events are sent out immediately before this event to introduce the data
// offer object. The primary_selection event is sent to a client when a
// new primary selection is set. The ext_data_control_offer is valid until
// a new ext_data_control_offer or NULL is received. The client must
// destroy the previous primary selection ext_data_control_offer, if any,
// upon receiving this event. Regardless, the previous primary selection
// will be ignored once a new primary selection ext_data_control_offer is
// received.
//
// If the compositor supports primary selection, the first
// primary_selection event is sent upon binding the
// ext_data_control_device object.
type ExtDataControlDeviceV1PrimarySelectionEvent struct {
Id *ExtDataControlOfferV1
OfferId uint32 // Raw object ID for external registry lookups
}
type ExtDataControlDeviceV1PrimarySelectionHandlerFunc func(ExtDataControlDeviceV1PrimarySelectionEvent)
// SetPrimarySelectionHandler : sets handler for ExtDataControlDeviceV1PrimarySelectionEvent
func (i *ExtDataControlDeviceV1) SetPrimarySelectionHandler(f ExtDataControlDeviceV1PrimarySelectionHandlerFunc) {
i.primarySelectionHandler = f
}
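The events above arrive in a fixed order: data_offer introduces the offer object, ext_data_control_offer.offer announces its MIME types, and selection/primary_selection then point at that offer (or at NULL to clear). A hedged sketch of wiring these handlers to track the current clipboard offer; registry binding, seat lookup, and event-loop pumping are deliberately omitted:

package ext_data_control

// selectionTracker keeps the latest selection offer and the MIME types it
// advertised. Illustrative only; not part of the generated bindings.
type selectionTracker struct {
    current *ExtDataControlOfferV1
    mimes   map[*ExtDataControlOfferV1][]string
}

func (s *selectionTracker) attach(device *ExtDataControlDeviceV1) {
    s.mimes = map[*ExtDataControlOfferV1][]string{}

    device.SetDataOfferHandler(func(e ExtDataControlDeviceV1DataOfferEvent) {
        offer := e.Id
        // offer events for this object arrive before the selection event.
        offer.SetOfferHandler(func(oe ExtDataControlOfferV1OfferEvent) {
            s.mimes[offer] = append(s.mimes[offer], oe.MimeType)
        })
    })

    device.SetSelectionHandler(func(e ExtDataControlDeviceV1SelectionEvent) {
        // A nil Id means the selection was cleared; destroy the stale offer.
        if s.current != nil && s.current != e.Id {
            s.current.Destroy()
            delete(s.mimes, s.current)
        }
        s.current = e.Id
    })
}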
func (i *ExtDataControlDeviceV1) Dispatch(opcode uint32, fd int, data []byte) {
switch opcode {
case 0:
// data_offer event: server creates a new object (new_id)
if i.dataOfferHandler == nil {
return
}
var e ExtDataControlDeviceV1DataOfferEvent
l := 0
newID := client.Uint32(data[l : l+4])
l += 4
ctx := i.Context()
offer := &ExtDataControlOfferV1{}
offer.SetContext(ctx)
offer.SetID(newID)
ctx.RegisterWithID(offer, newID)
e.Id = offer
i.dataOfferHandler(e)
case 1:
// selection event: nullable object reference
if i.selectionHandler == nil {
return
}
var e ExtDataControlDeviceV1SelectionEvent
l := 0
objID := client.Uint32(data[l : l+4])
l += 4
e.OfferId = objID
if objID != 0 {
if p := i.Context().GetProxy(objID); p != nil {
e.Id = p.(*ExtDataControlOfferV1)
}
}
i.selectionHandler(e)
case 2:
if i.finishedHandler == nil {
return
}
var e ExtDataControlDeviceV1FinishedEvent
i.finishedHandler(e)
case 3:
// primary_selection event: nullable object reference
if i.primarySelectionHandler == nil {
return
}
var e ExtDataControlDeviceV1PrimarySelectionEvent
l := 0
objID := client.Uint32(data[l : l+4])
l += 4
e.OfferId = objID
if objID != 0 {
if p := i.Context().GetProxy(objID); p != nil {
e.Id = p.(*ExtDataControlOfferV1)
}
}
i.primarySelectionHandler(e)
}
}
// ExtDataControlSourceV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlSourceV1InterfaceName = "ext_data_control_source_v1"
// ExtDataControlSourceV1 : offer to transfer data
//
// The ext_data_control_source object is the source side of a
// ext_data_control_offer. It is created by the source client in a data
// transfer and provides a way to describe the offered data and a way to
// respond to requests to transfer the data.
type ExtDataControlSourceV1 struct {
client.BaseProxy
sendHandler ExtDataControlSourceV1SendHandlerFunc
cancelledHandler ExtDataControlSourceV1CancelledHandlerFunc
}
// NewExtDataControlSourceV1 : offer to transfer data
//
// The ext_data_control_source object is the source side of a
// ext_data_control_offer. It is created by the source client in a data
// transfer and provides a way to describe the offered data and a way to
// respond to requests to transfer the data.
func NewExtDataControlSourceV1(ctx *client.Context) *ExtDataControlSourceV1 {
extDataControlSourceV1 := &ExtDataControlSourceV1{}
ctx.Register(extDataControlSourceV1)
return extDataControlSourceV1
}
// Offer : add an offered MIME type
//
// This request adds a MIME type to the set of MIME types advertised to
// targets. Can be called several times to offer multiple types.
//
// Calling this after ext_data_control_device.set_selection is a protocol
// error.
//
// mimeType: MIME type offered by the data source
func (i *ExtDataControlSourceV1) Offer(mimeType string) error {
const opcode = 0
mimeTypeLen := client.PaddedLen(len(mimeType) + 1)
_reqBufLen := 8 + (4 + mimeTypeLen)
_reqBuf := make([]byte, _reqBufLen)
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutString(_reqBuf[l:l+(4+mimeTypeLen)], mimeType)
l += (4 + mimeTypeLen)
err := i.Context().WriteMsg(_reqBuf, nil)
return err
}
// Destroy : destroy this source
//
// Destroys the data source object.
func (i *ExtDataControlSourceV1) Destroy() error {
defer i.MarkZombie()
const opcode = 1
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
type ExtDataControlSourceV1Error uint32
// ExtDataControlSourceV1Error :
const (
// ExtDataControlSourceV1ErrorInvalidOffer : offer sent after ext_data_control_device.set_selection
ExtDataControlSourceV1ErrorInvalidOffer ExtDataControlSourceV1Error = 1
)
func (e ExtDataControlSourceV1Error) Name() string {
switch e {
case ExtDataControlSourceV1ErrorInvalidOffer:
return "invalid_offer"
default:
return ""
}
}
func (e ExtDataControlSourceV1Error) Value() string {
switch e {
case ExtDataControlSourceV1ErrorInvalidOffer:
return "1"
default:
return ""
}
}
func (e ExtDataControlSourceV1Error) String() string {
return e.Name() + "=" + e.Value()
}
// ExtDataControlSourceV1SendEvent : send the data
//
// Request for data from the client. Send the data as the specified MIME
// type over the passed file descriptor, then close it.
type ExtDataControlSourceV1SendEvent struct {
MimeType string
Fd int
}
type ExtDataControlSourceV1SendHandlerFunc func(ExtDataControlSourceV1SendEvent)
// SetSendHandler : sets handler for ExtDataControlSourceV1SendEvent
func (i *ExtDataControlSourceV1) SetSendHandler(f ExtDataControlSourceV1SendHandlerFunc) {
i.sendHandler = f
}
// ExtDataControlSourceV1CancelledEvent : selection was cancelled
//
// This data source is no longer valid. The data source has been replaced
// by another data source.
//
// The client should clean up and destroy this data source.
type ExtDataControlSourceV1CancelledEvent struct{}
type ExtDataControlSourceV1CancelledHandlerFunc func(ExtDataControlSourceV1CancelledEvent)
// SetCancelledHandler : sets handler for ExtDataControlSourceV1CancelledEvent
func (i *ExtDataControlSourceV1) SetCancelledHandler(f ExtDataControlSourceV1CancelledHandlerFunc) {
i.cancelledHandler = f
}
func (i *ExtDataControlSourceV1) Dispatch(opcode uint32, fd int, data []byte) {
switch opcode {
case 0:
if i.sendHandler == nil {
if fd != -1 {
unix.Close(fd)
}
return
}
var e ExtDataControlSourceV1SendEvent
l := 0
mimeTypeLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
l += 4
e.MimeType = client.String(data[l : l+mimeTypeLen])
l += mimeTypeLen
e.Fd = fd
i.sendHandler(e)
case 1:
if i.cancelledHandler == nil {
return
}
var e ExtDataControlSourceV1CancelledEvent
i.cancelledHandler(e)
}
}
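// Illustrative usage sketch, not part of the generated bindings: a source
// answers the send event by writing the payload for the requested MIME type
// to the passed fd and closing it, and destroys itself once cancelled (the
// selection was replaced). The payload map and the goroutine-per-transfer
// choice are assumptions for illustration only.
func exampleWireSource(src *ExtDataControlSourceV1, payload map[string][]byte) {
	src.SetSendHandler(func(e ExtDataControlSourceV1SendEvent) {
		data := payload[e.MimeType]
		go func(fd int, b []byte) {
			// A robust client would loop on short writes and handle EPIPE;
			// this sketch writes once for brevity.
			defer unix.Close(fd)
			_, _ = unix.Write(fd, b)
		}(e.Fd, data)
	})
	src.SetCancelledHandler(func(ExtDataControlSourceV1CancelledEvent) {
		src.Destroy()
	})
}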
// ExtDataControlOfferV1InterfaceName is the name of the interface as it appears in the [client.Registry].
// It can be used to match the [client.RegistryGlobalEvent.Interface] in the
// [Registry.SetGlobalHandler] and can be used in [Registry.Bind] if this applies.
const ExtDataControlOfferV1InterfaceName = "ext_data_control_offer_v1"
// ExtDataControlOfferV1 : offer to transfer data
//
// An ext_data_control_offer represents a piece of data offered for transfer
// by another client (the source client). The offer describes the different
// MIME types that the data can be converted to and provides the mechanism
// for transferring the data directly from the source client.
type ExtDataControlOfferV1 struct {
client.BaseProxy
offerHandler ExtDataControlOfferV1OfferHandlerFunc
}
// NewExtDataControlOfferV1 : offer to transfer data
//
// An ext_data_control_offer represents a piece of data offered for transfer
// by another client (the source client). The offer describes the different
// MIME types that the data can be converted to and provides the mechanism
// for transferring the data directly from the source client.
func NewExtDataControlOfferV1(ctx *client.Context) *ExtDataControlOfferV1 {
extDataControlOfferV1 := &ExtDataControlOfferV1{}
ctx.Register(extDataControlOfferV1)
return extDataControlOfferV1
}
// Receive : request that the data is transferred
//
// To transfer the offered data, the client issues this request and
// indicates the MIME type it wants to receive. The transfer happens
// through the passed file descriptor (typically created with the pipe
// system call). The source client writes the data in the MIME type
// representation requested and then closes the file descriptor.
//
// The receiving client reads from the read end of the pipe until EOF and
// then closes its end, at which point the transfer is complete.
//
// This request may happen multiple times for different MIME types.
//
// mimeType: MIME type desired by receiver
// fd: file descriptor for data transfer
func (i *ExtDataControlOfferV1) Receive(mimeType string, fd int) error {
const opcode = 0
mimeTypeLen := client.PaddedLen(len(mimeType) + 1)
_reqBufLen := 8 + (4 + mimeTypeLen)
_reqBuf := make([]byte, _reqBufLen)
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
client.PutString(_reqBuf[l:l+(4+mimeTypeLen)], mimeType)
l += (4 + mimeTypeLen)
oob := unix.UnixRights(int(fd))
err := i.Context().WriteMsg(_reqBuf, oob)
return err
}
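// Illustrative usage sketch, not part of the generated bindings: receiving
// offered data goes through a pipe. The write end is duplicated to the
// compositor by the receive request (via unix.UnixRights above) and closed
// locally so the read end sees EOF once the source client finishes writing.
// Error handling is abbreviated.
func exampleReceiveOffer(offer *ExtDataControlOfferV1, mimeType string) ([]byte, error) {
	var p [2]int
	if err := unix.Pipe(p[:]); err != nil {
		return nil, err
	}
	readFd, writeFd := p[0], p[1]
	if err := offer.Receive(mimeType, writeFd); err != nil {
		unix.Close(readFd)
		unix.Close(writeFd)
		return nil, err
	}
	unix.Close(writeFd)
	defer unix.Close(readFd)
	var out []byte
	buf := make([]byte, 4096)
	for {
		n, err := unix.Read(readFd, buf)
		if n > 0 {
			out = append(out, buf[:n]...)
		}
		if n <= 0 || err != nil {
			return out, err
		}
	}
}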
// Destroy : destroy this offer
//
// Destroys the data offer object.
func (i *ExtDataControlOfferV1) Destroy() error {
defer i.MarkZombie()
const opcode = 1
const _reqBufLen = 8
var _reqBuf [_reqBufLen]byte
l := 0
client.PutUint32(_reqBuf[l:4], i.ID())
l += 4
client.PutUint32(_reqBuf[l:l+4], uint32(_reqBufLen<<16|opcode&0x0000ffff))
l += 4
err := i.Context().WriteMsg(_reqBuf[:], nil)
return err
}
// ExtDataControlOfferV1OfferEvent : advertise offered MIME type
//
// Sent immediately after creating the ext_data_control_offer object.
// One event per offered MIME type.
type ExtDataControlOfferV1OfferEvent struct {
MimeType string
}
type ExtDataControlOfferV1OfferHandlerFunc func(ExtDataControlOfferV1OfferEvent)
// SetOfferHandler : sets handler for ExtDataControlOfferV1OfferEvent
func (i *ExtDataControlOfferV1) SetOfferHandler(f ExtDataControlOfferV1OfferHandlerFunc) {
i.offerHandler = f
}
func (i *ExtDataControlOfferV1) Dispatch(opcode uint32, fd int, data []byte) {
switch opcode {
case 0:
if i.offerHandler == nil {
return
}
var e ExtDataControlOfferV1OfferEvent
l := 0
mimeTypeLen := client.PaddedLen(int(client.Uint32(data[l : l+4])))
l += 4
e.MimeType = client.String(data[l : l+mimeTypeLen])
l += mimeTypeLen
i.offerHandler(e)
}
}
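// Illustrative usage sketch, not part of the generated bindings: offer events
// arrive immediately after the ext_data_control_offer object is introduced,
// one per MIME type, so a handler usually just accumulates them for a later
// receive call.
func exampleCollectMimeTypes(offer *ExtDataControlOfferV1, into *[]string) {
	offer.SetOfferHandler(func(e ExtDataControlOfferV1OfferEvent) {
		*into = append(*into, e.MimeType)
	})
}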


@@ -238,7 +238,7 @@ func (i *ZwlrOutputManagerV1) Dispatch(opcode uint32, fd int, data []byte) {
l := 0
objectID := client.Uint32(data[l : l+4])
proxy := i.Context().GetProxy(objectID)
if proxy == nil || proxy.IsZombie() {
if proxy == nil {
head := &ZwlrOutputHeadV1{}
head.SetContext(i.Context())
head.SetID(objectID)
@@ -723,7 +723,7 @@ func (i *ZwlrOutputHeadV1) Dispatch(opcode uint32, fd int, data []byte) {
l := 0
objectID := client.Uint32(data[l : l+4])
proxy := i.Context().GetProxy(objectID)
if proxy == nil || proxy.IsZombie() {
if proxy == nil {
mode := &ZwlrOutputModeV1{}
mode.SetContext(i.Context())
mode.SetID(objectID)
@@ -761,8 +761,8 @@ func (i *ZwlrOutputHeadV1) Dispatch(opcode uint32, fd int, data []byte) {
l := 0
objectID := client.Uint32(data[l : l+4])
proxy := i.Context().GetProxy(objectID)
if proxy == nil || proxy.IsZombie() {
// Mode not yet registered or zombie, create fresh
if proxy == nil {
// Mode not yet registered, create it
mode := &ZwlrOutputModeV1{}
mode.SetContext(i.Context())
mode.SetID(objectID)


@@ -1,276 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<protocol name="ext_data_control_v1">
<copyright>
Copyright © 2018 Simon Ser
Copyright © 2019 Ivan Molodetskikh
Copyright © 2024 Neal Gompa
Permission to use, copy, modify, distribute, and sell this
software and its documentation for any purpose is hereby granted
without fee, provided that the above copyright notice appear in
all copies and that both that copyright notice and this permission
notice appear in supporting documentation, and that the name of
the copyright holders not be used in advertising or publicity
pertaining to distribution of the software without specific,
written prior permission. The copyright holders make no
representations about the suitability of this software for any
purpose. It is provided "as is" without express or implied
warranty.
THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS
SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
</copyright>
<description summary="control data devices">
This protocol allows a privileged client to control data devices. In
particular, the client will be able to manage the current selection and take
the role of a clipboard manager.
Warning! The protocol described in this file is currently in the testing
phase. Backward compatible changes may be added together with the
corresponding interface version bump. Backward incompatible changes can
only be done by creating a new major version of the extension.
</description>
<interface name="ext_data_control_manager_v1" version="1">
<description summary="manager to control data devices">
This interface is a manager that allows creating per-seat data device
controls.
</description>
<request name="create_data_source">
<description summary="create a new data source">
Create a new data source.
</description>
<arg name="id" type="new_id" interface="ext_data_control_source_v1"
summary="data source to create"/>
</request>
<request name="get_data_device">
<description summary="get a data device for a seat">
Create a data device that can be used to manage a seat's selection.
</description>
<arg name="id" type="new_id" interface="ext_data_control_device_v1"/>
<arg name="seat" type="object" interface="wl_seat"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy the manager">
All objects created by the manager will still remain valid, until their
appropriate destroy request has been called.
</description>
</request>
</interface>
<interface name="ext_data_control_device_v1" version="1">
<description summary="manage a data device for a seat">
This interface allows a client to manage a seat's selection.
When the seat is destroyed, this object becomes inert.
</description>
<request name="set_selection">
<description summary="copy data to the selection">
This request asks the compositor to set the selection to the data from
the source on behalf of the client.
The given source may not be used in any further set_selection or
set_primary_selection requests. Attempting to use a previously used
source triggers the used_source protocol error.
To unset the selection, set the source to NULL.
</description>
<arg name="source" type="object" interface="ext_data_control_source_v1"
allow-null="true"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy this data device">
Destroys the data device object.
</description>
</request>
<event name="data_offer">
<description summary="introduce a new ext_data_control_offer">
The data_offer event introduces a new ext_data_control_offer object,
which will subsequently be used in either the
ext_data_control_device.selection event (for the regular clipboard
selections) or the ext_data_control_device.primary_selection event (for
the primary clipboard selections). Immediately following the
ext_data_control_device.data_offer event, the new data_offer object
will send out ext_data_control_offer.offer events to describe the MIME
types it offers.
</description>
<arg name="id" type="new_id" interface="ext_data_control_offer_v1"/>
</event>
<event name="selection">
<description summary="advertise new selection">
The selection event is sent out to notify the client of a new
ext_data_control_offer for the selection for this device. The
ext_data_control_device.data_offer and the ext_data_control_offer.offer
events are sent out immediately before this event to introduce the data
offer object. The selection event is sent to a client when a new
selection is set. The ext_data_control_offer is valid until a new
ext_data_control_offer or NULL is received. The client must destroy the
previous selection ext_data_control_offer, if any, upon receiving this
event. Regardless, the previous selection will be ignored once a new
selection ext_data_control_offer is received.
The first selection event is sent upon binding the
ext_data_control_device object.
</description>
<arg name="id" type="object" interface="ext_data_control_offer_v1"
allow-null="true"/>
</event>
<event name="finished">
<description summary="this data control is no longer valid">
This data control object is no longer valid and should be destroyed by
the client.
</description>
</event>
<event name="primary_selection">
<description summary="advertise new primary selection">
The primary_selection event is sent out to notify the client of a new
ext_data_control_offer for the primary selection for this device. The
ext_data_control_device.data_offer and the ext_data_control_offer.offer
events are sent out immediately before this event to introduce the data
offer object. The primary_selection event is sent to a client when a
new primary selection is set. The ext_data_control_offer is valid until
a new ext_data_control_offer or NULL is received. The client must
destroy the previous primary selection ext_data_control_offer, if any,
upon receiving this event. Regardless, the previous primary selection
will be ignored once a new primary selection ext_data_control_offer is
received.
If the compositor supports primary selection, the first
primary_selection event is sent upon binding the
ext_data_control_device object.
</description>
<arg name="id" type="object" interface="ext_data_control_offer_v1"
allow-null="true"/>
</event>
<request name="set_primary_selection">
<description summary="copy data to the primary selection">
This request asks the compositor to set the primary selection to the
data from the source on behalf of the client.
The given source may not be used in any further set_selection or
set_primary_selection requests. Attempting to use a previously used
source triggers the used_source protocol error.
To unset the primary selection, set the source to NULL.
The compositor will ignore this request if it does not support primary
selection.
</description>
<arg name="source" type="object" interface="ext_data_control_source_v1"
allow-null="true"/>
</request>
<enum name="error">
<entry name="used_source" value="1"
summary="source given to set_selection or set_primary_selection was already used before"/>
</enum>
</interface>
<interface name="ext_data_control_source_v1" version="1">
<description summary="offer to transfer data">
The ext_data_control_source object is the source side of a
ext_data_control_offer. It is created by the source client in a data
transfer and provides a way to describe the offered data and a way to
respond to requests to transfer the data.
</description>
<enum name="error">
<entry name="invalid_offer" value="1"
summary="offer sent after ext_data_control_device.set_selection"/>
</enum>
<request name="offer">
<description summary="add an offered MIME type">
This request adds a MIME type to the set of MIME types advertised to
targets. Can be called several times to offer multiple types.
Calling this after ext_data_control_device.set_selection is a protocol
error.
</description>
<arg name="mime_type" type="string"
summary="MIME type offered by the data source"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy this source">
Destroys the data source object.
</description>
</request>
<event name="send">
<description summary="send the data">
Request for data from the client. Send the data as the specified MIME
type over the passed file descriptor, then close it.
</description>
<arg name="mime_type" type="string" summary="MIME type for the data"/>
<arg name="fd" type="fd" summary="file descriptor for the data"/>
</event>
<event name="cancelled">
<description summary="selection was cancelled">
This data source is no longer valid. The data source has been replaced
by another data source.
The client should clean up and destroy this data source.
</description>
</event>
</interface>
<interface name="ext_data_control_offer_v1" version="1">
<description summary="offer to transfer data">
A ext_data_control_offer represents a piece of data offered for transfer
by another client (the source client). The offer describes the different
MIME types that the data can be converted to and provides the mechanism
for transferring the data directly from the source client.
</description>
<request name="receive">
<description summary="request that the data is transferred">
To transfer the offered data, the client issues this request and
indicates the MIME type it wants to receive. The transfer happens
through the passed file descriptor (typically created with the pipe
system call). The source client writes the data in the MIME type
representation requested and then closes the file descriptor.
The receiving client reads from the read end of the pipe until EOF and
then closes its end, at which point the transfer is complete.
This request may happen multiple times for different MIME types.
</description>
<arg name="mime_type" type="string"
summary="MIME type desired by receiver"/>
<arg name="fd" type="fd" summary="file descriptor for data transfer"/>
</request>
<request name="destroy" type="destructor">
<description summary="destroy this offer">
Destroys the data offer object.
</description>
</request>
<event name="offer">
<description summary="advertise offered MIME type">
Sent immediately after creating the ext_data_control_offer object.
One event per offered MIME type.
</description>
<arg name="mime_type" type="string" summary="offered MIME type"/>
</event>
</interface>
</protocol>
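The selection and primary_selection events above share one contract worth restating: each new offer (or a null one) invalidates the previous ext_data_control_offer, which the client must destroy. A minimal sketch in terms of the Go bindings above, assuming the device's selection handler (not shown in this excerpt) hands the new offer to a small tracker:

type exampleSelectionTracker struct {
	current *ExtDataControlOfferV1
}

func (t *exampleSelectionTracker) onSelection(next *ExtDataControlOfferV1) {
	if t.current != nil {
		t.current.Destroy()
	}
	t.current = next
}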


@@ -73,7 +73,7 @@
</description>
<arg name="workspace" type="new_id" interface="ext_workspace_handle_v1"/>
</event>
<request name="commit">
<description summary="all requests about the workspaces have been sent">
The client must send this request after it has finished sending other
@@ -242,7 +242,7 @@
- a list of states, conveyed to the client with the state event
- and optionally a set of coordinates, conveyed to the client with the
coordinates event
The client may request that the compositor activate or deactivate the workspace.
Each workspace can belong to only a single workspace group.


@@ -12,7 +12,6 @@ import (
"strings"
"time"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
)
@@ -120,12 +119,7 @@ func GetOutputDir() string {
}
func getXDGPicturesDir() string {
userConfigDir, err := os.UserConfigDir()
if err != nil {
log.Error("failed to get user config dir", "err", err)
return ""
}
userDirsFile := filepath.Join(userConfigDir, "user-dirs.dirs")
userDirsFile := filepath.Join(utils.XDGConfigHome(), "user-dirs.dirs")
data, err := os.ReadFile(userDirsFile)
if err != nil {
return ""


@@ -141,7 +141,7 @@ func (r *RegionSelector) setupKeyboardHandlers() {
for _, os := range r.surfaces {
r.redrawSurface(os)
}
case 28, 57, 96:
case 28, 57:
if r.selection.hasSelection {
r.finishSelection()
}


@@ -7,7 +7,7 @@ import (
"path/filepath"
"strings"
"github.com/AvengeMedia/DankMaterialShell/core/internal/log"
"github.com/AvengeMedia/DankMaterialShell/core/internal/utils"
)
type ThemeColors struct {
@@ -74,12 +74,7 @@ func loadColorsFile() *ColorScheme {
}
func getColorsFilePath() string {
cacheDir, err := os.UserCacheDir()
if err != nil {
log.Error("Failed to get user cache dir", "err", err)
return ""
}
return filepath.Join(cacheDir, "DankMaterialShell", "dms-colors.json")
return filepath.Join(utils.XDGCacheHome(), "DankMaterialShell", "dms-colors.json")
}
func isLightMode() bool {


@@ -19,9 +19,9 @@ func HandleRequest(conn net.Conn, req models.Request, manager *Manager) {
func handleOpen(conn net.Conn, req models.Request, manager *Manager) {
log.Infof("AppPicker: Received %s request with params: %+v", req.Method, req.Params)
target, ok := models.Get[string](req, "target")
target, ok := req.Params["target"].(string)
if !ok {
target, ok = models.Get[string](req, "url")
target, ok = req.Params["url"].(string)
if !ok {
log.Warnf("AppPicker: Invalid target parameter in request")
models.RespondError(conn, req.ID, "invalid target parameter")
@@ -31,11 +31,14 @@ func handleOpen(conn net.Conn, req models.Request, manager *Manager) {
event := OpenEvent{
Target: target,
RequestType: models.GetOr(req, "requestType", "url"),
MimeType: models.GetOr(req, "mimeType", ""),
RequestType: "url",
}
if categories, ok := models.Get[[]any](req, "categories"); ok {
if mimeType, ok := req.Params["mimeType"].(string); ok {
event.MimeType = mimeType
}
if categories, ok := req.Params["categories"].([]any); ok {
event.Categories = make([]string, 0, len(categories))
for _, cat := range categories {
if catStr, ok := cat.(string); ok {
@@ -44,6 +47,10 @@ func handleOpen(conn net.Conn, req models.Request, manager *Manager) {
}
}
if requestType, ok := req.Params["requestType"].(string); ok {
event.RequestType = requestType
}
log.Infof("AppPicker: Broadcasting event: %+v", event)
manager.RequestOpen(event)
models.Respond(conn, req.ID, "ok")


@@ -9,7 +9,7 @@ import (
func HandleRequest(conn net.Conn, req models.Request, manager *Manager) {
switch req.Method {
case "browser.open":
url, ok := models.Get[string](req, "url")
url, ok := req.Params["url"].(string)
if !ok {
models.RespondError(conn, req.ID, "invalid url parameter")
return


@@ -1,232 +0,0 @@
package clipboard
import (
"encoding/json"
"fmt"
"net"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/models"
"github.com/AvengeMedia/DankMaterialShell/core/internal/server/params"
)
func HandleRequest(conn net.Conn, req models.Request, m *Manager) {
switch req.Method {
case "clipboard.getState":
handleGetState(conn, req, m)
case "clipboard.getHistory":
handleGetHistory(conn, req, m)
case "clipboard.getEntry":
handleGetEntry(conn, req, m)
case "clipboard.deleteEntry":
handleDeleteEntry(conn, req, m)
case "clipboard.clearHistory":
handleClearHistory(conn, req, m)
case "clipboard.copy":
handleCopy(conn, req, m)
case "clipboard.copyEntry":
handleCopyEntry(conn, req, m)
case "clipboard.paste":
handlePaste(conn, req, m)
case "clipboard.subscribe":
handleSubscribe(conn, req, m)
case "clipboard.search":
handleSearch(conn, req, m)
case "clipboard.getConfig":
handleGetConfig(conn, req, m)
case "clipboard.setConfig":
handleSetConfig(conn, req, m)
case "clipboard.store":
handleStore(conn, req, m)
default:
models.RespondError(conn, req.ID, "unknown method: "+req.Method)
}
}
func handleGetState(conn net.Conn, req models.Request, m *Manager) {
models.Respond(conn, req.ID, m.GetState())
}
func handleGetHistory(conn net.Conn, req models.Request, m *Manager) {
history := m.GetHistory()
for i := range history {
history[i].Data = nil
}
models.Respond(conn, req.ID, history)
}
func handleGetEntry(conn net.Conn, req models.Request, m *Manager) {
id, err := params.Int(req.Params, "id")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
entry, err := m.GetEntry(uint64(id))
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, entry)
}
func handleDeleteEntry(conn net.Conn, req models.Request, m *Manager) {
id, err := params.Int(req.Params, "id")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
if err := m.DeleteEntry(uint64(id)); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "entry deleted"})
}
func handleClearHistory(conn net.Conn, req models.Request, m *Manager) {
m.ClearHistory()
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "history cleared"})
}
func handleCopy(conn net.Conn, req models.Request, m *Manager) {
text, err := params.String(req.Params, "text")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
if err := m.CopyText(text); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "copied to clipboard"})
}
func handleCopyEntry(conn net.Conn, req models.Request, m *Manager) {
id, err := params.Int(req.Params, "id")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
entry, err := m.GetEntry(uint64(id))
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
if err := m.SetClipboard(entry.Data, entry.MimeType); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "copied to clipboard"})
}
func handlePaste(conn net.Conn, req models.Request, m *Manager) {
text, err := m.PasteText()
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, map[string]string{"text": text})
}
func handleSubscribe(conn net.Conn, req models.Request, m *Manager) {
clientID := fmt.Sprintf("clipboard-%d", req.ID)
ch := m.Subscribe(clientID)
defer m.Unsubscribe(clientID)
initialState := m.GetState()
if err := json.NewEncoder(conn).Encode(models.Response[State]{
ID: req.ID,
Result: &initialState,
}); err != nil {
return
}
for state := range ch {
if err := json.NewEncoder(conn).Encode(models.Response[State]{
ID: req.ID,
Result: &state,
}); err != nil {
return
}
}
}
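// Illustrative sketch, not part of this file: clipboard.subscribe keeps the
// connection open and streams newline-delimited JSON, the initial State
// first and then a fresh State after every change, so a hypothetical client
// can decode in a loop. The request framing and JSON field casing here are
// assumptions for illustration, not the documented wire format.
func exampleSubscribeClient(conn net.Conn) error {
	if err := json.NewEncoder(conn).Encode(map[string]any{
		"id":     1,
		"method": "clipboard.subscribe",
	}); err != nil {
		return err
	}
	dec := json.NewDecoder(conn)
	for {
		var resp struct {
			ID     int    `json:"id"`
			Result *State `json:"result"`
		}
		if err := dec.Decode(&resp); err != nil {
			return err
		}
		_ = resp.Result // latest clipboard State (initial snapshot first)
	}
}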
func handleSearch(conn net.Conn, req models.Request, m *Manager) {
p := SearchParams{
Query: params.StringOpt(req.Params, "query", ""),
MimeType: params.StringOpt(req.Params, "mimeType", ""),
Limit: params.IntOpt(req.Params, "limit", 50),
Offset: params.IntOpt(req.Params, "offset", 0),
}
if img, ok := models.Get[bool](req, "isImage"); ok {
p.IsImage = &img
}
if b, ok := models.Get[float64](req, "before"); ok {
v := int64(b)
p.Before = &v
}
if a, ok := models.Get[float64](req, "after"); ok {
v := int64(a)
p.After = &v
}
models.Respond(conn, req.ID, m.Search(p))
}
func handleGetConfig(conn net.Conn, req models.Request, m *Manager) {
models.Respond(conn, req.ID, m.GetConfig())
}
func handleSetConfig(conn net.Conn, req models.Request, m *Manager) {
cfg := m.GetConfig()
if v, ok := models.Get[float64](req, "maxHistory"); ok {
cfg.MaxHistory = int(v)
}
if v, ok := models.Get[float64](req, "maxEntrySize"); ok {
cfg.MaxEntrySize = int64(v)
}
if v, ok := models.Get[float64](req, "autoClearDays"); ok {
cfg.AutoClearDays = int(v)
}
if v, ok := models.Get[bool](req, "clearAtStartup"); ok {
cfg.ClearAtStartup = v
}
if v, ok := models.Get[bool](req, "disabled"); ok {
cfg.Disabled = v
}
if err := m.SetConfig(cfg); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "config updated"})
}
func handleStore(conn net.Conn, req models.Request, m *Manager) {
data, err := params.String(req.Params, "data")
if err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
mimeType := params.StringOpt(req.Params, "mimeType", "text/plain;charset=utf-8")
if err := m.StoreData([]byte(data), mimeType); err != nil {
models.RespondError(conn, req.ID, err.Error())
return
}
models.Respond(conn, req.ID, models.SuccessResult{Success: true, Message: "stored"})
}

File diff suppressed because it is too large.


@@ -1,531 +0,0 @@
package clipboard
import (
"sync"
"sync/atomic"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
mocks_wlcontext "github.com/AvengeMedia/DankMaterialShell/core/internal/mocks/wlcontext"
)
func TestEncodeDecodeEntry_Roundtrip(t *testing.T) {
original := Entry{
ID: 12345,
Data: []byte("hello world"),
MimeType: "text/plain;charset=utf-8",
Preview: "hello world",
Size: 11,
Timestamp: time.Now().Truncate(time.Second),
IsImage: false,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.ID, decoded.ID)
assert.Equal(t, original.Data, decoded.Data)
assert.Equal(t, original.MimeType, decoded.MimeType)
assert.Equal(t, original.Preview, decoded.Preview)
assert.Equal(t, original.Size, decoded.Size)
assert.Equal(t, original.Timestamp.Unix(), decoded.Timestamp.Unix())
assert.Equal(t, original.IsImage, decoded.IsImage)
}
func TestEncodeDecodeEntry_Image(t *testing.T) {
original := Entry{
ID: 99999,
Data: []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A},
MimeType: "image/png",
Preview: "[[ image 8 B png 100x100 ]]",
Size: 8,
Timestamp: time.Now().Truncate(time.Second),
IsImage: true,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.ID, decoded.ID)
assert.Equal(t, original.Data, decoded.Data)
assert.True(t, decoded.IsImage)
assert.Equal(t, original.Preview, decoded.Preview)
}
func TestEncodeDecodeEntry_EmptyData(t *testing.T) {
original := Entry{
ID: 1,
Data: []byte{},
MimeType: "text/plain",
Preview: "",
Size: 0,
Timestamp: time.Now().Truncate(time.Second),
IsImage: false,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.ID, decoded.ID)
assert.Empty(t, decoded.Data)
}
func TestEncodeDecodeEntry_LargeData(t *testing.T) {
largeData := make([]byte, 100000)
for i := range largeData {
largeData[i] = byte(i % 256)
}
original := Entry{
ID: 777,
Data: largeData,
MimeType: "application/octet-stream",
Preview: "binary data...",
Size: len(largeData),
Timestamp: time.Now().Truncate(time.Second),
IsImage: false,
}
encoded, err := encodeEntry(original)
assert.NoError(t, err)
decoded, err := decodeEntry(encoded)
assert.NoError(t, err)
assert.Equal(t, original.Data, decoded.Data)
assert.Equal(t, original.Size, decoded.Size)
}
func TestStateEqual_BothNil(t *testing.T) {
assert.False(t, stateEqual(nil, nil))
}
func TestStateEqual_OneNil(t *testing.T) {
s := &State{Enabled: true}
assert.False(t, stateEqual(s, nil))
assert.False(t, stateEqual(nil, s))
}
func TestStateEqual_EnabledDiffers(t *testing.T) {
a := &State{Enabled: true, History: []Entry{}}
b := &State{Enabled: false, History: []Entry{}}
assert.False(t, stateEqual(a, b))
}
func TestStateEqual_HistoryLengthDiffers(t *testing.T) {
a := &State{Enabled: true, History: []Entry{{ID: 1}}}
b := &State{Enabled: true, History: []Entry{}}
assert.False(t, stateEqual(a, b))
}
func TestStateEqual_BothEqual(t *testing.T) {
a := &State{Enabled: true, History: []Entry{{ID: 1}, {ID: 2}}}
b := &State{Enabled: true, History: []Entry{{ID: 3}, {ID: 4}}}
assert.True(t, stateEqual(a, b))
}
func TestManager_ConcurrentSubscriberAccess(t *testing.T) {
m := &Manager{
subscribers: make(map[string]chan State),
dirty: make(chan struct{}, 1),
}
var wg sync.WaitGroup
const goroutines = 20
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
subID := string(rune('a' + id))
ch := m.Subscribe(subID)
assert.NotNil(t, ch)
time.Sleep(time.Millisecond)
m.Unsubscribe(subID)
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentGetState(t *testing.T) {
m := &Manager{
state: &State{
Enabled: true,
History: []Entry{{ID: 1}, {ID: 2}},
},
}
var wg sync.WaitGroup
const goroutines = 50
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
s := m.GetState()
_ = s.Enabled
_ = len(s.History)
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.stateMutex.Lock()
m.state = &State{
Enabled: j%2 == 0,
History: []Entry{{ID: uint64(j)}},
}
m.stateMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentConfigAccess(t *testing.T) {
m := &Manager{
config: DefaultConfig(),
}
var wg sync.WaitGroup
const goroutines = 30
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
cfg := m.getConfig()
_ = cfg.MaxHistory
_ = cfg.MaxEntrySize
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(i int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.configMutex.Lock()
m.config.MaxHistory = 50 + j
m.config.MaxEntrySize = int64(1024 * j)
m.configMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_NotifySubscribersNonBlocking(t *testing.T) {
m := &Manager{
dirty: make(chan struct{}, 1),
}
for i := 0; i < 10; i++ {
m.notifySubscribers()
}
assert.Len(t, m.dirty, 1)
}
func TestManager_ConcurrentOfferAccess(t *testing.T) {
m := &Manager{
offerMimeTypes: make(map[any][]string),
offerRegistry: make(map[uint32]any),
}
var wg sync.WaitGroup
const goroutines = 20
const iterations = 50
for i := 0; i < goroutines; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
key := uint32(id)
for j := 0; j < iterations; j++ {
m.offerMutex.Lock()
m.offerRegistry[key] = struct{}{}
m.offerMimeTypes[key] = []string{"text/plain"}
m.offerMutex.Unlock()
m.offerMutex.RLock()
_ = m.offerRegistry[key]
_ = m.offerMimeTypes[key]
m.offerMutex.RUnlock()
m.offerMutex.Lock()
delete(m.offerRegistry, key)
delete(m.offerMimeTypes, key)
m.offerMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentPersistAccess(t *testing.T) {
m := &Manager{
persistData: make(map[string][]byte),
persistMimeTypes: []string{},
}
var wg sync.WaitGroup
const goroutines = 20
const iterations = 50
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.persistMutex.RLock()
_ = m.persistData
_ = m.persistMimeTypes
m.persistMutex.RUnlock()
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.persistMutex.Lock()
m.persistMimeTypes = []string{"text/plain", "text/html"}
m.persistData = map[string][]byte{
"text/plain": []byte("test"),
}
m.persistMutex.Unlock()
}
}(i)
}
wg.Wait()
}
func TestManager_ConcurrentOwnerAccess(t *testing.T) {
m := &Manager{}
var wg sync.WaitGroup
const goroutines = 30
const iterations = 100
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.ownerLock.Lock()
_ = m.isOwner
m.ownerLock.Unlock()
}
}()
}
for i := 0; i < goroutines/2; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < iterations; j++ {
m.ownerLock.Lock()
m.isOwner = j%2 == 0
m.ownerLock.Unlock()
}
}()
}
wg.Wait()
}
func TestItob(t *testing.T) {
tests := []struct {
input uint64
expected []byte
}{
{0, []byte{0, 0, 0, 0, 0, 0, 0, 0}},
{1, []byte{0, 0, 0, 0, 0, 0, 0, 1}},
{256, []byte{0, 0, 0, 0, 0, 0, 1, 0}},
{0xFFFFFFFFFFFFFFFF, []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}},
}
for _, tt := range tests {
result := itob(tt.input)
assert.Equal(t, tt.expected, result)
}
}
func TestSizeStr(t *testing.T) {
tests := []struct {
input int
expected string
}{
{0, "0 B"},
{100, "100 B"},
{1024, "1 KiB"},
{2048, "2 KiB"},
{1048576, "1 MiB"},
{5242880, "5 MiB"},
}
for _, tt := range tests {
result := sizeStr(tt.input)
assert.Equal(t, tt.expected, result)
}
}
func TestSelectMimeType(t *testing.T) {
m := &Manager{}
tests := []struct {
mimes []string
expected string
}{
{[]string{"text/plain;charset=utf-8", "text/html"}, "text/plain;charset=utf-8"},
{[]string{"text/html", "text/plain"}, "text/plain"},
{[]string{"text/html", "image/png"}, "image/png"},
{[]string{"image/png", "image/jpeg"}, "image/png"},
{[]string{"image/png"}, "image/png"},
{[]string{"application/octet-stream"}, "application/octet-stream"},
{[]string{}, ""},
}
for _, tt := range tests {
result := m.selectMimeType(tt.mimes)
assert.Equal(t, tt.expected, result)
}
}
func TestIsImageMimeType(t *testing.T) {
m := &Manager{}
assert.True(t, m.isImageMimeType("image/png"))
assert.True(t, m.isImageMimeType("image/jpeg"))
assert.True(t, m.isImageMimeType("image/gif"))
assert.False(t, m.isImageMimeType("text/plain"))
assert.False(t, m.isImageMimeType("application/json"))
}
func TestTextPreview(t *testing.T) {
m := &Manager{}
short := m.textPreview([]byte("hello world"))
assert.Equal(t, "hello world", short)
withWhitespace := m.textPreview([]byte(" hello world "))
assert.Equal(t, "hello world", withWhitespace)
longText := make([]byte, 200)
for i := range longText {
longText[i] = 'a'
}
preview := m.textPreview(longText)
assert.True(t, len(preview) > 100)
assert.Contains(t, preview, "…")
}
func TestDefaultConfig(t *testing.T) {
cfg := DefaultConfig()
assert.Equal(t, 100, cfg.MaxHistory)
assert.Equal(t, int64(5*1024*1024), cfg.MaxEntrySize)
assert.Equal(t, 0, cfg.AutoClearDays)
assert.False(t, cfg.ClearAtStartup)
assert.False(t, cfg.Disabled)
}
func TestManager_PostDelegatesToWlContext(t *testing.T) {
mockCtx := mocks_wlcontext.NewMockWaylandContext(t)
var called atomic.Bool
mockCtx.EXPECT().Post(mock.AnythingOfType("func()")).Run(func(fn func()) {
called.Store(true)
fn()
}).Once()
m := &Manager{
wlCtx: mockCtx,
}
executed := false
m.post(func() {
executed = true
})
assert.True(t, called.Load())
assert.True(t, executed)
}
func TestManager_PostExecutesFunctionViaContext(t *testing.T) {
mockCtx := mocks_wlcontext.NewMockWaylandContext(t)
var capturedFn func()
mockCtx.EXPECT().Post(mock.AnythingOfType("func()")).Run(func(fn func()) {
capturedFn = fn
}).Times(3)
m := &Manager{
wlCtx: mockCtx,
}
counter := 0
m.post(func() { counter++ })
m.post(func() { counter += 10 })
m.post(func() { counter += 100 })
assert.NotNil(t, capturedFn)
capturedFn()
assert.Equal(t, 100, counter)
}
func TestManager_ConcurrentPostWithMock(t *testing.T) {
mockCtx := mocks_wlcontext.NewMockWaylandContext(t)
var postCount atomic.Int32
mockCtx.EXPECT().Post(mock.AnythingOfType("func()")).Run(func(fn func()) {
postCount.Add(1)
}).Times(100)
m := &Manager{
wlCtx: mockCtx,
}
var wg sync.WaitGroup
for i := 0; i < 10; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := 0; j < 10; j++ {
m.post(func() {})
}
}()
}
wg.Wait()
assert.Equal(t, int32(100), postCount.Load())
}

Some files were not shown because too many files have changed in this diff.