Mirror of https://github.com/AvengeMedia/DankMaterialShell.git (synced 2025-12-05 21:15:38 -05:00)

Compare commits: 92 commits, v0.6.1...0864179085
Commits in this range (SHA1 only):

0864179085 8de77f283d 004a014000 80f6eb94aa 4035c9cc5f 3a365f6807 9920a0a59f c17bb9e171
03073f6875 609caf6e5f 411141ff88 3e472e18bd e5b6fbd12a c2787f1282 df940124b1 5288d042ca
fa98a27c90 d341a5a60b 7f15227de1 bb45240665 29f84aeab5 5a52edcad8 b078e23aa1 7fa87125b5
f618df46d8 ee03853901 6c4a9bcfb8 1bec20ecef 08c9bf570d 5e77a10a81 3bc6461e2a d3194e15e2
2db79ef202 b3c07edef6 b773fdca34 2e9f9f7b7e 30cbfe729d b036da2446 c8a9fb1674 43bea80cad
23538c0323 2ae911230d 5ce1cb87ea 2a37028b6a 8130feb2a0 c49a875ec2 2a002304b9 d9522818ae
800588e121 991c31ebdb 48f77e1691 42de6fd074 62845b470c fd20986cf8 61369cde9e 644384ce8b
97c11a2482 1e7e1c2d78 1c7201fb04 61ec0c697a 4b5fce1bfc 6cc6e7c8e9 89298fce30 a3a27e07fa
4f32376f22 58bf189941 bcfa508da5 c0ae3ef58b 1e70d7b4c3 f8dc6ad2bc e22482988f 4eb896629d
b310e66275 b39da1bea7 fa575d0574 dfe2f3771b 46caeb0445 59cc9c7006 12e91534eb d9da88ceb5
2dbfec0307 09cf8c9641 f1bed4d6a3 2ed6c33c83 7ad532ed17 92fe8c5b14 8e95572589 62da862a66
993e34f548 e39465aece 8fd616b680 cc054b27de
.github/workflows/release.yml (vendored, 62 changed lines)

@@ -386,6 +386,68 @@ jobs:
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  trigger-obs-update:
    runs-on: ubuntu-latest
    needs: release
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install OSC
        run: |
          sudo apt-get update
          sudo apt-get install -y osc

          mkdir -p ~/.config/osc
          cat > ~/.config/osc/oscrc << EOF
          [general]
          apiurl = https://api.opensuse.org

          [https://api.opensuse.org]
          user = ${{ secrets.OBS_USERNAME }}
          pass = ${{ secrets.OBS_PASSWORD }}
          EOF
          chmod 600 ~/.config/osc/oscrc

      - name: Update OBS packages
        run: |
          VERSION="${{ github.ref_name }}"
          cd distro
          bash scripts/obs-upload.sh dms "Update to $VERSION"

  trigger-ppa-update:
    runs-on: ubuntu-latest
    needs: release
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y \
            debhelper \
            devscripts \
            dput \
            lftp \
            build-essential \
            fakeroot \
            dpkg-dev

      - name: Configure GPG
        env:
          GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
        run: |
          echo "$GPG_KEY" | gpg --import
          GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
          echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV

      - name: Upload to PPA
        run: |
          VERSION="${{ github.ref_name }}"
          cd distro/ubuntu/ppa
          bash create-and-upload.sh ../dms dms questing

  copr-build:
    runs-on: ubuntu-latest
    needs: release

@@ -1,4 +1,4 @@
name: DMS Copr Stable Release (Manual)
name: DMS Copr Stable Release

on:
  workflow_dispatch:
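The `obs-upload.sh` helper invoked by the `Update OBS packages` step is not included in this diff, so its exact behavior is unknown here. As a rough sketch, an osc-based upload of a single package usually boils down to the steps below; the project name comes from the summary link later on this page, while the file layout and copy step are assumptions for illustration only.

```bash
#!/usr/bin/env bash
# Hypothetical sketch of an OBS package upload; this is not the actual obs-upload.sh.
set -euo pipefail

PKG="${1:?package name, e.g. dms}"
MSG="${2:-Automated update}"
PROJECT="home:AvengeMedia"

# Fetch the OBS working copy for this package
osc checkout "$PROJECT" "$PKG"
cd "$PROJECT/$PKG"

# Refresh the spec from the git checkout -- source path is an assumption
cp "$GITHUB_WORKSPACE/distro/opensuse/$PKG.spec" .

osc addremove          # track added/removed files
osc commit -m "$MSG"   # push the new revision; OBS rebuilds the package
```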
.github/workflows/run-obs.yml (vendored, new file, 238 lines)

@@ -0,0 +1,238 @@
name: Update OBS Packages
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
package:
|
||||
description: 'Package to update (dms, dms-git, or all)'
|
||||
required: false
|
||||
default: 'all'
|
||||
rebuild_release:
|
||||
description: 'Release number for rebuilds (e.g., 2, 3, 4 to increment spec Release)'
|
||||
required: false
|
||||
default: ''
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
schedule:
|
||||
- cron: '0 */3 * * *' # Every 3 hours for dms-git builds
|
||||
|
||||
jobs:
|
||||
check-updates:
|
||||
name: Check for updates
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
has_updates: ${{ steps.check.outputs.has_updates }}
|
||||
packages: ${{ steps.check.outputs.packages }}
|
||||
version: ${{ steps.check.outputs.version }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install OSC
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y osc
|
||||
|
||||
mkdir -p ~/.config/osc
|
||||
cat > ~/.config/osc/oscrc << EOF
|
||||
[general]
|
||||
apiurl = https://api.opensuse.org
|
||||
|
||||
[https://api.opensuse.org]
|
||||
user = ${{ secrets.OBS_USERNAME }}
|
||||
pass = ${{ secrets.OBS_PASSWORD }}
|
||||
EOF
|
||||
chmod 600 ~/.config/osc/oscrc
|
||||
|
||||
- name: Check for updates
|
||||
id: check
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
||||
echo "packages=dms" >> $GITHUB_OUTPUT
|
||||
VERSION="${GITHUB_REF#refs/tags/}"
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
echo "Triggered by tag: $VERSION (always update)"
|
||||
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
|
||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
||||
echo "Checking if dms-git source has changed..."
|
||||
|
||||
# Get latest commit hash from master branch
|
||||
LATEST_COMMIT=$(git rev-parse origin/master 2>/dev/null || git rev-parse master 2>/dev/null || echo "")
|
||||
|
||||
if [[ -z "$LATEST_COMMIT" ]]; then
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
echo "Could not determine git commit, proceeding with update"
|
||||
else
|
||||
# Check OBS for last uploaded commit
|
||||
OBS_BASE="$HOME/.cache/osc-checkouts"
|
||||
mkdir -p "$OBS_BASE"
|
||||
OBS_PROJECT="home:AvengeMedia:dms-git"
|
||||
|
||||
if [[ -d "$OBS_BASE/$OBS_PROJECT/dms-git" ]]; then
|
||||
cd "$OBS_BASE/$OBS_PROJECT/dms-git"
|
||||
osc up -q 2>/dev/null || true
|
||||
|
||||
# Check tarball age - if older than 3 hours, update needed
|
||||
if [[ -f "dms-git-source.tar.gz" ]]; then
|
||||
TARBALL_MTIME=$(stat -c%Y "dms-git-source.tar.gz" 2>/dev/null || echo "0")
|
||||
CURRENT_TIME=$(date +%s)
|
||||
AGE_SECONDS=$((CURRENT_TIME - TARBALL_MTIME))
|
||||
AGE_HOURS=$((AGE_SECONDS / 3600))
|
||||
|
||||
# If tarball is older than 3 hours, check for new commits
|
||||
if [[ $AGE_HOURS -ge 3 ]]; then
|
||||
# Check if there are new commits in the last 3 hours
|
||||
cd "${{ github.workspace }}"
|
||||
NEW_COMMITS=$(git log --since="3 hours ago" --oneline origin/master 2>/dev/null | wc -l)
|
||||
|
||||
if [[ $NEW_COMMITS -gt 0 ]]; then
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
echo "📋 New commits detected in last 3 hours, update needed"
|
||||
else
|
||||
echo "has_updates=false" >> $GITHUB_OUTPUT
|
||||
echo "📋 No new commits in last 3 hours, skipping update"
|
||||
fi
|
||||
else
|
||||
echo "has_updates=false" >> $GITHUB_OUTPUT
|
||||
echo "📋 Recent upload exists (< 3 hours), skipping update"
|
||||
fi
|
||||
else
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
echo "📋 No existing tarball in OBS, update needed"
|
||||
fi
|
||||
cd "${{ github.workspace }}"
|
||||
else
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
echo "📋 First upload to OBS, update needed"
|
||||
fi
|
||||
fi
|
||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
||||
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
echo "Manual trigger: ${{ github.event.inputs.package }}"
|
||||
else
|
||||
echo "packages=all" >> $GITHUB_OUTPUT
|
||||
echo "has_updates=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
update-obs:
|
||||
name: Upload to OBS
|
||||
needs: check-updates
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'workflow_dispatch' ||
|
||||
needs.check-updates.outputs.has_updates == 'true'
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Determine packages to update
|
||||
id: packages
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" =~ ^refs/tags/ ]]; then
|
||||
echo "packages=dms" >> $GITHUB_OUTPUT
|
||||
VERSION="${GITHUB_REF#refs/tags/}"
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "Triggered by tag: $VERSION"
|
||||
elif [[ "${{ github.event_name }}" == "schedule" ]]; then
|
||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
||||
echo "Triggered by schedule: updating git package"
|
||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
||||
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
|
||||
echo "Manual trigger: ${{ github.event.inputs.package }}"
|
||||
else
|
||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Update dms-git spec version
|
||||
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
|
||||
run: |
|
||||
# Get commit info for dms-git versioning
|
||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
||||
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
|
||||
|
||||
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
|
||||
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
|
||||
|
||||
# Update version in spec
|
||||
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
|
||||
|
||||
# Add changelog entry
|
||||
DATE_STR=$(date "+%a %b %d %Y")
|
||||
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
|
||||
sed -i "/%changelog/a\\$CHANGELOG_ENTRY" distro/opensuse/dms-git.spec
|
||||
|
||||
- name: Update dms stable version
|
||||
if: steps.packages.outputs.version != ''
|
||||
run: |
|
||||
VERSION="${{ steps.packages.outputs.version }}"
|
||||
VERSION_NO_V="${VERSION#v}"
|
||||
echo "Updating packaging to version $VERSION_NO_V"
|
||||
|
||||
# Update openSUSE dms spec (stable only)
|
||||
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
|
||||
|
||||
# Update Debian _service files
|
||||
for service in distro/debian/*/_service; do
|
||||
if [[ -f "$service" ]]; then
|
||||
sed -i "s|<param name=\"revision\">v[0-9.]*</param>|<param name=\"revision\">$VERSION</param>|" "$service"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Install OSC
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y osc
|
||||
|
||||
mkdir -p ~/.config/osc
|
||||
cat > ~/.config/osc/oscrc << EOF
|
||||
[general]
|
||||
apiurl = https://api.opensuse.org
|
||||
|
||||
[https://api.opensuse.org]
|
||||
user = ${{ secrets.OBS_USERNAME }}
|
||||
pass = ${{ secrets.OBS_PASSWORD }}
|
||||
EOF
|
||||
chmod 600 ~/.config/osc/oscrc
|
||||
|
||||
- name: Upload to OBS
|
||||
env:
|
||||
FORCE_REBUILD: ${{ github.event_name == 'workflow_dispatch' && 'true' || '' }}
|
||||
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
|
||||
run: |
|
||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
||||
MESSAGE="Automated update from GitHub Actions"
|
||||
|
||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
||||
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGES" == "all" ]]; then
|
||||
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
|
||||
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
|
||||
else
|
||||
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
|
||||
fi
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
|
||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
||||
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
if [[ "${{ needs.check-updates.outputs.has_updates }}" == "false" ]]; then
|
||||
echo "- **Status**: Skipped (no changes detected)" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
echo "- **Project**: https://build.opensuse.org/project/show/home:AvengeMedia" >> $GITHUB_STEP_SUMMARY
|
||||
.github/workflows/run-ppa.yml (vendored, new file, 108 lines)

@@ -0,0 +1,108 @@
name: Update PPA Packages
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
package:
|
||||
description: 'Package to upload (dms, dms-git, or all)'
|
||||
required: false
|
||||
default: 'dms-git'
|
||||
rebuild_release:
|
||||
description: 'Release number for rebuilds (e.g., 2, 3, 4 for ppa2, ppa3, ppa4)'
|
||||
required: false
|
||||
default: ''
|
||||
schedule:
|
||||
- cron: '0 */3 * * *' # Every 3 hours for dms-git builds
|
||||
|
||||
jobs:
|
||||
upload-ppa:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install build dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y \
|
||||
debhelper \
|
||||
devscripts \
|
||||
dput \
|
||||
lftp \
|
||||
build-essential \
|
||||
fakeroot \
|
||||
dpkg-dev
|
||||
|
||||
- name: Configure GPG
|
||||
env:
|
||||
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
run: |
|
||||
echo "$GPG_KEY" | gpg --import
|
||||
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
|
||||
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
|
||||
|
||||
- name: Determine packages to upload
|
||||
id: packages
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "schedule" ]]; then
|
||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
||||
echo "Triggered by schedule: uploading git package"
|
||||
elif [[ -n "${{ github.event.inputs.package }}" ]]; then
|
||||
echo "packages=${{ github.event.inputs.package }}" >> $GITHUB_OUTPUT
|
||||
echo "Manual trigger: ${{ github.event.inputs.package }}"
|
||||
else
|
||||
echo "packages=dms-git" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Upload to PPA
|
||||
env:
|
||||
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
|
||||
run: |
|
||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
||||
|
||||
if [[ "$PACKAGES" == "all" ]]; then
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "Uploading dms to PPA..."
|
||||
if [ -n "$REBUILD_RELEASE" ]; then
|
||||
echo "🔄 Using rebuild release number: ppa$REBUILD_RELEASE"
|
||||
fi
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms" dms questing
|
||||
|
||||
echo ""
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "Uploading dms-git to PPA..."
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-git" dms-git questing
|
||||
else
|
||||
PPA_NAME="$PACKAGES"
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "Uploading $PACKAGES to PPA..."
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
bash distro/scripts/ppa-upload.sh "distro/ubuntu/$PACKAGES" "$PPA_NAME" questing
|
||||
fi
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Packages**: ${{ steps.packages.outputs.packages }}" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
||||
if [[ "$PACKAGES" == "all" ]]; then
|
||||
echo "- **PPA dms**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **PPA dms-git**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
|
||||
elif [[ "$PACKAGES" == "dms" ]]; then
|
||||
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms/+packages" >> $GITHUB_STEP_SUMMARY
|
||||
elif [[ "$PACKAGES" == "dms-git" ]]; then
|
||||
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/dms-git/+packages" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
||||
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Builds will appear once Launchpad processes the uploads." >> $GITHUB_STEP_SUMMARY
|
||||
.gitignore (vendored, 6 changed lines)

@@ -136,3 +136,9 @@ go.work.sum
# .vscode/

bin/

# Extracted source trees in Ubuntu package directories
distro/ubuntu/*/dms-git-repo/
distro/ubuntu/*/DankMaterialShell-*/
distro/ubuntu/danklinux/*/dsearch-*/
distro/ubuntu/danklinux/*/dgop-*/
@@ -2,28 +2,42 @@

Contributions are welcome and encouraged.

## Formatting
To contribute fork this repository, make your changes, and open a pull request.

The preferred tool for formatting files is [qmlfmt](https://github.com/jesperhh/qmlfmt) (also available on aur as qmlfmt-git). It actually kinda sucks, but `qmlformat` doesn't work with null safe operators and ternarys and pragma statements and a bunch of other things that are supported.
## VSCode Setup

We need some consistent style, so this at least gives the same formatter that Qt Creator uses.
This is a monorepo, the easiest thing to do is to open an editor in either `quickshell`, `core`, or both depending on which part of the project you are working on.

You can configure it to format on save in vscode by configuring the "custom local formatters" extension then adding this to settings json.
### QML (`quickshell` directory)

1. Install the [QML Extension](https://doc.qt.io/vscodeext/)
2. Configure `ctrl+shift+p` -> user preferences (json) with qmlls path

```json
"customLocalFormatters.formatters": [
  {
    "command": "sh -c \"qmlfmt -t 4 -i 4 -b 250 | sed 's/pragma ComponentBehavior$/pragma ComponentBehavior: Bound/g'\"",
    "languages": ["qml"]
  }
],
"[qml]": {
  "editor.defaultFormatter": "jkillian.custom-local-formatters",
  "editor.formatOnSave": true
},
{
  "qt-qml.doNotAskForQmllsDownload": true,
  "qt-qml.qmlls.customExePath": "/usr/lib/qt6/bin/qmlls"
}
```

Sometimes it just breaks code though. Like turning `"_\""` into `"_""`, so you may not want to do formatOnSave.
3. Create empty `.qmlls.ini` file in `quickshell/` directory

```bash
cd quickshell
touch .qmlls.ini
```

4. Restart dms to generate the `.qmlls.ini` file

5. Make your changes, test, and open a pull request.

### GO (`core` directory)

1. Install the [Go Extension](https://code.visualstudio.com/docs/languages/go)
2. Ensure code is formatted with `make fmt`
3. Add appropriate test coverage and ensure tests pass with `make test`
4. Run `go mod tidy`
5. Open pull request

## Pull request
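The formatter command shown in the VSCode settings above can also be run by hand before committing; a small illustration (the file path is only an example):

```bash
# Format one QML file with the options from the settings above; the sed step
# restores the "pragma ComponentBehavior: Bound" value that qmlfmt drops.
f=quickshell/Modules/SomeWidget.qml   # example path, substitute your own file
qmlfmt -t 4 -i 4 -b 250 < "$f" \
  | sed 's/pragma ComponentBehavior$/pragma ComponentBehavior: Bound/g' \
  > "$f.tmp" && mv "$f.tmp" "$f"
```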
@@ -15,7 +15,7 @@
[](https://github.com/AvengeMedia/DankMaterialShell/releases)
[](https://aur.archlinux.org/packages/dms-shell-bin)
[](https://aur.archlinux.org/packages/dms-shell-git)
[](https://ko-fi.com/avengemediallc)
[](https://ko-fi.com/danklinux)

</div>
@@ -28,7 +28,7 @@ packages:
      outpkg: mocks_brightness
    interfaces:
      DBusConn:
  github.com/AvengeMedia/danklinux/internal/server/network:
  github.com/AvengeMedia/DankMaterialShell/core/internal/server/network:
    config:
      dir: "internal/mocks/network"
      outpkg: mocks_network
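The `MockBackend` code further down in this diff is generated from this mockery configuration. Assuming the usual mockery v2 workflow (where the tool reads `.mockery.yaml` from the module root), regenerating the mocks after an interface change such as the new `ScanWiFiDevice`/`GetWiFiDevices` methods is a single command:

```bash
# Re-generate mocks after editing an interface; assumes mockery v2 is installed
# and the configuration shown above lives in the core/ module root.
cd core
mockery
```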
@@ -57,6 +57,11 @@ func getRuntimeDir() string {
    return os.TempDir()
}

func hasSystemdRun() bool {
    _, err := exec.LookPath("systemd-run")
    return err == nil
}

func getPIDFilePath() string {
    return filepath.Join(getRuntimeDir(), fmt.Sprintf("danklinux-%d.pid", os.Getpid()))
}

@@ -165,6 +170,10 @@ func runShellInteractive(session bool) {
        cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
    }

    if isSessionManaged && hasSystemdRun() {
        cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
    }

    homeDir, err := os.UserHomeDir()
    if err == nil && os.Getenv("DMS_DISABLE_HOT_RELOAD") == "" {
        if !strings.HasPrefix(configPath, homeDir) {

@@ -387,6 +396,10 @@ func runShellDaemon(session bool) {
        cmd.Env = append(cmd.Env, "QT_LOGGING_RULES="+qtRules)
    }

    if isSessionManaged && hasSystemdRun() {
        cmd.Env = append(cmd.Env, "DMS_DEFAULT_LAUNCH_PREFIX=systemd-run --user --scope")
    }

    homeDir, err := os.UserHomeDir()
    if err == nil && os.Getenv("DMS_DISABLE_HOT_RELOAD") == "" {
        if !strings.HasPrefix(configPath, homeDir) {
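The consuming side of `DMS_DEFAULT_LAUNCH_PREFIX` is not part of this hunk; the intent is that the shell prepends the prefix when it spawns applications so each one lands in its own transient systemd scope. A minimal sketch of that idea (illustrative only, not the actual implementation):

```go
package main

import (
    "os"
    "os/exec"
    "strings"
)

// launchWithPrefix prepends DMS_DEFAULT_LAUNCH_PREFIX (if set) to the argv,
// e.g. turning ["firefox"] into ["systemd-run", "--user", "--scope", "firefox"].
func launchWithPrefix(argv []string) *exec.Cmd {
    if prefix := os.Getenv("DMS_DEFAULT_LAUNCH_PREFIX"); prefix != "" {
        argv = append(strings.Fields(prefix), argv...)
    }
    cmd := exec.Command(argv[0], argv[1:]...)
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr
    return cmd
}

func main() {
    _ = launchWithPrefix([]string{"true"}).Run()
}
```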
@@ -125,6 +125,8 @@ windowrulev2 = noborder, class:^(kitty)$
windowrulev2 = float, class:^(firefox)$, title:^(Picture-in-Picture)$
windowrulev2 = float, class:^(zoom)$

# DMS windows floating by default
windowrulev2 = float, class:^(org.quickshell)$
windowrulev2 = opacity 0.9 0.9, floating:0, focus:0

layerrule = noanim, ^(quickshell)$
@@ -218,6 +218,11 @@ window-rule {
    geometry-corner-radius 12
    clip-to-geometry true
}

// Open dms windows as floating by default
window-rule {
    match app-id=r#"org.quickshell$"#
    open-floating true
}

binds {
    // === System & Overview ===
    Mod+D { spawn "niri" "msg" "action" "toggle-overview"; }
@@ -19,10 +19,12 @@ func init() {
    Register("fedora-asahi-remix", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
        return NewFedoraDistribution(config, logChan)
    })

    Register("bluefin", "#0B57A4", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
        return NewFedoraDistribution(config, logChan)
    })
    Register("ultramarine", "#00078b", FamilyFedora, func(config DistroConfig, logChan chan<- string) Distribution {
        return NewFedoraDistribution(config, logChan)
    })
}

type FedoraDistribution struct {

@@ -506,6 +508,14 @@ func (f *FedoraDistribution) installDNFPackages(ctx context.Context, packages []
    f.log(fmt.Sprintf("Installing DNF packages: %s", strings.Join(packages, ", ")))

    args := []string{"dnf", "install", "-y"}

    for _, pkg := range packages {
        if pkg == "niri" || pkg == "niri-git" {
            args = append(args, "--setopt=install_weak_deps=False")
            break
        }
    }

    args = append(args, packages...)

    progressChan <- InstallProgressMsg{
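The niri special case above only changes the dnf invocation: when niri or niri-git is in the package list, the assembled command ends up equivalent to something like the line below (everything after the flag stands in for the rest of the package list):

```bash
# Weak dependencies are skipped so niri's optional recommends are not pulled in.
dnf install -y --setopt=install_weak_deps=False niri  # ...plus the remaining packages
```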
@@ -509,6 +509,52 @@ func (_c *MockBackend_DisconnectWiFi_Call) RunAndReturn(run func() error) *MockB
|
||||
return _c
|
||||
}
|
||||
|
||||
// DisconnectWiFiDevice provides a mock function with given fields: device
|
||||
func (_m *MockBackend) DisconnectWiFiDevice(device string) error {
|
||||
ret := _m.Called(device)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for DisconnectWiFiDevice")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(string) error); ok {
|
||||
r0 = rf(device)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// MockBackend_DisconnectWiFiDevice_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DisconnectWiFiDevice'
|
||||
type MockBackend_DisconnectWiFiDevice_Call struct {
|
||||
*mock.Call
|
||||
}
|
||||
|
||||
// DisconnectWiFiDevice is a helper method to define mock.On call
|
||||
// - device string
|
||||
func (_e *MockBackend_Expecter) DisconnectWiFiDevice(device interface{}) *MockBackend_DisconnectWiFiDevice_Call {
|
||||
return &MockBackend_DisconnectWiFiDevice_Call{Call: _e.mock.On("DisconnectWiFiDevice", device)}
|
||||
}
|
||||
|
||||
func (_c *MockBackend_DisconnectWiFiDevice_Call) Run(run func(device string)) *MockBackend_DisconnectWiFiDevice_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(string))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBackend_DisconnectWiFiDevice_Call) Return(_a0 error) *MockBackend_DisconnectWiFiDevice_Call {
|
||||
_c.Call.Return(_a0)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBackend_DisconnectWiFiDevice_Call) RunAndReturn(run func(string) error) *MockBackend_DisconnectWiFiDevice_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
||||
// ForgetWiFiNetwork provides a mock function with given fields: ssid
|
||||
func (_m *MockBackend) ForgetWiFiNetwork(ssid string) error {
|
||||
ret := _m.Called(ssid)
|
||||
@@ -659,6 +705,53 @@ func (_c *MockBackend_GetPromptBroker_Call) RunAndReturn(run func() network.Prom
|
||||
return _c
|
||||
}
|
||||
|
||||
// GetWiFiDevices provides a mock function with no fields
|
||||
func (_m *MockBackend) GetWiFiDevices() []network.WiFiDevice {
|
||||
ret := _m.Called()
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for GetWiFiDevices")
|
||||
}
|
||||
|
||||
var r0 []network.WiFiDevice
|
||||
if rf, ok := ret.Get(0).(func() []network.WiFiDevice); ok {
|
||||
r0 = rf()
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]network.WiFiDevice)
|
||||
}
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// MockBackend_GetWiFiDevices_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetWiFiDevices'
|
||||
type MockBackend_GetWiFiDevices_Call struct {
|
||||
*mock.Call
|
||||
}
|
||||
|
||||
// GetWiFiDevices is a helper method to define mock.On call
|
||||
func (_e *MockBackend_Expecter) GetWiFiDevices() *MockBackend_GetWiFiDevices_Call {
|
||||
return &MockBackend_GetWiFiDevices_Call{Call: _e.mock.On("GetWiFiDevices")}
|
||||
}
|
||||
|
||||
func (_c *MockBackend_GetWiFiDevices_Call) Run(run func()) *MockBackend_GetWiFiDevices_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run()
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBackend_GetWiFiDevices_Call) Return(_a0 []network.WiFiDevice) *MockBackend_GetWiFiDevices_Call {
|
||||
_c.Call.Return(_a0)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBackend_GetWiFiDevices_Call) RunAndReturn(run func() []network.WiFiDevice) *MockBackend_GetWiFiDevices_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
||||
// GetWiFiEnabled provides a mock function with no fields
|
||||
func (_m *MockBackend) GetWiFiEnabled() (bool, error) {
|
||||
ret := _m.Called()
|
||||
@@ -1091,6 +1184,52 @@ func (_c *MockBackend_ScanWiFi_Call) RunAndReturn(run func() error) *MockBackend
|
||||
return _c
|
||||
}
|
||||
|
||||
// ScanWiFiDevice provides a mock function with given fields: device
|
||||
func (_m *MockBackend) ScanWiFiDevice(device string) error {
|
||||
ret := _m.Called(device)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for ScanWiFiDevice")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(string) error); ok {
|
||||
r0 = rf(device)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// MockBackend_ScanWiFiDevice_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ScanWiFiDevice'
|
||||
type MockBackend_ScanWiFiDevice_Call struct {
|
||||
*mock.Call
|
||||
}
|
||||
|
||||
// ScanWiFiDevice is a helper method to define mock.On call
|
||||
// - device string
|
||||
func (_e *MockBackend_Expecter) ScanWiFiDevice(device interface{}) *MockBackend_ScanWiFiDevice_Call {
|
||||
return &MockBackend_ScanWiFiDevice_Call{Call: _e.mock.On("ScanWiFiDevice", device)}
|
||||
}
|
||||
|
||||
func (_c *MockBackend_ScanWiFiDevice_Call) Run(run func(device string)) *MockBackend_ScanWiFiDevice_Call {
|
||||
_c.Call.Run(func(args mock.Arguments) {
|
||||
run(args[0].(string))
|
||||
})
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBackend_ScanWiFiDevice_Call) Return(_a0 error) *MockBackend_ScanWiFiDevice_Call {
|
||||
_c.Call.Return(_a0)
|
||||
return _c
|
||||
}
|
||||
|
||||
func (_c *MockBackend_ScanWiFiDevice_Call) RunAndReturn(run func(string) error) *MockBackend_ScanWiFiDevice_Call {
|
||||
_c.Call.Return(run)
|
||||
return _c
|
||||
}
|
||||
|
||||
// SetPromptBroker provides a mock function with given fields: broker
|
||||
func (_m *MockBackend) SetPromptBroker(broker network.PromptBroker) error {
|
||||
ret := _m.Called(broker)
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/AvengeMedia/DankMaterialShell/core/internal/errdefs"
|
||||
@@ -125,8 +126,9 @@ func (a *SecretAgent) GetSecrets(
|
||||
connType, displayName, vpnSvc := readConnTypeAndName(conn)
|
||||
ssid := readSSID(conn)
|
||||
fields := fieldsNeeded(settingName, hints)
|
||||
vpnPasswordFlags := readVPNPasswordFlags(conn, settingName)
|
||||
|
||||
log.Infof("[SecretAgent] connType=%s, name=%s, vpnSvc=%s, fields=%v, flags=%d", connType, displayName, vpnSvc, fields, flags)
|
||||
log.Infof("[SecretAgent] connType=%s, name=%s, vpnSvc=%s, fields=%v, flags=%d, vpnPasswordFlags=%d", connType, displayName, vpnSvc, fields, flags, vpnPasswordFlags)
|
||||
|
||||
if a.backend != nil {
|
||||
a.backend.stateMutex.RLock()
|
||||
@@ -163,57 +165,70 @@ func (a *SecretAgent) GetSecrets(
|
||||
}
|
||||
|
||||
if len(fields) == 0 {
|
||||
// For VPN connections with no hints, we can't provide a proper UI.
|
||||
// Defer to other agents (like nm-applet or VPN-specific auth dialogs)
|
||||
// that can handle the VPN type properly (e.g., OpenConnect with SAML, etc.)
|
||||
if settingName == "vpn" {
|
||||
log.Infof("[SecretAgent] VPN with empty hints - deferring to other agents for %s", vpnSvc)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
}
|
||||
if a.backend != nil {
|
||||
a.backend.stateMutex.RLock()
|
||||
isConnectingVPN := a.backend.state.IsConnectingVPN
|
||||
a.backend.stateMutex.RUnlock()
|
||||
|
||||
const (
|
||||
NM_SETTING_SECRET_FLAG_NONE = 0
|
||||
NM_SETTING_SECRET_FLAG_AGENT_OWNED = 1
|
||||
NM_SETTING_SECRET_FLAG_NOT_SAVED = 2
|
||||
NM_SETTING_SECRET_FLAG_NOT_REQUIRED = 4
|
||||
)
|
||||
if !isConnectingVPN {
|
||||
log.Infof("[SecretAgent] VPN with empty hints - deferring to other agents for %s", vpnSvc)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
}
|
||||
|
||||
var passwordFlags uint32 = 0xFFFF
|
||||
switch settingName {
|
||||
case "802-11-wireless-security":
|
||||
if wifiSecSettings, ok := conn["802-11-wireless-security"]; ok {
|
||||
if flagsVariant, ok := wifiSecSettings["psk-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
}
|
||||
case "802-1x":
|
||||
if dot1xSettings, ok := conn["802-1x"]; ok {
|
||||
if flagsVariant, ok := dot1xSettings["password-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
log.Infof("[SecretAgent] VPN with empty hints but we're connecting - prompting for password")
|
||||
fields = []string{"password"}
|
||||
} else {
|
||||
log.Infof("[SecretAgent] VPN with empty hints - deferring to other agents for %s", vpnSvc)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
}
|
||||
}
|
||||
|
||||
if passwordFlags == 0xFFFF {
|
||||
log.Warnf("[SecretAgent] Could not determine password-flags for empty hints - returning NoSecrets error")
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_NOT_REQUIRED != 0 {
|
||||
log.Infof("[SecretAgent] Secrets not required (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_AGENT_OWNED != 0 {
|
||||
log.Warnf("[SecretAgent] Secrets are agent-owned but we don't store secrets (flags=%d) - returning NoSecrets error", passwordFlags)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else {
|
||||
log.Infof("[SecretAgent] No secrets needed, using system stored secrets (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
if len(fields) == 0 {
|
||||
const (
|
||||
NM_SETTING_SECRET_FLAG_NONE = 0
|
||||
NM_SETTING_SECRET_FLAG_AGENT_OWNED = 1
|
||||
NM_SETTING_SECRET_FLAG_NOT_SAVED = 2
|
||||
NM_SETTING_SECRET_FLAG_NOT_REQUIRED = 4
|
||||
)
|
||||
|
||||
var passwordFlags uint32 = 0xFFFF
|
||||
switch settingName {
|
||||
case "802-11-wireless-security":
|
||||
if wifiSecSettings, ok := conn["802-11-wireless-security"]; ok {
|
||||
if flagsVariant, ok := wifiSecSettings["psk-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
}
|
||||
case "802-1x":
|
||||
if dot1xSettings, ok := conn["802-1x"]; ok {
|
||||
if flagsVariant, ok := dot1xSettings["password-flags"]; ok {
|
||||
if pwdFlags, ok := flagsVariant.Value().(uint32); ok {
|
||||
passwordFlags = pwdFlags
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if passwordFlags == 0xFFFF {
|
||||
log.Warnf("[SecretAgent] Could not determine password-flags for empty hints - returning NoSecrets error")
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_NOT_REQUIRED != 0 {
|
||||
log.Infof("[SecretAgent] Secrets not required (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
} else if passwordFlags&NM_SETTING_SECRET_FLAG_AGENT_OWNED != 0 {
|
||||
log.Warnf("[SecretAgent] Secrets are agent-owned but we don't store secrets (flags=%d) - returning NoSecrets error", passwordFlags)
|
||||
return nil, dbus.NewError("org.freedesktop.NetworkManager.SecretAgent.Error.NoSecrets", nil)
|
||||
} else {
|
||||
log.Infof("[SecretAgent] No secrets needed, using system stored secrets (flags=%d)", passwordFlags)
|
||||
out := nmSettingMap{}
|
||||
out[settingName] = nmVariantMap{}
|
||||
return out, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -343,13 +358,11 @@ func (a *SecretAgent) GetSecrets(
|
||||
// Update settings based on type
|
||||
switch settingName {
|
||||
case "vpn":
|
||||
// Set password-flags=0 and add secrets to vpn section
|
||||
vpn, ok := existingSettings["vpn"]
|
||||
if !ok {
|
||||
vpn = make(map[string]dbus.Variant)
|
||||
}
|
||||
|
||||
// Get existing data map (vpn.data is string->string)
|
||||
var data map[string]string
|
||||
if dataVariant, ok := vpn["data"]; ok {
|
||||
if dm, ok := dataVariant.Value().(map[string]string); ok {
|
||||
@@ -364,11 +377,9 @@ func (a *SecretAgent) GetSecrets(
|
||||
data = make(map[string]string)
|
||||
}
|
||||
|
||||
// Update password-flags to 0 (system-stored)
|
||||
data["password-flags"] = "0"
|
||||
vpn["data"] = dbus.MakeVariant(data)
|
||||
|
||||
// Add secrets (vpn.secrets is string->string)
|
||||
secs := make(map[string]string)
|
||||
for k, v := range reply.Secrets {
|
||||
secs[k] = v
|
||||
@@ -379,14 +390,12 @@ func (a *SecretAgent) GetSecrets(
|
||||
log.Infof("[SecretAgent] Updated VPN settings: password-flags=0, secrets with %d fields", len(secs))
|
||||
|
||||
case "802-11-wireless-security":
|
||||
// Set psk-flags=0 for WiFi
|
||||
wifiSec, ok := existingSettings["802-11-wireless-security"]
|
||||
if !ok {
|
||||
wifiSec = make(map[string]dbus.Variant)
|
||||
}
|
||||
wifiSec["psk-flags"] = dbus.MakeVariant(uint32(0))
|
||||
|
||||
// Add PSK secret
|
||||
if psk, ok := reply.Secrets["psk"]; ok {
|
||||
wifiSec["psk"] = dbus.MakeVariant(psk)
|
||||
log.Infof("[SecretAgent] Updated WiFi settings: psk-flags=0")
|
||||
@@ -394,14 +403,12 @@ func (a *SecretAgent) GetSecrets(
|
||||
settings["802-11-wireless-security"] = wifiSec
|
||||
|
||||
case "802-1x":
|
||||
// Set password-flags=0 for 802.1x
|
||||
dot1x, ok := existingSettings["802-1x"]
|
||||
if !ok {
|
||||
dot1x = make(map[string]dbus.Variant)
|
||||
}
|
||||
dot1x["password-flags"] = dbus.MakeVariant(uint32(0))
|
||||
|
||||
// Add password secret
|
||||
if password, ok := reply.Secrets["password"]; ok {
|
||||
dot1x["password"] = dbus.MakeVariant(password)
|
||||
log.Infof("[SecretAgent] Updated 802.1x settings: password-flags=0")
|
||||
@@ -507,6 +514,39 @@ func fieldsNeeded(setting string, hints []string) []string {
|
||||
}
|
||||
}
|
||||
|
||||
func readVPNPasswordFlags(conn map[string]nmVariantMap, settingName string) uint32 {
|
||||
if settingName != "vpn" {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
vpnSettings, ok := conn["vpn"]
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
dataVariant, ok := vpnSettings["data"]
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
dataMap, ok := dataVariant.Value().(map[string]string)
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
flagsStr, ok := dataMap["password-flags"]
|
||||
if !ok {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
flags64, err := strconv.ParseUint(flagsStr, 10, 32)
|
||||
if err != nil {
|
||||
return 0xFFFF
|
||||
}
|
||||
|
||||
return uint32(flags64)
|
||||
}
|
||||
|
||||
func reasonFromFlags(flags uint32) string {
|
||||
const (
|
||||
NM_SECRET_AGENT_GET_SECRETS_FLAG_NONE = 0x0
|
||||
|
||||
@@ -8,10 +8,13 @@ type Backend interface {
    SetWiFiEnabled(enabled bool) error

    ScanWiFi() error
    ScanWiFiDevice(device string) error
    GetWiFiNetworkDetails(ssid string) (*NetworkInfoResponse, error)
    GetWiFiDevices() []WiFiDevice

    ConnectWiFi(req ConnectionRequest) error
    DisconnectWiFi() error
    DisconnectWiFiDevice(device string) error
    ForgetWiFiNetwork(ssid string) error
    SetWiFiAutoconnect(ssid string, autoconnect bool) error

@@ -54,11 +57,13 @@ type BackendState struct {
    WiFiBSSID string
    WiFiSignal uint8
    WiFiNetworks []WiFiNetwork
    WiFiDevices []WiFiDevice
    WiredConnections []WiredConnection
    VPNProfiles []VPNProfile
    VPNActive []VPNActive
    IsConnecting bool
    ConnectingSSID string
    ConnectingDevice string
    IsConnectingVPN bool
    ConnectingVPNUUID string
    LastError string
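The `WiFiDevice` type itself is not shown in this diff; judging from the fields the backends below populate, its shape is roughly the sketch that follows (field set inferred from usage in this diff, ordering and any serialization tags are guesses):

```go
// Inferred sketch of WiFiDevice; WiFiNetwork is the existing per-network type
// already used by BackendState.WiFiNetworks.
type WiFiDevice struct {
    Name      string        // interface name, e.g. "wlan0"
    HwAddress string        // adapter MAC address
    State     string        // "connected", "connecting", "preparing", "disconnecting", "disconnected"
    Connected bool
    SSID      string        // SSID of the active access point, if any
    BSSID     string
    Signal    uint8         // 0-100 strength of the active access point
    IP        string
    Networks  []WiFiNetwork // scan results seen by this specific device
}
```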
@@ -196,3 +196,15 @@ func (b *HybridIwdNetworkdBackend) CancelCredentials(token string) error {
func (b *HybridIwdNetworkdBackend) SetWiFiAutoconnect(ssid string, autoconnect bool) error {
    return b.wifi.SetWiFiAutoconnect(ssid, autoconnect)
}

func (b *HybridIwdNetworkdBackend) ScanWiFiDevice(device string) error {
    return b.wifi.ScanWiFiDevice(device)
}

func (b *HybridIwdNetworkdBackend) DisconnectWiFiDevice(device string) error {
    return b.wifi.DisconnectWiFiDevice(device)
}

func (b *HybridIwdNetworkdBackend) GetWiFiDevices() []WiFiDevice {
    return b.wifi.GetWiFiDevices()
}
@@ -139,9 +139,13 @@ func (b *IWDBackend) discoverDevices() error {
}

func (b *IWDBackend) GetCurrentState() (*BackendState, error) {
    b.stateMutex.RLock()
    defer b.stateMutex.RUnlock()

    state := *b.state
    state.WiFiNetworks = append([]WiFiNetwork(nil), b.state.WiFiNetworks...)
    state.WiredConnections = append([]WiredConnection(nil), b.state.WiredConnections...)
    state.WiFiDevices = b.getWiFiDevicesLocked()

    return &state, nil
}
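The `append([]WiFiNetwork(nil), ...)` copies matter because `state := *b.state` duplicates only the slice headers, so without them the caller would share backing arrays with the backend and observe later mutations. A standalone illustration of the difference:

```go
package main

import "fmt"

func main() {
    backend := []string{"ssid-a", "ssid-b"}

    aliased := backend                            // header copy only: shares the backing array
    snapshot := append([]string(nil), backend...) // element copy: independent backing array

    backend[0] = "mutated"
    fmt.Println(aliased[0])  // "mutated"
    fmt.Println(snapshot[0]) // "ssid-a"
}
```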
@@ -45,3 +45,38 @@ func (b *IWDBackend) DisconnectAllVPN() error {
func (b *IWDBackend) ClearVPNCredentials(uuidOrName string) error {
    return fmt.Errorf("VPN not supported by iwd backend")
}

func (b *IWDBackend) ScanWiFiDevice(device string) error {
    return b.ScanWiFi()
}

func (b *IWDBackend) DisconnectWiFiDevice(device string) error {
    return b.DisconnectWiFi()
}

func (b *IWDBackend) GetWiFiDevices() []WiFiDevice {
    b.stateMutex.RLock()
    defer b.stateMutex.RUnlock()
    return b.getWiFiDevicesLocked()
}

func (b *IWDBackend) getWiFiDevicesLocked() []WiFiDevice {
    if b.state.WiFiDevice == "" {
        return nil
    }

    stateStr := "disconnected"
    if b.state.WiFiConnected {
        stateStr = "connected"
    }

    return []WiFiDevice{{
        Name: b.state.WiFiDevice,
        State: stateStr,
        Connected: b.state.WiFiConnected,
        SSID: b.state.WiFiSSID,
        Signal: b.state.WiFiSignal,
        IP: b.state.WiFiIP,
        Networks: b.state.WiFiNetworks,
    }}
}
@@ -57,3 +57,15 @@ func (b *SystemdNetworkdBackend) ClearVPNCredentials(uuidOrName string) error {
func (b *SystemdNetworkdBackend) SetWiFiAutoconnect(ssid string, autoconnect bool) error {
    return fmt.Errorf("WiFi autoconnect not supported by networkd backend")
}

func (b *SystemdNetworkdBackend) ScanWiFiDevice(device string) error {
    return fmt.Errorf("WiFi scan not supported by networkd backend")
}

func (b *SystemdNetworkdBackend) DisconnectWiFiDevice(device string) error {
    return fmt.Errorf("WiFi disconnect not supported by networkd backend")
}

func (b *SystemdNetworkdBackend) GetWiFiDevices() []WiFiDevice {
    return nil
}
@@ -30,12 +30,20 @@ const (
|
||||
NmDeviceStateReasonNewActivation = 60
|
||||
)
|
||||
|
||||
type wifiDeviceInfo struct {
|
||||
device gonetworkmanager.Device
|
||||
wireless gonetworkmanager.DeviceWireless
|
||||
name string
|
||||
hwAddress string
|
||||
}
|
||||
|
||||
type NetworkManagerBackend struct {
|
||||
nmConn interface{}
|
||||
ethernetDevice interface{}
|
||||
wifiDevice interface{}
|
||||
settings interface{}
|
||||
wifiDev interface{}
|
||||
wifiDevices map[string]*wifiDeviceInfo
|
||||
|
||||
dbusConn *dbus.Conn
|
||||
signals chan *dbus.Signal
|
||||
@@ -71,8 +79,9 @@ func NewNetworkManagerBackend(nmConn ...gonetworkmanager.NetworkManager) (*Netwo
|
||||
}
|
||||
|
||||
backend := &NetworkManagerBackend{
|
||||
nmConn: nm,
|
||||
stopChan: make(chan struct{}),
|
||||
nmConn: nm,
|
||||
stopChan: make(chan struct{}),
|
||||
wifiDevices: make(map[string]*wifiDeviceInfo),
|
||||
state: &BackendState{
|
||||
Backend: "networkmanager",
|
||||
},
|
||||
@@ -114,27 +123,48 @@ func (b *NetworkManagerBackend) Initialize() error {
|
||||
}
|
||||
|
||||
case gonetworkmanager.NmDeviceTypeWifi:
|
||||
b.wifiDevice = dev
|
||||
if w, err := gonetworkmanager.NewDeviceWireless(dev.GetPath()); err == nil {
|
||||
b.wifiDev = w
|
||||
}
|
||||
wifiEnabled, err := nm.GetPropertyWirelessEnabled()
|
||||
if err == nil {
|
||||
b.stateMutex.Lock()
|
||||
b.state.WiFiEnabled = wifiEnabled
|
||||
b.stateMutex.Unlock()
|
||||
}
|
||||
if err := b.updateWiFiState(); err != nil {
|
||||
iface, err := dev.GetPropertyInterface()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if wifiEnabled {
|
||||
if _, err := b.updateWiFiNetworks(); err != nil {
|
||||
log.Warnf("Failed to get initial networks: %v", err)
|
||||
}
|
||||
w, err := gonetworkmanager.NewDeviceWireless(dev.GetPath())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
hwAddr, _ := w.GetPropertyHwAddress()
|
||||
|
||||
b.wifiDevices[iface] = &wifiDeviceInfo{
|
||||
device: dev,
|
||||
wireless: w,
|
||||
name: iface,
|
||||
hwAddress: hwAddr,
|
||||
}
|
||||
|
||||
if b.wifiDevice == nil {
|
||||
b.wifiDevice = dev
|
||||
b.wifiDev = w
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
wifiEnabled, err := nm.GetPropertyWirelessEnabled()
|
||||
if err == nil {
|
||||
b.stateMutex.Lock()
|
||||
b.state.WiFiEnabled = wifiEnabled
|
||||
b.stateMutex.Unlock()
|
||||
}
|
||||
|
||||
if err := b.updateWiFiState(); err != nil {
|
||||
log.Warnf("Failed to update WiFi state: %v", err)
|
||||
}
|
||||
|
||||
if wifiEnabled {
|
||||
if _, err := b.updateWiFiNetworks(); err != nil {
|
||||
log.Warnf("Failed to get initial networks: %v", err)
|
||||
}
|
||||
b.updateAllWiFiDevices()
|
||||
}
|
||||
|
||||
if err := b.updatePrimaryConnection(); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -165,6 +195,7 @@ func (b *NetworkManagerBackend) GetCurrentState() (*BackendState, error) {
|
||||
|
||||
state := *b.state
|
||||
state.WiFiNetworks = append([]WiFiNetwork(nil), b.state.WiFiNetworks...)
|
||||
state.WiFiDevices = append([]WiFiDevice(nil), b.state.WiFiDevices...)
|
||||
state.WiredConnections = append([]WiredConnection(nil), b.state.WiredConnections...)
|
||||
state.VPNProfiles = append([]VPNProfile(nil), b.state.VPNProfiles...)
|
||||
state.VPNActive = append([]VPNActive(nil), b.state.VPNActive...)
|
||||
|
||||
@@ -235,7 +235,7 @@ func (b *NetworkManagerBackend) ConnectVPN(uuidOrName string, singleActive bool)
|
||||
}
|
||||
|
||||
nm := b.nmConn.(gonetworkmanager.NetworkManager)
|
||||
activeConn, err := nm.ActivateConnection(targetConn, nil, nil)
|
||||
_, err = nm.ActivateConnection(targetConn, nil, nil)
|
||||
if err != nil {
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnectingVPN = false
|
||||
@@ -249,20 +249,6 @@ func (b *NetworkManagerBackend) ConnectVPN(uuidOrName string, singleActive bool)
|
||||
return fmt.Errorf("failed to activate VPN: %w", err)
|
||||
}
|
||||
|
||||
if activeConn != nil {
|
||||
state, _ := activeConn.GetPropertyState()
|
||||
if state == 2 {
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnectingVPN = false
|
||||
b.state.ConnectingVPNUUID = ""
|
||||
b.stateMutex.Unlock()
|
||||
b.ListActiveVPN()
|
||||
if b.onStateChange != nil {
|
||||
b.onStateChange()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -197,21 +197,23 @@ func (b *NetworkManagerBackend) GetWiFiNetworkDetails(ssid string) (*NetworkInfo
|
||||
}
|
||||
|
||||
func (b *NetworkManagerBackend) ConnectWiFi(req ConnectionRequest) error {
|
||||
if b.wifiDevice == nil {
|
||||
return fmt.Errorf("no WiFi device available")
|
||||
devInfo, err := b.getWifiDeviceForConnection(req.Device)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b.stateMutex.RLock()
|
||||
alreadyConnected := b.state.WiFiConnected && b.state.WiFiSSID == req.SSID
|
||||
b.stateMutex.RUnlock()
|
||||
|
||||
if alreadyConnected && !req.Interactive {
|
||||
if alreadyConnected && !req.Interactive && req.Device == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnecting = true
|
||||
b.state.ConnectingSSID = req.SSID
|
||||
b.state.ConnectingDevice = req.Device
|
||||
b.state.LastError = ""
|
||||
b.stateMutex.Unlock()
|
||||
|
||||
@@ -223,14 +225,13 @@ func (b *NetworkManagerBackend) ConnectWiFi(req ConnectionRequest) error {
|
||||
|
||||
existingConn, err := b.findConnection(req.SSID)
|
||||
if err == nil && existingConn != nil {
|
||||
dev := b.wifiDevice.(gonetworkmanager.Device)
|
||||
|
||||
_, err := nm.ActivateConnection(existingConn, dev, nil)
|
||||
_, err := nm.ActivateConnection(existingConn, devInfo.device, nil)
|
||||
if err != nil {
|
||||
log.Warnf("[ConnectWiFi] Failed to activate existing connection: %v", err)
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnecting = false
|
||||
b.state.ConnectingSSID = ""
|
||||
b.state.ConnectingDevice = ""
|
||||
b.state.LastError = fmt.Sprintf("failed to activate connection: %v", err)
|
||||
b.stateMutex.Unlock()
|
||||
if b.onStateChange != nil {
|
||||
@@ -242,11 +243,12 @@ func (b *NetworkManagerBackend) ConnectWiFi(req ConnectionRequest) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := b.createAndConnectWiFi(req); err != nil {
|
||||
if err := b.createAndConnectWiFiOnDevice(req, devInfo); err != nil {
|
||||
log.Warnf("[ConnectWiFi] Failed to create and connect: %v", err)
|
||||
b.stateMutex.Lock()
|
||||
b.state.IsConnecting = false
|
||||
b.state.ConnectingSSID = ""
|
||||
b.state.ConnectingDevice = ""
|
||||
b.state.LastError = err.Error()
|
||||
b.stateMutex.Unlock()
|
||||
if b.onStateChange != nil {
|
||||
@@ -502,19 +504,17 @@ func (b *NetworkManagerBackend) findConnection(ssid string) (gonetworkmanager.Co
|
||||
}
|
||||
|
||||
func (b *NetworkManagerBackend) createAndConnectWiFi(req ConnectionRequest) error {
|
||||
if b.wifiDevice == nil {
|
||||
return fmt.Errorf("no WiFi device available")
|
||||
}
|
||||
|
||||
nm := b.nmConn.(gonetworkmanager.NetworkManager)
|
||||
dev := b.wifiDevice.(gonetworkmanager.Device)
|
||||
|
||||
if err := b.ensureWiFiDevice(); err != nil {
|
||||
devInfo, err := b.getWifiDeviceForConnection(req.Device)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
wifiDev := b.wifiDev
|
||||
return b.createAndConnectWiFiOnDevice(req, devInfo)
|
||||
}
|
||||
|
||||
w := wifiDev.(gonetworkmanager.DeviceWireless)
|
||||
func (b *NetworkManagerBackend) createAndConnectWiFiOnDevice(req ConnectionRequest, devInfo *wifiDeviceInfo) error {
|
||||
nm := b.nmConn.(gonetworkmanager.NetworkManager)
|
||||
dev := devInfo.device
|
||||
w := devInfo.wireless
|
||||
apPaths, err := w.GetAccessPoints()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get access points: %w", err)
|
||||
@@ -716,3 +716,254 @@ func (b *NetworkManagerBackend) SetWiFiAutoconnect(ssid string, autoconnect bool
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *NetworkManagerBackend) ScanWiFiDevice(device string) error {
|
||||
devInfo, ok := b.wifiDevices[device]
|
||||
if !ok {
|
||||
return fmt.Errorf("WiFi device not found: %s", device)
|
||||
}
|
||||
|
||||
b.stateMutex.RLock()
|
||||
enabled := b.state.WiFiEnabled
|
||||
b.stateMutex.RUnlock()
|
||||
|
||||
if !enabled {
|
||||
return fmt.Errorf("WiFi is disabled")
|
||||
}
|
||||
|
||||
if err := devInfo.wireless.RequestScan(); err != nil {
|
||||
return fmt.Errorf("scan request failed: %w", err)
|
||||
}
|
||||
|
||||
b.updateAllWiFiDevices()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *NetworkManagerBackend) DisconnectWiFiDevice(device string) error {
|
||||
devInfo, ok := b.wifiDevices[device]
|
||||
if !ok {
|
||||
return fmt.Errorf("WiFi device not found: %s", device)
|
||||
}
|
||||
|
||||
if err := devInfo.device.Disconnect(); err != nil {
|
||||
return fmt.Errorf("failed to disconnect: %w", err)
|
||||
}
|
||||
|
||||
b.updateWiFiState()
|
||||
b.updateAllWiFiDevices()
|
||||
b.updatePrimaryConnection()
|
||||
|
||||
if b.onStateChange != nil {
|
||||
b.onStateChange()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *NetworkManagerBackend) GetWiFiDevices() []WiFiDevice {
|
||||
b.stateMutex.RLock()
|
||||
defer b.stateMutex.RUnlock()
|
||||
return append([]WiFiDevice(nil), b.state.WiFiDevices...)
|
||||
}
|
||||
|
||||
func (b *NetworkManagerBackend) updateAllWiFiDevices() {
|
||||
s := b.settings
|
||||
if s == nil {
|
||||
var err error
|
||||
s, err = gonetworkmanager.NewSettings()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
b.settings = s
|
||||
}
|
||||
|
||||
settingsMgr := s.(gonetworkmanager.Settings)
|
||||
connections, err := settingsMgr.ListConnections()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
savedSSIDs := make(map[string]bool)
|
||||
autoconnectMap := make(map[string]bool)
|
||||
for _, conn := range connections {
|
||||
connSettings, err := conn.GetSettings()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
connMeta, ok := connSettings["connection"]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
connType, ok := connMeta["type"].(string)
|
||||
if !ok || connType != "802-11-wireless" {
|
||||
continue
|
||||
}
|
||||
|
||||
wifiSettings, ok := connSettings["802-11-wireless"]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
ssidBytes, ok := wifiSettings["ssid"].([]byte)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
ssid := string(ssidBytes)
|
||||
savedSSIDs[ssid] = true
|
||||
autoconnect := true
|
||||
if ac, ok := connMeta["autoconnect"].(bool); ok {
|
||||
autoconnect = ac
|
||||
}
|
||||
autoconnectMap[ssid] = autoconnect
|
||||
}

    var devices []WiFiDevice

    for name, devInfo := range b.wifiDevices {
        state, _ := devInfo.device.GetPropertyState()
        connected := state == gonetworkmanager.NmDeviceStateActivated

        var ssid, bssid, ip string
        var signal uint8

        if connected {
            if activeAP, err := devInfo.wireless.GetPropertyActiveAccessPoint(); err == nil && activeAP != nil && activeAP.GetPath() != "/" {
                ssid, _ = activeAP.GetPropertySSID()
                signal, _ = activeAP.GetPropertyStrength()
                bssid, _ = activeAP.GetPropertyHWAddress()
            }
            ip = b.getDeviceIP(devInfo.device)
        }

        stateStr := "disconnected"
        switch state {
        case gonetworkmanager.NmDeviceStateActivated:
            stateStr = "connected"
        case gonetworkmanager.NmDeviceStateConfig, gonetworkmanager.NmDeviceStateIpConfig:
            stateStr = "connecting"
        case gonetworkmanager.NmDeviceStatePrepare:
            stateStr = "preparing"
        case gonetworkmanager.NmDeviceStateDeactivating:
            stateStr = "disconnecting"
        }

        apPaths, err := devInfo.wireless.GetAccessPoints()
        var networks []WiFiNetwork
        if err == nil {
            seenSSIDs := make(map[string]*WiFiNetwork)
            for _, ap := range apPaths {
                apSSID, err := ap.GetPropertySSID()
                if err != nil || apSSID == "" {
                    continue
                }

                if existing, exists := seenSSIDs[apSSID]; exists {
                    strength, _ := ap.GetPropertyStrength()
                    if strength > existing.Signal {
                        existing.Signal = strength
                        freq, _ := ap.GetPropertyFrequency()
                        existing.Frequency = freq
                        apBSSID, _ := ap.GetPropertyHWAddress()
                        existing.BSSID = apBSSID
                    }
                    continue
                }

                strength, _ := ap.GetPropertyStrength()
                flags, _ := ap.GetPropertyFlags()
                wpaFlags, _ := ap.GetPropertyWPAFlags()
                rsnFlags, _ := ap.GetPropertyRSNFlags()
                freq, _ := ap.GetPropertyFrequency()
                maxBitrate, _ := ap.GetPropertyMaxBitrate()
                apBSSID, _ := ap.GetPropertyHWAddress()
                mode, _ := ap.GetPropertyMode()

                secured := flags != uint32(gonetworkmanager.Nm80211APFlagsNone) ||
                    wpaFlags != uint32(gonetworkmanager.Nm80211APSecNone) ||
                    rsnFlags != uint32(gonetworkmanager.Nm80211APSecNone)

                enterprise := (rsnFlags&uint32(gonetworkmanager.Nm80211APSecKeyMgmt8021X) != 0) ||
                    (wpaFlags&uint32(gonetworkmanager.Nm80211APSecKeyMgmt8021X) != 0)

                var modeStr string
                switch mode {
                case gonetworkmanager.Nm80211ModeAdhoc:
                    modeStr = "adhoc"
                case gonetworkmanager.Nm80211ModeInfra:
                    modeStr = "infrastructure"
                case gonetworkmanager.Nm80211ModeAp:
                    modeStr = "ap"
                default:
                    modeStr = "unknown"
                }

                channel := frequencyToChannel(freq)

                network := WiFiNetwork{
                    SSID:        apSSID,
                    BSSID:       apBSSID,
                    Signal:      strength,
                    Secured:     secured,
                    Enterprise:  enterprise,
                    Connected:   connected && apSSID == ssid,
                    Saved:       savedSSIDs[apSSID],
                    Autoconnect: autoconnectMap[apSSID],
                    Frequency:   freq,
                    Mode:        modeStr,
                    Rate:        maxBitrate / 1000,
                    Channel:     channel,
                    Device:      name,
                }

                seenSSIDs[apSSID] = &network
                networks = append(networks, network)
            }
            sortWiFiNetworks(networks)
        }

        devices = append(devices, WiFiDevice{
            Name:      name,
            HwAddress: devInfo.hwAddress,
            State:     stateStr,
            Connected: connected,
            SSID:      ssid,
            BSSID:     bssid,
            Signal:    signal,
            IP:        ip,
            Networks:  networks,
        })
    }

    sort.Slice(devices, func(i, j int) bool {
        return devices[i].Name < devices[j].Name
    })

    b.stateMutex.Lock()
    b.state.WiFiDevices = devices
    b.stateMutex.Unlock()
}
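frequencyToChannel is called above but sits outside this diff. A plausible sketch of such a helper, assuming the standard 802.11 frequency-to-channel mapping (illustrative only, not necessarily the project's implementation):

// frequencyToChannel maps an access point frequency in MHz to its 802.11
// channel number: 2.4 GHz channels 1-13 step by 5 MHz from 2412 MHz,
// channel 14 sits at 2484 MHz, and 5 GHz channels step by 5 MHz from 5000 MHz.
func frequencyToChannel(freq uint32) uint32 {
    switch {
    case freq == 2484:
        return 14
    case freq >= 2412 && freq <= 2472:
        return (freq - 2407) / 5
    case freq >= 5000 && freq <= 5895:
        return (freq - 5000) / 5
    default:
        return 0
    }
}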

func (b *NetworkManagerBackend) getWifiDeviceForConnection(deviceName string) (*wifiDeviceInfo, error) {
    if deviceName != "" {
        devInfo, ok := b.wifiDevices[deviceName]
        if !ok {
            return nil, fmt.Errorf("WiFi device not found: %s", deviceName)
        }
        return devInfo, nil
    }

    if b.wifiDevice == nil {
        return nil, fmt.Errorf("no WiFi device available")
    }

    dev := b.wifiDevice.(gonetworkmanager.Device)
    iface, _ := dev.GetPropertyInterface()
    if devInfo, ok := b.wifiDevices[iface]; ok {
        return devInfo, nil
    }

    return nil, fmt.Errorf("no WiFi device available")
}

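sortWiFiNetworks, called from updateWiFiDevices above, is also outside this diff. A sketch of one possible ordering, using the standard sort package already imported in this file; the criteria here (connected network first, then descending signal) are an assumption for illustration, not the project's confirmed behavior.

// sortWiFiNetworks orders scan results for display. Assumed criteria:
// the connected network first, then stronger signals first.
func sortWiFiNetworks(networks []WiFiNetwork) {
    sort.SliceStable(networks, func(i, j int) bool {
        if networks[i].Connected != networks[j].Connected {
            return networks[i].Connected
        }
        return networks[i].Signal > networks[j].Signal
    })
}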
@@ -101,10 +101,21 @@ func TestNetworkManagerBackend_ConnectWiFi_AlreadyConnected(t *testing.T) {

    backend.wifiDevice = mockDeviceWireless
    backend.wifiDev = mockDeviceWireless
    backend.wifiDevices = map[string]*wifiDeviceInfo{
        "wlan0": {
            device:    nil,
            wireless:  mockDeviceWireless,
            name:      "wlan0",
            hwAddress: "00:11:22:33:44:55",
        },
    }

    mockDeviceWireless.EXPECT().GetPropertyInterface().Return("wlan0", nil)

    backend.stateMutex.Lock()
    backend.state.WiFiConnected = true
    backend.state.WiFiSSID = "TestNetwork"
    backend.state.WiFiDevice = "wlan0"
    backend.stateMutex.Unlock()

    req := ConnectionRequest{SSID: "TestNetwork", Password: "password"}

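A companion test for the error path of getWifiDeviceForConnection could look like the sketch below. The test name and the simplified backend construction are illustrative; a real test would reuse the scaffolding from the test above.

// Illustrative sketch: an explicit device name that is not registered
// should yield an error rather than falling back to the default device.
func TestGetWifiDeviceForConnection_UnknownDevice(t *testing.T) {
    backend := &NetworkManagerBackend{
        wifiDevices: map[string]*wifiDeviceInfo{},
    }

    if _, err := backend.getWifiDeviceForConnection("wlan9"); err == nil {
        t.Fatal("expected an error for an unregistered device name")
    }
}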
@@ -135,7 +135,14 @@ func handleGetState(conn net.Conn, req Request, manager *Manager) {
}

func handleScanWiFi(conn net.Conn, req Request, manager *Manager) {
    if err := manager.ScanWiFi(); err != nil {
    device, _ := req.Params["device"].(string)
    var err error
    if device != "" {
        err = manager.ScanWiFiDevice(device)
    } else {
        err = manager.ScanWiFi()
    }
    if err != nil {
        models.RespondError(conn, req.ID, err.Error())
        return
    }
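handleScanWiFi above and handleDisconnectWiFi below now share the same optional-device pattern. Purely as an illustration (assuming req.Params is a map[string]interface{}, as its use above suggests), that pattern could be expressed as a hypothetical helper:

// dispatchByDevice is not part of the codebase; it only illustrates the
// shared pattern: use the per-device call when a "device" param is present,
// otherwise fall back to the all-devices call.
func dispatchByDevice(params map[string]interface{}, perDevice func(string) error, all func() error) error {
    if device, _ := params["device"].(string); device != "" {
        return perDevice(device)
    }
    return all()
}

With such a helper, the scan handler body would reduce to err := dispatchByDevice(req.Params, manager.ScanWiFiDevice, manager.ScanWiFi).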
@@ -163,6 +170,9 @@ func handleConnectWiFi(conn net.Conn, req Request, manager *Manager) {
    if username, ok := req.Params["username"].(string); ok {
        connReq.Username = username
    }
    if device, ok := req.Params["device"].(string); ok {
        connReq.Device = device
    }

    if interactive, ok := req.Params["interactive"].(bool); ok {
        connReq.Interactive = interactive
@@ -170,7 +180,7 @@ func handleConnectWiFi(conn net.Conn, req Request, manager *Manager) {
    state := manager.GetState()
    alreadyConnected := state.WiFiConnected && state.WiFiSSID == ssid

    if alreadyConnected {
    if alreadyConnected && connReq.Device == "" {
        connReq.Interactive = false
    } else {
        networkInfo, err := manager.GetNetworkInfo(ssid)
@@ -200,7 +210,14 @@ func handleConnectWiFi(conn net.Conn, req Request, manager *Manager) {
}

func handleDisconnectWiFi(conn net.Conn, req Request, manager *Manager) {
    if err := manager.DisconnectWiFi(); err != nil {
    device, _ := req.Params["device"].(string)
    var err error
    if device != "" {
        err = manager.DisconnectWiFiDevice(device)
    } else {
        err = manager.DisconnectWiFi()
    }
    if err != nil {
        models.RespondError(conn, req.ID, err.Error())
        return
    }

@@ -117,11 +117,13 @@ func (m *Manager) syncStateFromBackend() error {
    m.state.WiFiBSSID = backendState.WiFiBSSID
    m.state.WiFiSignal = backendState.WiFiSignal
    m.state.WiFiNetworks = backendState.WiFiNetworks
    m.state.WiFiDevices = backendState.WiFiDevices
    m.state.WiredConnections = backendState.WiredConnections
    m.state.VPNProfiles = backendState.VPNProfiles
    m.state.VPNActive = backendState.VPNActive
    m.state.IsConnecting = backendState.IsConnecting
    m.state.ConnectingSSID = backendState.ConnectingSSID
    m.state.ConnectingDevice = backendState.ConnectingDevice
    m.state.LastError = backendState.LastError
    m.stateMutex.Unlock()

@@ -151,6 +153,7 @@ func (m *Manager) snapshotState() NetworkState {
    defer m.stateMutex.RUnlock()
    s := *m.state
    s.WiFiNetworks = append([]WiFiNetwork(nil), m.state.WiFiNetworks...)
    s.WiFiDevices = append([]WiFiDevice(nil), m.state.WiFiDevices...)
    s.WiredConnections = append([]WiredConnection(nil), m.state.WiredConnections...)
    s.VPNProfiles = append([]VPNProfile(nil), m.state.VPNProfiles...)
    s.VPNActive = append([]VPNActive(nil), m.state.VPNActive...)
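snapshotState copies every slice, including the new WiFiDevices, so a snapshot handed to callers cannot alias the live state. A minimal standalone illustration of the aliasing that the append-copy avoids (fmt is used only for the demonstration):

func sliceAliasingExample() {
    live := []WiFiDevice{{Name: "wlan0"}}
    aliased := live                                // shares the backing array
    isolated := append([]WiFiDevice(nil), live...) // independent copy, as in snapshotState

    live[0].Name = "wlan1"
    fmt.Println(aliased[0].Name)  // "wlan1": the alias sees the mutation
    fmt.Println(isolated[0].Name) // "wlan0": the snapshot does not
}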
@@ -204,6 +207,9 @@ func stateChangedMeaningfully(old, new *NetworkState) bool {
    if len(old.WiFiNetworks) != len(new.WiFiNetworks) {
        return true
    }
    if len(old.WiFiDevices) != len(new.WiFiDevices) {
        return true
    }
    if len(old.WiredConnections) != len(new.WiredConnections) {
        return true
    }
@@ -505,3 +511,19 @@ func (m *Manager) ClearVPNCredentials(uuidOrName string) error {
func (m *Manager) SetWiFiAutoconnect(ssid string, autoconnect bool) error {
    return m.backend.SetWiFiAutoconnect(ssid, autoconnect)
}

func (m *Manager) GetWiFiDevices() []WiFiDevice {
    m.stateMutex.RLock()
    defer m.stateMutex.RUnlock()
    devices := make([]WiFiDevice, len(m.state.WiFiDevices))
    copy(devices, m.state.WiFiDevices)
    return devices
}

func (m *Manager) ScanWiFiDevice(device string) error {
    return m.backend.ScanWiFiDevice(device)
}

func (m *Manager) DisconnectWiFiDevice(device string) error {
    return m.backend.DisconnectWiFiDevice(device)
}

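A hedged usage sketch of the new per-device manager API; the device name "wlan1" and the surrounding wiring are illustrative, not taken from the codebase:

// Illustrative call sequence; error handling shortened.
if err := manager.ScanWiFiDevice("wlan1"); err != nil {
    log.Warnf("scan on wlan1 failed: %v", err)
}
for _, dev := range manager.GetWiFiDevices() {
    log.Infof("%s: %s (%d networks visible)", dev.Name, dev.State, len(dev.Networks))
}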
@@ -37,6 +37,19 @@ type WiFiNetwork struct {
    Mode    string `json:"mode"`
    Rate    uint32 `json:"rate"`
    Channel uint32 `json:"channel"`
    Device  string `json:"device,omitempty"`
}

type WiFiDevice struct {
    Name      string        `json:"name"`
    HwAddress string        `json:"hwAddress"`
    State     string        `json:"state"`
    Connected bool          `json:"connected"`
    SSID      string        `json:"ssid,omitempty"`
    BSSID     string        `json:"bssid,omitempty"`
    Signal    uint8         `json:"signal,omitempty"`
    IP        string        `json:"ip,omitempty"`
    Networks  []WiFiNetwork `json:"networks"`
}

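Given the tags above, a WiFiDevice serializes roughly as follows. This is a hand-written illustration of the expected JSON shape (using encoding/json), not captured output:

dev := WiFiDevice{
    Name:      "wlan0",
    HwAddress: "00:11:22:33:44:55",
    State:     "connected",
    Connected: true,
    SSID:      "HomeNet",
    Signal:    72,
    Networks:  []WiFiNetwork{},
}
out, _ := json.Marshal(dev)
// {"name":"wlan0","hwAddress":"00:11:22:33:44:55","state":"connected",
//  "connected":true,"ssid":"HomeNet","signal":72,"networks":[]}

BSSID and IP drop out of the example because of their omitempty tags.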
type VPNProfile struct {
@@ -76,11 +89,13 @@ type NetworkState struct {
    WiFiBSSID        string            `json:"wifiBSSID"`
    WiFiSignal       uint8             `json:"wifiSignal"`
    WiFiNetworks     []WiFiNetwork     `json:"wifiNetworks"`
    WiFiDevices      []WiFiDevice      `json:"wifiDevices"`
    WiredConnections []WiredConnection `json:"wiredConnections"`
    VPNProfiles      []VPNProfile      `json:"vpnProfiles"`
    VPNActive        []VPNActive       `json:"vpnActive"`
    IsConnecting     bool              `json:"isConnecting"`
    ConnectingSSID   string            `json:"connectingSSID"`
    ConnectingDevice string            `json:"connectingDevice,omitempty"`
    LastError        string            `json:"lastError"`
}

@@ -91,6 +106,7 @@ type ConnectionRequest struct {
    AnonymousIdentity string `json:"anonymousIdentity,omitempty"`
    DomainSuffixMatch string `json:"domainSuffixMatch,omitempty"`
    Interactive       bool   `json:"interactive,omitempty"`
    Device            string `json:"device,omitempty"`
}

type WiredConnection struct {

@@ -31,7 +31,7 @@ import (
    "github.com/AvengeMedia/DankMaterialShell/core/pkg/syncmap"
)

const APIVersion = 19
const APIVersion = 20

type Capabilities struct {
    Capabilities []string `json:"capabilities"`
@@ -1071,10 +1071,10 @@ func Start(printDocs bool) error {
    log.Info(" plugins.search - Search plugins (params: query, category?, compositor?, capability?)")
    log.Info("Network:")
    log.Info(" network.getState - Get current network state")
    log.Info(" network.wifi.scan - Scan for WiFi networks")
    log.Info(" network.wifi.scan - Scan for WiFi networks (params: device?)")
    log.Info(" network.wifi.networks - Get WiFi network list")
    log.Info(" network.wifi.connect - Connect to WiFi (params: ssid, password?, username?)")
    log.Info(" network.wifi.disconnect - Disconnect WiFi")
    log.Info(" network.wifi.connect - Connect to WiFi (params: ssid, password?, username?, device?)")
    log.Info(" network.wifi.disconnect - Disconnect WiFi (params: device?)")
    log.Info(" network.wifi.forget - Forget network (params: ssid)")
    log.Info(" network.wifi.toggle - Toggle WiFi radio")
    log.Info(" network.wifi.enable - Enable WiFi")

@@ -607,41 +607,6 @@ func (m *Manager) transitionWorker() {

        if finalTarget == targetTemp {
            log.Debugf("Transition complete: now at %dK", targetTemp)

            m.configMutex.RLock()
            enabled := m.config.Enabled
            identityTemp := m.config.HighTemp
            m.configMutex.RUnlock()

            if !enabled && targetTemp == identityTemp && m.controlsInitialized {
                m.post(func() {
                    log.Info("Destroying gamma controls after transition to identity")
                    m.outputs.Range(func(id uint32, out *outputState) bool {
                        if out.gammaControl != nil {
                            control := out.gammaControl.(*wlr_gamma_control.ZwlrGammaControlV1)
                            control.Destroy()
                            log.Debugf("Destroyed gamma control for output %d", id)
                        }
                        return true
                    })
                    m.outputs.Range(func(key uint32, value *outputState) bool {
                        m.outputs.Delete(key)
                        return true
                    })
                    m.controlsInitialized = false

                    m.transitionMutex.Lock()
                    m.currentTemp = identityTemp
                    m.targetTemp = identityTemp
                    m.transitionMutex.Unlock()

                    if _, err := m.display.Sync(); err != nil {
                        log.Warnf("Failed to sync Wayland display after destroying controls: %v", err)
                    }

                    log.Info("All gamma controls destroyed")
                })
            }
        }
    }
}
@@ -1262,46 +1227,33 @@ func (m *Manager) SetEnabled(enabled bool) {
        }
    } else {
        if m.controlsInitialized {
            m.configMutex.RLock()
            identityTemp := m.config.HighTemp
            m.configMutex.RUnlock()

            m.transitionMutex.RLock()
            currentTemp := m.currentTemp
            m.transitionMutex.RUnlock()

            if currentTemp == identityTemp {
                m.post(func() {
                    log.Infof("Already at %dK, destroying gamma controls immediately", identityTemp)
                    m.outputs.Range(func(id uint32, out *outputState) bool {
                        if out.gammaControl != nil {
                            control := out.gammaControl.(*wlr_gamma_control.ZwlrGammaControlV1)
                            control.Destroy()
                            log.Debugf("Destroyed gamma control for output %d", id)
                        }
                        return true
                    })
                    m.outputs.Range(func(key uint32, value *outputState) bool {
                        m.outputs.Delete(key)
                        return true
                    })
                    m.controlsInitialized = false

                    m.transitionMutex.Lock()
                    m.currentTemp = identityTemp
                    m.targetTemp = identityTemp
                    m.transitionMutex.Unlock()

                    if _, err := m.display.Sync(); err != nil {
                        log.Warnf("Failed to sync Wayland display after destroying controls: %v", err)
            m.post(func() {
                log.Info("Disabling gamma, destroying controls immediately")
                m.outputs.Range(func(id uint32, out *outputState) bool {
                    if out.gammaControl != nil {
                        control := out.gammaControl.(*wlr_gamma_control.ZwlrGammaControlV1)
                        control.Destroy()
                        log.Debugf("Destroyed gamma control for output %d", id)
                    }

                    log.Info("All gamma controls destroyed")
                    return true
                })
            } else {
                log.Infof("Disabling: transitioning to %dK before destroying controls", identityTemp)
                m.startTransition(identityTemp)
            }
                m.outputs.Range(func(key uint32, value *outputState) bool {
                    m.outputs.Delete(key)
                    return true
                })
                m.controlsInitialized = false

                m.configMutex.RLock()
                identityTemp := m.config.HighTemp
                m.configMutex.RUnlock()

                m.transitionMutex.Lock()
                m.currentTemp = identityTemp
                m.targetTemp = identityTemp
                m.transitionMutex.Unlock()

                log.Info("All gamma controls destroyed")
            })
        }
    }
}

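Both destroy-on-disable paths above funnel work through m.post, which, judging from its use here, queues a closure onto the goroutine that owns the Wayland objects so gamma controls are only touched from one thread. A generic sketch of that pattern; the loopPoster type, its channel, and its run loop are assumptions for illustration, not the project's implementation:

// loopPoster is a hypothetical illustration of "post a closure to the
// owning goroutine": callers enqueue work, a single goroutine executes it.
type loopPoster struct {
    tasks chan func()
}

func (p *loopPoster) post(fn func()) { p.tasks <- fn }

func (p *loopPoster) run() {
    for fn := range p.tasks {
        fn() // gamma-control destroys, display syncs, etc. all run here
    }
}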
24
distro/debian/dms-git/_service
Normal file
@@ -0,0 +1,24 @@
<services>
  <!-- Pull full git repository for master branch -->
  <service name="tar_scm" mode="disabled">
    <param name="scm">git</param>
    <param name="url">https://github.com/AvengeMedia/DankMaterialShell.git</param>
    <param name="revision">master</param>
    <param name="filename">dms-git-source</param>
  </service>
  <service name="recompress" mode="disabled">
    <param name="file">*.tar</param>
    <param name="compression">gz</param>
  </service>
  <!-- Download pre-built binaries (fallback for Debian 13 with Go 1.22) -->
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-amd64.gz</param>
  </service>
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-arm64.gz</param>
  </service>
</services>
8
distro/debian/dms-git/debian/changelog
Normal file
@@ -0,0 +1,8 @@
dms-git (0.6.2+git) nightly; urgency=medium

  * Build dms binary from source for true git version strings
  * Match Fedora COPR git build behavior
  * Now shows proper git version (e.g., v0.6.2-11-g12e91534)
  * Add golang-go and make as build dependencies

 -- Avenge Media <AvengeMedia.US@gmail.com>  Fri, 22 Nov 2025 00:00:00 -0500
50
distro/debian/dms-git/debian/control
Normal file
@@ -0,0 +1,50 @@
|
||||
Source: dms-git
|
||||
Section: x11
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git
|
||||
|
||||
Package: dms-git
|
||||
Architecture: amd64 arm64
|
||||
Depends: ${misc:Depends},
|
||||
quickshell-git | quickshell,
|
||||
accountsservice,
|
||||
cava,
|
||||
cliphist,
|
||||
danksearch,
|
||||
dgop,
|
||||
matugen,
|
||||
qml6-module-qtcore,
|
||||
qml6-module-qtmultimedia,
|
||||
qml6-module-qtqml,
|
||||
qml6-module-qtquick,
|
||||
qml6-module-qtquick-controls,
|
||||
qml6-module-qtquick-dialogs,
|
||||
qml6-module-qtquick-effects,
|
||||
qml6-module-qtquick-layouts,
|
||||
qml6-module-qtquick-templates,
|
||||
qml6-module-qtquick-window,
|
||||
qt6ct,
|
||||
wl-clipboard
|
||||
Provides: dms
|
||||
Conflicts: dms
|
||||
Replaces: dms
|
||||
Description: DankMaterialShell - Modern Wayland Desktop Shell (git nightly)
|
||||
DMS (DankMaterialShell) is a feature-rich desktop shell built on
|
||||
Quickshell, providing a modern and customizable user interface for
|
||||
Wayland compositors like niri, hyprland, and sway.
|
||||
.
|
||||
This is the nightly/git version built from the latest master branch.
|
||||
.
|
||||
Features include:
|
||||
- Material Design inspired UI
|
||||
- Customizable themes and appearance
|
||||
- Built-in application launcher
|
||||
- System tray and notifications
|
||||
- Network and Bluetooth management
|
||||
- Audio controls
|
||||
- Systemd integration
|
||||
27
distro/debian/dms-git/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dms
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/DankMaterialShell
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/debian/dms-git/debian/files
Normal file
@@ -0,0 +1 @@
dms-git_0.6.0+git2061.5ddea836ppa1_source.buildinfo x11 optional
54
distro/debian/dms-git/debian/rules
Executable file
@@ -0,0 +1,54 @@
#!/usr/bin/make -f

DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)

%:
	dh $@

override_dh_auto_build:
	if [ "$(DEB_HOST_ARCH)" = "amd64" ]; then \
		if [ -f dms-distropkg-amd64.gz ]; then \
			gunzip -c dms-distropkg-amd64.gz > dms; \
		elif [ -f ../SOURCES/dms-distropkg-amd64.gz ]; then \
			gunzip -c ../SOURCES/dms-distropkg-amd64.gz > dms; \
		else \
			echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1; \
		fi \
	elif [ "$(DEB_HOST_ARCH)" = "arm64" ]; then \
		if [ -f dms-distropkg-arm64.gz ]; then \
			gunzip -c dms-distropkg-arm64.gz > dms; \
		elif [ -f ../SOURCES/dms-distropkg-arm64.gz ]; then \
			gunzip -c ../SOURCES/dms-distropkg-arm64.gz > dms; \
		else \
			echo "ERROR: dms-distropkg-arm64.gz not found!" && exit 1; \
		fi \
	else \
		echo "Unsupported architecture: $(DEB_HOST_ARCH)" && exit 1; \
	fi
	chmod +x dms

override_dh_auto_install:
	install -Dm755 dms debian/dms-git/usr/bin/dms

	mkdir -p debian/dms-git/usr/share/quickshell/dms debian/dms-git/usr/lib/systemd/user
	if [ -d quickshell ]; then \
		cp -r quickshell/* debian/dms-git/usr/share/quickshell/dms/; \
		install -Dm644 quickshell/assets/systemd/dms.service debian/dms-git/usr/lib/systemd/user/dms.service; \
	elif [ -d dms-git-source/quickshell ]; then \
		cp -r dms-git-source/quickshell/* debian/dms-git/usr/share/quickshell/dms/; \
		install -Dm644 dms-git-source/quickshell/assets/systemd/dms.service debian/dms-git/usr/lib/systemd/user/dms.service; \
	else \
		echo "ERROR: quickshell directory not found (checked root and dms-git-source/)!" && \
		echo "Contents of current directory:" && ls -la && \
		exit 1; \
	fi

	rm -rf debian/dms-git/usr/share/quickshell/dms/core \
		debian/dms-git/usr/share/quickshell/dms/distro

override_dh_auto_clean:
	rm -f dms
	[ ! -d dms-git-source ] || rm -rf dms-git-source
	dh_auto_clean
1
distro/debian/dms-git/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
1
distro/debian/dms-git/debian/source/include-binaries
Normal file
@@ -0,0 +1 @@
dms-distropkg-amd64.gz
4
distro/debian/dms-git/debian/source/options
Normal file
@@ -0,0 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-git-repo
21
distro/debian/dms/_service
Normal file
@@ -0,0 +1,21 @@
<services>
  <!-- Download source tarball from GitHub releases -->
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/archive/refs/tags/v0.6.2.tar.gz</param>
    <param name="filename">dms-source.tar.gz</param>
  </service>
  <!-- Download amd64 binary -->
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/download/v0.6.2/dms-distropkg-amd64.gz</param>
  </service>
  <!-- Download arm64 binary -->
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/download/v0.6.2/dms-distropkg-arm64.gz</param>
  </service>
</services>
7
distro/debian/dms/debian/changelog
Normal file
@@ -0,0 +1,7 @@
dms (0.6.2) stable; urgency=medium

  * Update to v0.6.2 release
  * Fix binary download paths for OBS builds
  * Native format: removed revisions

 -- Avenge Media <AvengeMedia.US@gmail.com>  Tue, 19 Nov 2025 10:00:00 -0500
47
distro/debian/dms/debian/control
Normal file
@@ -0,0 +1,47 @@
|
||||
Source: dms
|
||||
Section: x11
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git
|
||||
|
||||
Package: dms
|
||||
Architecture: amd64
|
||||
Depends: ${misc:Depends},
|
||||
quickshell-git | quickshell,
|
||||
accountsservice,
|
||||
cava,
|
||||
cliphist,
|
||||
danksearch,
|
||||
dgop,
|
||||
matugen,
|
||||
qml6-module-qtcore,
|
||||
qml6-module-qtmultimedia,
|
||||
qml6-module-qtqml,
|
||||
qml6-module-qtquick,
|
||||
qml6-module-qtquick-controls,
|
||||
qml6-module-qtquick-dialogs,
|
||||
qml6-module-qtquick-effects,
|
||||
qml6-module-qtquick-layouts,
|
||||
qml6-module-qtquick-templates,
|
||||
qml6-module-qtquick-window,
|
||||
qt6ct,
|
||||
wl-clipboard
|
||||
Conflicts: dms-git
|
||||
Replaces: dms-git
|
||||
Description: DankMaterialShell - Modern Wayland Desktop Shell
|
||||
DMS (DankMaterialShell) is a feature-rich desktop shell built on
|
||||
Quickshell, providing a modern and customizable user interface for
|
||||
Wayland compositors like niri, hyprland, and sway.
|
||||
.
|
||||
Features include:
|
||||
- Material Design inspired UI
|
||||
- Customizable themes and appearance
|
||||
- Built-in application launcher
|
||||
- System tray and notifications
|
||||
- Network and Bluetooth management
|
||||
- Audio controls
|
||||
- Systemd integration
|
||||
27
distro/debian/dms/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dms
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/DankMaterialShell
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/debian/dms/debian/files
Normal file
@@ -0,0 +1 @@
dms_0.6.0ppa2_source.buildinfo x11 optional
71
distro/debian/dms/debian/rules
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
|
||||
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
|
||||
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
if [ "$(DEB_HOST_ARCH)" = "amd64" ]; then \
|
||||
if [ -f dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c dms-distropkg-amd64.gz > dms; \
|
||||
elif [ -f ../SOURCES/dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c ../SOURCES/dms-distropkg-amd64.gz > dms; \
|
||||
elif [ -f ../../SOURCES/dms-distropkg-amd64.gz ]; then \
|
||||
gunzip -c ../../SOURCES/dms-distropkg-amd64.gz > dms; \
|
||||
else \
|
||||
echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1; \
|
||||
fi \
|
||||
elif [ "$(DEB_HOST_ARCH)" = "arm64" ]; then \
|
||||
if [ -f dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c dms-distropkg-arm64.gz > dms; \
|
||||
elif [ -f ../SOURCES/dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c ../SOURCES/dms-distropkg-arm64.gz > dms; \
|
||||
elif [ -f ../../SOURCES/dms-distropkg-arm64.gz ]; then \
|
||||
gunzip -c ../../SOURCES/dms-distropkg-arm64.gz > dms; \
|
||||
else \
|
||||
echo "ERROR: dms-distropkg-arm64.gz not found!" && exit 1; \
|
||||
fi \
|
||||
else \
|
||||
echo "Unsupported architecture: $(DEB_HOST_ARCH)" && exit 1; \
|
||||
fi
|
||||
chmod +x dms
|
||||
|
||||
if [ ! -d DankMaterialShell-$(UPSTREAM_VERSION) ]; then \
|
||||
if [ -f ../SOURCES/dms-source.tar.gz ]; then \
|
||||
tar -xzf ../SOURCES/dms-source.tar.gz; \
|
||||
elif [ -f dms-source.tar.gz ]; then \
|
||||
tar -xzf dms-source.tar.gz; \
|
||||
fi; \
|
||||
if [ ! -d DankMaterialShell-$(UPSTREAM_VERSION) ] && [ -d DankMaterialShell-0.6.2 ]; then \
|
||||
mv DankMaterialShell-0.6.2 DankMaterialShell-$(UPSTREAM_VERSION); \
|
||||
fi; \
|
||||
fi
|
||||
|
||||
|
||||
override_dh_auto_install:
|
||||
install -Dm755 dms debian/dms/usr/bin/dms
|
||||
|
||||
mkdir -p debian/dms/usr/share/quickshell/dms debian/dms/usr/lib/systemd/user
|
||||
# Handle directory name mismatch again for install step if needed
|
||||
if [ ! -d DankMaterialShell-$(UPSTREAM_VERSION) ] && [ -d DankMaterialShell-0.6.2 ]; then \
|
||||
mv DankMaterialShell-0.6.2 DankMaterialShell-$(UPSTREAM_VERSION); \
|
||||
fi
|
||||
if [ -d DankMaterialShell-$(UPSTREAM_VERSION) ]; then \
|
||||
cp -r DankMaterialShell-$(UPSTREAM_VERSION)/quickshell/* debian/dms/usr/share/quickshell/dms/; \
|
||||
install -Dm644 DankMaterialShell-$(UPSTREAM_VERSION)/quickshell/assets/systemd/dms.service debian/dms/usr/lib/systemd/user/dms.service; \
|
||||
else \
|
||||
echo "ERROR: DankMaterialShell-$(UPSTREAM_VERSION) directory not found!" && \
|
||||
echo "Contents of current directory:" && ls -la && \
|
||||
exit 1; \
|
||||
fi
|
||||
|
||||
rm -rf debian/dms/usr/share/quickshell/dms/core \
|
||||
debian/dms/usr/share/quickshell/dms/distro
|
||||
|
||||
override_dh_auto_clean:
|
||||
rm -f dms
|
||||
rm -rf DankMaterialShell-$(UPSTREAM_VERSION)
|
||||
dh_auto_clean
|
||||
1
distro/debian/dms/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
2
distro/debian/dms/debian/source/include-binaries
Normal file
@@ -0,0 +1,2 @@
dms-distropkg-amd64.gz
dms-source.tar.gz
4
distro/debian/dms/debian/source/options
Normal file
@@ -0,0 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-source.tar.gz
24
distro/opensuse/_service
Normal file
@@ -0,0 +1,24 @@
<services>
  <!-- Pull full git repository for master branch (QML code) -->
  <service name="tar_scm">
    <param name="scm">git</param>
    <param name="url">https://github.com/AvengeMedia/DankMaterialShell.git</param>
    <param name="revision">master</param>
    <param name="filename">dms-git-source</param>
  </service>
  <service name="recompress">
    <param name="file">*.tar</param>
    <param name="compression">gz</param>
  </service>
  <!-- Download pre-built binaries -->
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-amd64.gz</param>
  </service>
  <service name="download_url">
    <param name="protocol">https</param>
    <param name="host">github.com</param>
    <param name="path">/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-arm64.gz</param>
  </service>
</services>
109
distro/opensuse/dms-git.spec
Normal file
@@ -0,0 +1,109 @@
|
||||
%global debug_package %{nil}
|
||||
|
||||
Name: dms-git
|
||||
Version: 0.6.2+git2147.03073f68
|
||||
Release: 5%{?dist}
|
||||
Epoch: 1
|
||||
Summary: DankMaterialShell - Material 3 inspired shell (git nightly)
|
||||
|
||||
License: MIT
|
||||
URL: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Source0: dms-git-source.tar.gz
|
||||
Source1: dms-distropkg-amd64.gz
|
||||
Source2: dms-distropkg-arm64.gz
|
||||
|
||||
BuildRequires: gzip
|
||||
BuildRequires: systemd-rpm-macros
|
||||
|
||||
Requires: (quickshell-git or quickshell)
|
||||
Requires: accountsservice
|
||||
Requires: dgop
|
||||
|
||||
Recommends: cava
|
||||
Recommends: cliphist
|
||||
Recommends: danksearch
|
||||
Recommends: matugen
|
||||
Recommends: quickshell-git
|
||||
Recommends: wl-clipboard
|
||||
|
||||
Recommends: NetworkManager
|
||||
Recommends: qt6-qtmultimedia
|
||||
Suggests: qt6ct
|
||||
|
||||
Provides: dms
|
||||
Conflicts: dms
|
||||
Obsoletes: dms
|
||||
|
||||
%description
|
||||
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
|
||||
and optimized for niri, Hyprland, Sway, and other wlroots compositors.
|
||||
|
||||
This git version tracks the master branch and includes the latest features
|
||||
and fixes. Includes pre-built dms CLI binary and QML shell files.
|
||||
|
||||
%prep
|
||||
%setup -q -n dms-git-source
|
||||
|
||||
%ifarch x86_64
|
||||
gunzip -c %{SOURCE1} > dms
|
||||
%endif
|
||||
%ifarch aarch64
|
||||
gunzip -c %{SOURCE2} > dms
|
||||
%endif
|
||||
chmod +x dms
|
||||
|
||||
%build
|
||||
|
||||
%install
|
||||
install -Dm755 dms %{buildroot}%{_bindir}/dms
|
||||
|
||||
install -d %{buildroot}%{_datadir}/bash-completion/completions
|
||||
install -d %{buildroot}%{_datadir}/zsh/site-functions
|
||||
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
|
||||
./dms completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
|
||||
./dms completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
|
||||
./dms completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :
|
||||
|
||||
install -Dm644 quickshell/assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service
|
||||
|
||||
install -dm755 %{buildroot}%{_datadir}/quickshell/dms
|
||||
cp -r quickshell/* %{buildroot}%{_datadir}/quickshell/dms/
|
||||
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
|
||||
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/core
|
||||
|
||||
%posttrans
|
||||
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
|
||||
rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
|
||||
rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
if [ "$1" -ge 2 ]; then
|
||||
pkill -USR1 -x dms >/dev/null 2>&1 || true
|
||||
fi
|
||||
|
||||
%files
|
||||
%license LICENSE
|
||||
%doc CONTRIBUTING.md
|
||||
%doc quickshell/README.md
|
||||
%{_bindir}/dms
|
||||
%dir %{_datadir}/fish
|
||||
%dir %{_datadir}/fish/vendor_completions.d
|
||||
%{_datadir}/fish/vendor_completions.d/dms.fish
|
||||
%dir %{_datadir}/zsh
|
||||
%dir %{_datadir}/zsh/site-functions
|
||||
%{_datadir}/zsh/site-functions/_dms
|
||||
%{_datadir}/bash-completion/completions/dms
|
||||
%dir %{_datadir}/quickshell
|
||||
%{_datadir}/quickshell/dms/
|
||||
%{_userunitdir}/dms.service
|
||||
|
||||
%changelog
|
||||
* Tue Nov 25 2025 Avenge Media <AvengeMedia.US@gmail.com> - 0.6.2+git2147.03073f68-1
|
||||
- Git snapshot (commit 2147: 03073f68)
|
||||
* Fri Nov 22 2025 AvengeMedia <maintainer@avengemedia.com> - 0.6.2+git-5
|
||||
- Git nightly build from master branch
|
||||
- Multi-arch support (x86_64, aarch64)
|
||||
107
distro/opensuse/dms.spec
Normal file
@@ -0,0 +1,107 @@
|
||||
# Spec for DMS for OpenSUSE/OBS
|
||||
|
||||
%global debug_package %{nil}
|
||||
|
||||
Name: dms
|
||||
Version: 0.6.2
|
||||
Release: 1%{?dist}
|
||||
Summary: DankMaterialShell - Material 3 inspired shell for Wayland compositors
|
||||
|
||||
License: MIT
|
||||
URL: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Source0: dms-source.tar.gz
|
||||
Source1: dms-distropkg-amd64.gz
|
||||
Source2: dms-distropkg-arm64.gz
|
||||
|
||||
BuildRequires: gzip
|
||||
BuildRequires: systemd-rpm-macros
|
||||
|
||||
# Core requirements
|
||||
Requires: (quickshell-git or quickshell)
|
||||
Requires: accountsservice
|
||||
Requires: dgop
|
||||
|
||||
# Core utilities (Highly recommended for DMS functionality)
|
||||
Recommends: cava
|
||||
Recommends: cliphist
|
||||
Recommends: danksearch
|
||||
Recommends: matugen
|
||||
Recommends: NetworkManager
|
||||
Recommends: qt6-qtmultimedia
|
||||
Recommends: wl-clipboard
|
||||
Suggests: qt6ct
|
||||
|
||||
%description
|
||||
DankMaterialShell (DMS) is a modern Wayland desktop shell built with Quickshell
|
||||
and optimized for niri, Hyprland, Sway, and other wlroots compositors. Features
|
||||
notifications, app launcher, wallpaper customization, and plugin system.
|
||||
|
||||
Includes auto-theming for GTK/Qt apps with matugen, 20+ customizable widgets,
|
||||
process monitoring, notification center, clipboard history, dock, control center,
|
||||
lock screen, and comprehensive plugin system.
|
||||
|
||||
%prep
|
||||
%setup -q -n DankMaterialShell-%{version}
|
||||
|
||||
%ifarch x86_64
|
||||
gunzip -c %{SOURCE1} > dms
|
||||
%endif
|
||||
%ifarch aarch64
|
||||
gunzip -c %{SOURCE2} > dms
|
||||
%endif
|
||||
chmod +x dms
|
||||
|
||||
%build
|
||||
|
||||
%install
|
||||
install -Dm755 dms %{buildroot}%{_bindir}/dms
|
||||
|
||||
install -d %{buildroot}%{_datadir}/bash-completion/completions
|
||||
install -d %{buildroot}%{_datadir}/zsh/site-functions
|
||||
install -d %{buildroot}%{_datadir}/fish/vendor_completions.d
|
||||
./dms completion bash > %{buildroot}%{_datadir}/bash-completion/completions/dms || :
|
||||
./dms completion zsh > %{buildroot}%{_datadir}/zsh/site-functions/_dms || :
|
||||
./dms completion fish > %{buildroot}%{_datadir}/fish/vendor_completions.d/dms.fish || :
|
||||
|
||||
install -Dm644 quickshell/assets/systemd/dms.service %{buildroot}%{_userunitdir}/dms.service
|
||||
|
||||
install -dm755 %{buildroot}%{_datadir}/quickshell/dms
|
||||
cp -r quickshell/* %{buildroot}%{_datadir}/quickshell/dms/
|
||||
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.git*
|
||||
rm -f %{buildroot}%{_datadir}/quickshell/dms/.gitignore
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/.github
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/distro
|
||||
rm -rf %{buildroot}%{_datadir}/quickshell/dms/core
|
||||
|
||||
%posttrans
|
||||
if [ -d "%{_sysconfdir}/xdg/quickshell/dms" ]; then
|
||||
rmdir "%{_sysconfdir}/xdg/quickshell/dms" 2>/dev/null || true
|
||||
rmdir "%{_sysconfdir}/xdg/quickshell" 2>/dev/null || true
|
||||
rmdir "%{_sysconfdir}/xdg" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
if [ "$1" -ge 2 ]; then
|
||||
pkill -USR1 -x dms >/dev/null 2>&1 || true
|
||||
fi
|
||||
|
||||
%files
|
||||
%license LICENSE
|
||||
%doc CONTRIBUTING.md
|
||||
%doc quickshell/README.md
|
||||
%{_bindir}/dms
|
||||
%dir %{_datadir}/fish
|
||||
%dir %{_datadir}/fish/vendor_completions.d
|
||||
%{_datadir}/fish/vendor_completions.d/dms.fish
|
||||
%dir %{_datadir}/zsh
|
||||
%dir %{_datadir}/zsh/site-functions
|
||||
%{_datadir}/zsh/site-functions/_dms
|
||||
%{_datadir}/bash-completion/completions/dms
|
||||
%dir %{_datadir}/quickshell
|
||||
%{_datadir}/quickshell/dms/
|
||||
%{_userunitdir}/dms.service
|
||||
|
||||
%changelog
|
||||
* Fri Nov 22 2025 AvengeMedia <maintainer@avengemedia.com> - 0.6.2-1
|
||||
- Stable release build with pre-built binaries
|
||||
- Multi-arch support (x86_64, aarch64)
|
||||
106
distro/scripts/obs-status.sh
Executable file
@@ -0,0 +1,106 @@
|
||||
#!/bin/bash
|
||||
# Unified OBS status checker for dms packages
|
||||
# Checks all platforms (Debian, OpenSUSE) and architectures (x86_64, aarch64)
|
||||
# Only pulls logs if build failed
|
||||
# Usage: ./distro/scripts/obs-status.sh [package-name]
|
||||
#
|
||||
# Examples:
|
||||
# ./distro/scripts/obs-status.sh # Check all packages
|
||||
# ./distro/scripts/obs-status.sh dms # Check specific package
|
||||
|
||||
OBS_BASE_PROJECT="home:AvengeMedia"
|
||||
OBS_BASE="$HOME/.cache/osc-checkouts"
|
||||
|
||||
ALL_PACKAGES=(dms dms-git)
|
||||
|
||||
REPOS=("Debian_13" "openSUSE_Tumbleweed" "16.0")
|
||||
ARCHES=("x86_64" "aarch64")
|
||||
|
||||
if [[ -n "$1" ]]; then
|
||||
PACKAGES=("$1")
|
||||
else
|
||||
PACKAGES=("${ALL_PACKAGES[@]}")
|
||||
fi
|
||||
|
||||
cd "$OBS_BASE"
|
||||
|
||||
for pkg in "${PACKAGES[@]}"; do
|
||||
case "$pkg" in
|
||||
dms)
|
||||
PROJECT="$OBS_BASE_PROJECT:dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="$OBS_BASE_PROJECT:dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$pkg'"
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "=========================================="
|
||||
echo "=== $pkg ==="
|
||||
echo "=========================================="
|
||||
|
||||
# Checkout if needed
|
||||
if [[ ! -d "$PROJECT/$pkg" ]]; then
|
||||
osc co "$PROJECT/$pkg" 2>&1 | tail -1
|
||||
fi
|
||||
|
||||
cd "$PROJECT/$pkg"
|
||||
|
||||
ALL_RESULTS=$(osc results 2>&1)
|
||||
|
||||
# Check each repository and architecture
|
||||
FAILED_BUILDS=()
|
||||
for repo in "${REPOS[@]}"; do
|
||||
for arch in "${ARCHES[@]}"; do
|
||||
STATUS=$(echo "$ALL_RESULTS" | grep "$repo.*$arch" | awk '{print $NF}' | head -1)
|
||||
|
||||
if [[ -n "$STATUS" ]]; then
|
||||
# Color code status
|
||||
case "$STATUS" in
|
||||
succeeded)
|
||||
COLOR="\033[0;32m" # Green
|
||||
SYMBOL="✅"
|
||||
;;
|
||||
failed)
|
||||
COLOR="\033[0;31m" # Red
|
||||
SYMBOL="❌"
|
||||
FAILED_BUILDS+=("$repo $arch")
|
||||
;;
|
||||
unresolvable)
|
||||
COLOR="\033[0;33m" # Yellow
|
||||
SYMBOL="⚠️"
|
||||
;;
|
||||
*)
|
||||
COLOR="\033[0;37m" # White
|
||||
SYMBOL="⏳"
|
||||
;;
|
||||
esac
|
||||
echo -e " $SYMBOL $repo $arch: ${COLOR}$STATUS\033[0m"
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
# Pull logs for failed builds
|
||||
if [[ ${#FAILED_BUILDS[@]} -gt 0 ]]; then
|
||||
echo ""
|
||||
echo " 📋 Fetching logs for failed builds..."
|
||||
for build in "${FAILED_BUILDS[@]}"; do
|
||||
read -r repo arch <<< "$build"
|
||||
echo ""
|
||||
echo " ────────────────────────────────────────────"
|
||||
echo " Build log: $repo $arch"
|
||||
echo " ────────────────────────────────────────────"
|
||||
osc remotebuildlog "$PROJECT" "$pkg" "$repo" "$arch" 2>&1 | tail -100
|
||||
done
|
||||
fi
|
||||
|
||||
echo ""
|
||||
cd - > /dev/null
|
||||
done
|
||||
|
||||
echo "=========================================="
|
||||
echo "Status check complete!"
|
||||
|
||||
868
distro/scripts/obs-upload.sh
Executable file
@@ -0,0 +1,868 @@
|
||||
#!/bin/bash
|
||||
# Unified OBS upload script for dms packages
|
||||
# Handles Debian and OpenSUSE builds for both x86_64 and aarch64
|
||||
# Usage: ./distro/scripts/obs-upload.sh [distro] <package-name> [commit-message]
|
||||
#
|
||||
# Examples:
|
||||
# ./distro/scripts/obs-upload.sh dms "Update to v0.6.2"
|
||||
# ./distro/scripts/obs-upload.sh debian dms
|
||||
# ./distro/scripts/obs-upload.sh opensuse dms-git
|
||||
|
||||
set -e
|
||||
|
||||
UPLOAD_DEBIAN=true
|
||||
UPLOAD_OPENSUSE=true
|
||||
PACKAGE=""
|
||||
MESSAGE=""
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
debian)
|
||||
UPLOAD_DEBIAN=true
|
||||
UPLOAD_OPENSUSE=false
|
||||
;;
|
||||
opensuse)
|
||||
UPLOAD_DEBIAN=false
|
||||
UPLOAD_OPENSUSE=true
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$PACKAGE" ]]; then
|
||||
PACKAGE="$arg"
|
||||
elif [[ -z "$MESSAGE" ]]; then
|
||||
MESSAGE="$arg"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
OBS_BASE_PROJECT="home:AvengeMedia"
|
||||
OBS_BASE="$HOME/.cache/osc-checkouts"
|
||||
AVAILABLE_PACKAGES=(dms dms-git)
|
||||
|
||||
if [[ -z "$PACKAGE" ]]; then
|
||||
echo "Available packages:"
|
||||
echo ""
|
||||
echo " 1. dms - Stable DMS"
|
||||
echo " 2. dms-git - Nightly DMS"
|
||||
echo " a. all"
|
||||
echo ""
|
||||
read -p "Select package (1-${#AVAILABLE_PACKAGES[@]}, a): " selection
|
||||
|
||||
if [[ "$selection" == "a" ]] || [[ "$selection" == "all" ]]; then
|
||||
PACKAGE="all"
|
||||
elif [[ "$selection" =~ ^[0-9]+$ ]] && [[ "$selection" -ge 1 ]] && [[ "$selection" -le ${#AVAILABLE_PACKAGES[@]} ]]; then
|
||||
PACKAGE="${AVAILABLE_PACKAGES[$((selection-1))]}"
|
||||
else
|
||||
echo "Error: Invalid selection"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
if [[ -z "$MESSAGE" ]]; then
|
||||
MESSAGE="Update packaging"
|
||||
fi
|
||||
|
||||
REPO_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
if [[ ! -d "distro/debian" ]]; then
|
||||
echo "Error: Run this script from the repository root"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Handle "all" option
|
||||
if [[ "$PACKAGE" == "all" ]]; then
|
||||
echo "==> Uploading all packages"
|
||||
DISTRO_ARG=""
|
||||
if [[ "$UPLOAD_DEBIAN" == true && "$UPLOAD_OPENSUSE" == false ]]; then
|
||||
DISTRO_ARG="debian"
|
||||
elif [[ "$UPLOAD_DEBIAN" == false && "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
DISTRO_ARG="opensuse"
|
||||
fi
|
||||
echo ""
|
||||
FAILED=()
|
||||
for pkg in "${AVAILABLE_PACKAGES[@]}"; do
|
||||
if [[ -d "distro/debian/$pkg" ]]; then
|
||||
echo "=========================================="
|
||||
echo "Uploading $pkg..."
|
||||
echo "=========================================="
|
||||
if [[ -n "$DISTRO_ARG" ]]; then
|
||||
if bash "$0" "$DISTRO_ARG" "$pkg" "$MESSAGE"; then
|
||||
echo "✅ $pkg uploaded successfully"
|
||||
else
|
||||
echo "❌ $pkg failed to upload"
|
||||
FAILED+=("$pkg")
|
||||
fi
|
||||
else
|
||||
if bash "$0" "$pkg" "$MESSAGE"; then
|
||||
echo "✅ $pkg uploaded successfully"
|
||||
else
|
||||
echo "❌ $pkg failed to upload"
|
||||
FAILED+=("$pkg")
|
||||
fi
|
||||
fi
|
||||
echo ""
|
||||
else
|
||||
echo "⚠️ Skipping $pkg (not found in distro/debian/)"
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#FAILED[@]} -eq 0 ]]; then
|
||||
echo "✅ All packages uploaded successfully!"
|
||||
exit 0
|
||||
else
|
||||
echo "❌ Some packages failed: ${FAILED[*]}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if package exists
|
||||
if [[ ! -d "distro/debian/$PACKAGE" ]]; then
|
||||
echo "Error: Package '$PACKAGE' not found in distro/debian/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
PROJECT="dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$PACKAGE'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
OBS_PROJECT="${OBS_BASE_PROJECT}:${PROJECT}"
|
||||
|
||||
echo "==> Target: $OBS_PROJECT / $PACKAGE"
|
||||
if [[ "$UPLOAD_DEBIAN" == true && "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
echo "==> Distributions: Debian + OpenSUSE"
|
||||
elif [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
echo "==> Distribution: Debian only"
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
echo "==> Distribution: OpenSUSE only"
|
||||
fi
|
||||
|
||||
mkdir -p "$OBS_BASE"
|
||||
|
||||
if [[ ! -d "$OBS_BASE/$OBS_PROJECT/$PACKAGE" ]]; then
|
||||
echo "Checking out $OBS_PROJECT/$PACKAGE..."
|
||||
cd "$OBS_BASE"
|
||||
osc co "$OBS_PROJECT/$PACKAGE"
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
|
||||
WORK_DIR="$OBS_BASE/$OBS_PROJECT/$PACKAGE"
|
||||
|
||||
echo "==> Preparing $PACKAGE for OBS upload"
|
||||
|
||||
find "$WORK_DIR" -maxdepth 1 -type f \( -name "*.tar.gz" -o -name "*.tar.xz" -o -name "*.tar.bz2" -o -name "*.tar" -o -name "*.spec" -o -name "_service" -o -name "*.dsc" \) -delete 2>/dev/null || true
|
||||
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
echo " - Copying _service (for binary downloads)"
|
||||
cp "distro/debian/$PACKAGE/_service" "$WORK_DIR/"
|
||||
fi
|
||||
|
||||
CHANGELOG_VERSION=""
|
||||
if [[ -d "distro/debian/$PACKAGE/debian" ]]; then
|
||||
CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" "distro/debian/$PACKAGE/debian/changelog" 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "")
|
||||
if [[ -n "$CHANGELOG_VERSION" ]] && [[ "$CHANGELOG_VERSION" == *"-"* ]]; then
|
||||
SOURCE_FORMAT_CHECK=$(cat "distro/debian/$PACKAGE/debian/source/format" 2>/dev/null || echo "3.0 (quilt)")
|
||||
if [[ "$SOURCE_FORMAT_CHECK" == *"native"* ]]; then
|
||||
CHANGELOG_VERSION=$(echo "$CHANGELOG_VERSION" | sed 's/-[0-9]*$//')
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
echo " - Copying $PACKAGE.spec for OpenSUSE"
|
||||
cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/"
|
||||
|
||||
if [[ -f "$WORK_DIR/.osc/$PACKAGE.spec" ]]; then
|
||||
NEW_VERSION=$(grep "^Version:" "$WORK_DIR/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
NEW_RELEASE=$(grep "^Release:" "$WORK_DIR/$PACKAGE.spec" | sed 's/^Release:[[:space:]]*//' | sed 's/%{?dist}//' | head -1)
|
||||
OLD_VERSION=$(grep "^Version:" "$WORK_DIR/.osc/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
OLD_RELEASE=$(grep "^Release:" "$WORK_DIR/.osc/$PACKAGE.spec" | sed 's/^Release:[[:space:]]*//' | sed 's/%{?dist}//' | head -1)
|
||||
|
||||
if [[ "$NEW_VERSION" == "$OLD_VERSION" ]]; then
|
||||
if [[ "$OLD_RELEASE" =~ ^([0-9]+) ]]; then
|
||||
BASE_RELEASE="${BASH_REMATCH[1]}"
|
||||
NEXT_RELEASE=$((BASE_RELEASE + 1))
|
||||
echo " - Detected rebuild of same version $NEW_VERSION (release $OLD_RELEASE -> $NEXT_RELEASE)"
|
||||
sed -i "s/^Release:[[:space:]]*${NEW_RELEASE}%{?dist}/Release: ${NEXT_RELEASE}%{?dist}/" "$WORK_DIR/$PACKAGE.spec"
|
||||
fi
|
||||
else
|
||||
echo " - New version detected: $OLD_VERSION -> $NEW_VERSION (keeping release $NEW_RELEASE)"
|
||||
fi
|
||||
else
|
||||
echo " - First upload to OBS (no previous spec found)"
|
||||
fi
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
echo " - Warning: OpenSUSE spec file not found, skipping OpenSUSE upload"
|
||||
fi
|
||||
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
echo " - OpenSUSE-only upload: creating source tarball"
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]] && grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
fi
|
||||
rm -rf .git
|
||||
SOURCE_DIR=$(pwd)
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "$SOURCE_DIR" && -d "$SOURCE_DIR" ]]; then
|
||||
SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
|
||||
if [[ -n "$SOURCE0" ]]; then
|
||||
OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX)
|
||||
cd "$OBS_TARBALL_DIR"
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
;;
|
||||
*)
|
||||
EXPECTED_DIR=$(basename "$SOURCE_DIR")
|
||||
;;
|
||||
esac
|
||||
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
tar -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$OBS_TARBALL_DIR"
|
||||
fi
|
||||
else
|
||||
echo " - Warning: Could not obtain source for OpenSUSE tarball"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Generate .dsc file and handle source format (for Debian only)
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; then
|
||||
# Use CHANGELOG_VERSION already set above, or get it if not set
|
||||
if [[ -z "$CHANGELOG_VERSION" ]]; then
|
||||
CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" distro/debian/$PACKAGE/debian/changelog 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "0.1.11")
|
||||
fi
|
||||
|
||||
# Determine source format
|
||||
SOURCE_FORMAT=$(cat "distro/debian/$PACKAGE/debian/source/format" 2>/dev/null || echo "3.0 (quilt)")
|
||||
|
||||
# For native format, remove any Debian revision (-N) from version
|
||||
# Native format cannot have revisions, so strip them if present
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]] && [[ "$CHANGELOG_VERSION" == *"-"* ]]; then
|
||||
# Remove Debian revision (everything from - onwards)
|
||||
CHANGELOG_VERSION=$(echo "$CHANGELOG_VERSION" | sed 's/-[0-9]*$//')
|
||||
echo " Warning: Removed Debian revision from version for native format: $CHANGELOG_VERSION"
|
||||
fi
|
||||
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]]; then
|
||||
echo " - Native format detected: creating combined tarball"
|
||||
|
||||
VERSION="$CHANGELOG_VERSION"
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
|
||||
SOURCE_DIR=""
|
||||
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
if grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
fi
|
||||
rm -rf .git
|
||||
SOURCE_DIR=$(pwd)
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo "Error: Failed to clone git repository"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
elif grep -q "download_url" "distro/debian/$PACKAGE/_service" && [[ "$PACKAGE" != "dms-git" ]]; then
|
||||
ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" | \
|
||||
grep '<param name="path">' | \
|
||||
sed 's/.*<param name="path">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
SOURCE_PATH=""
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" && \
|
||||
! echo "$path" | grep -qE "(distropkg|binary)"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -z "$SOURCE_PATH" ]]; then
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "\.tar\.(gz|xz|bz2)$"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ -n "$SOURCE_PATH" ]]; then
|
||||
SOURCE_BLOCK=$(awk -v target="$SOURCE_PATH" '
|
||||
/<service name="download_url">/ { in_block=1; block="" }
|
||||
in_block { block=block"\n"$0 }
|
||||
/<\/service>/ {
|
||||
if (in_block && block ~ target) {
|
||||
print block
|
||||
exit
|
||||
}
|
||||
in_block=0
|
||||
}
|
||||
' "distro/debian/$PACKAGE/_service")
|
||||
|
||||
URL_PROTOCOL=$(echo "$SOURCE_BLOCK" | grep "protocol" | sed 's/.*<param name="protocol">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_HOST=$(echo "$SOURCE_BLOCK" | grep "host" | sed 's/.*<param name="host">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_PATH="$SOURCE_PATH"
|
||||
fi
|
||||
|
||||
if [[ -n "$URL_PROTOCOL" && -n "$URL_HOST" && -n "$URL_PATH" ]]; then
|
||||
SOURCE_URL="${URL_PROTOCOL}://${URL_HOST}${URL_PATH}"
|
||||
echo " Downloading source from: $SOURCE_URL"
|
||||
|
||||
if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null || \
|
||||
curl -L -f -s -o "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null; then
|
||||
cd "$TEMP_DIR"
|
||||
if [[ "$SOURCE_URL" == *.tar.xz ]]; then
|
||||
tar -xJf source-archive
|
||||
elif [[ "$SOURCE_URL" == *.tar.gz ]] || [[ "$SOURCE_URL" == *.tgz ]]; then
|
||||
tar -xzf source-archive
|
||||
fi
|
||||
SOURCE_DIR=$(find . -maxdepth 1 -type d -name "DankMaterialShell-*" | head -1)
|
||||
if [[ -z "$SOURCE_DIR" ]]; then
|
||||
SOURCE_DIR=$(find . -maxdepth 1 -type d ! -name "." | head -1)
|
||||
fi
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Failed to extract source archive or find source directory"
|
||||
echo "Contents of $TEMP_DIR:"
|
||||
ls -la "$TEMP_DIR"
|
||||
cd "$REPO_ROOT"
|
||||
exit 1
|
||||
fi
|
||||
SOURCE_DIR=$(cd "$SOURCE_DIR" && pwd)
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo "Error: Failed to download source from $SOURCE_URL"
|
||||
echo "Tried both wget and curl. Please check the URL and network connectivity."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Could not determine or obtain source for $PACKAGE"
|
||||
echo "SOURCE_DIR: $SOURCE_DIR"
|
||||
if [[ -d "$TEMP_DIR" ]]; then
|
||||
echo "Contents of temp directory:"
|
||||
ls -la "$TEMP_DIR"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo " Found source directory: $SOURCE_DIR"
|
||||
|
||||
# Create OpenSUSE-compatible source tarballs BEFORE adding debian/ directory
|
||||
# (OpenSUSE doesn't need debian/ directory)
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
echo " - Creating OpenSUSE-compatible source tarballs"
|
||||
|
||||
SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
if [[ -z "$SOURCE0" && "$PACKAGE" == "dms-git" ]]; then
|
||||
SOURCE0="dms-git-source.tar.gz"
|
||||
fi
|
||||
|
||||
if [[ -n "$SOURCE0" ]]; then
|
||||
OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX)
|
||||
cd "$OBS_TARBALL_DIR"
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
if [[ -n "$CHANGELOG_VERSION" ]]; then
|
||||
DMS_VERSION="$CHANGELOG_VERSION"
|
||||
else
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
fi
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
*)
|
||||
DIR_NAME=$(basename "$SOURCE_DIR")
|
||||
echo " Creating $SOURCE0 (directory: $DIR_NAME)"
|
||||
cp -r "$SOURCE_DIR" "$DIR_NAME"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' -cJf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' -cjf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' -czf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
fi
|
||||
rm -rf "$DIR_NAME"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
esac
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$OBS_TARBALL_DIR"
|
||||
echo " - OpenSUSE source tarballs created"
|
||||
fi
|
||||
|
||||
cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/"
|
||||
fi
|
||||
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
echo " Copying debian/ directory into source"
|
||||
cp -r "distro/debian/$PACKAGE/debian" "$SOURCE_DIR/"
|
||||
|
||||
# For dms, rename directory to match what debian/rules expects
|
||||
# debian/rules uses UPSTREAM_VERSION which is the full version from changelog
|
||||
if [[ "$PACKAGE" == "dms" ]]; then
|
||||
CHANGELOG_IN_SOURCE="$SOURCE_DIR/debian/changelog"
|
||||
if [[ -f "$CHANGELOG_IN_SOURCE" ]]; then
|
||||
ACTUAL_VERSION=$(grep -m1 "^$PACKAGE" "$CHANGELOG_IN_SOURCE" 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "$VERSION")
|
||||
CURRENT_DIR=$(basename "$SOURCE_DIR")
|
||||
EXPECTED_DIR="DankMaterialShell-${ACTUAL_VERSION}"
|
||||
if [[ "$CURRENT_DIR" != "$EXPECTED_DIR" ]]; then
|
||||
echo " Renaming directory from $CURRENT_DIR to $EXPECTED_DIR to match debian/rules"
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
mv "$CURRENT_DIR" "$EXPECTED_DIR"
|
||||
SOURCE_DIR="$(pwd)/$EXPECTED_DIR"
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
rm -f "$WORK_DIR/$COMBINED_TARBALL"
|
||||
|
||||
echo " Creating combined tarball: $COMBINED_TARBALL"
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
TARBALL_BASE=$(basename "$SOURCE_DIR")
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
if [[ "$PACKAGE" == "dms" ]]; then
|
||||
TARBALL_DIR=$(tar -tzf "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null | head -1 | cut -d'/' -f1)
|
||||
EXPECTED_TARBALL_DIR="DankMaterialShell-${VERSION}"
|
||||
if [[ "$TARBALL_DIR" != "$EXPECTED_TARBALL_DIR" ]]; then
|
||||
echo " Warning: Tarball directory name mismatch: $TARBALL_DIR != $EXPECTED_TARBALL_DIR"
|
||||
echo " This may cause build failures. Recreating tarball..."
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
rm -f "$WORK_DIR/$COMBINED_TARBALL"
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
fi
|
||||
|
||||
TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
|
||||
TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
|
||||
|
||||
BUILD_DEPS="debhelper-compat (= 13)"
|
||||
if [[ -f "distro/debian/$PACKAGE/debian/control" ]]; then
|
||||
CONTROL_DEPS=$(sed -n '/^Build-Depends:/,/^[A-Z]/p' "distro/debian/$PACKAGE/debian/control" | \
|
||||
sed '/^Build-Depends:/s/^Build-Depends: *//' | \
|
||||
sed '/^[A-Z]/d' | \
|
||||
tr '\n' ' ' | \
|
||||
sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/[[:space:]]\+/ /g')
|
||||
if [[ -n "$CONTROL_DEPS" && "$CONTROL_DEPS" != "" ]]; then
|
||||
BUILD_DEPS="$CONTROL_DEPS"
|
||||
fi
|
||||
fi
|
||||
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
Format: 3.0 (native)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
Architecture: any
|
||||
Version: $VERSION
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: $BUILD_DEPS
|
||||
Files:
|
||||
$TARBALL_MD5 $TARBALL_SIZE $COMBINED_TARBALL
|
||||
EOF
|
||||
|
||||
echo " - Generated $PACKAGE.dsc for native format"
|
||||
fi
|
||||
else
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
if [[ "$CHANGELOG_VERSION" == *"-"* ]]; then
|
||||
VERSION="$CHANGELOG_VERSION"
|
||||
else
|
||||
VERSION="${CHANGELOG_VERSION}-1"
|
||||
fi
|
||||
|
||||
echo " - Quilt format detected: creating debian.tar.gz"
|
||||
tar -czf "$WORK_DIR/debian.tar.gz" -C "distro/debian/$PACKAGE" debian/
|
||||
|
||||
echo " - Generating $PACKAGE.dsc for quilt format"
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
Format: 3.0 (quilt)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
Architecture: any
|
||||
Version: $VERSION
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13), wget, gzip
|
||||
DEBTRANSFORM-TAR: debian.tar.gz
|
||||
Files:
|
||||
00000000000000000000000000000000 1 debian.tar.gz
|
||||
EOF
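# Note: the zeroed md5 and dummy size above appear intentional. With
# DEBTRANSFORM-TAR set, OBS is expected to regenerate the source package from
# debian.tar.gz plus the referenced upstream tarball, so this hand-written
# .dsc acts as a template rather than a verified manifest.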
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
cd "$WORK_DIR"
|
||||
|
||||
echo "==> Updating working copy"
|
||||
if ! osc up; then
|
||||
echo "Error: Failed to update working copy"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Only auto-increment on manual runs (REBUILD_RELEASE set or not in CI), not automated workflows
|
||||
OLD_DSC_FILE=""
|
||||
if [[ -f "$WORK_DIR/$PACKAGE.dsc" ]]; then
|
||||
OLD_DSC_FILE="$WORK_DIR/$PACKAGE.dsc"
|
||||
elif [[ -f "$WORK_DIR/.osc/sources/$PACKAGE.dsc" ]]; then
|
||||
OLD_DSC_FILE="$WORK_DIR/.osc/sources/$PACKAGE.dsc"
|
||||
fi
|
||||
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[ -n "$OLD_DSC_FILE" ]]; then
|
||||
OLD_DSC_VERSION=$(grep "^Version:" "$OLD_DSC_FILE" 2>/dev/null | awk '{print $2}' | head -1)
|
||||
|
||||
IS_MANUAL=false
|
||||
if [[ -n "${REBUILD_RELEASE:-}" ]]; then
|
||||
IS_MANUAL=true
|
||||
echo "==> Manual rebuild detected (REBUILD_RELEASE=$REBUILD_RELEASE)"
|
||||
elif [[ -n "${FORCE_REBUILD:-}" ]] && [[ "${FORCE_REBUILD}" == "true" ]]; then
|
||||
IS_MANUAL=true
|
||||
echo "==> Manual workflow trigger detected (FORCE_REBUILD=true)"
|
||||
elif [[ -z "${GITHUB_ACTIONS:-}" ]] && [[ -z "${CI:-}" ]]; then
|
||||
IS_MANUAL=true
|
||||
echo "==> Local/manual run detected (not in CI)"
|
||||
fi
|
||||
|
||||
if [[ -n "$OLD_DSC_VERSION" ]] && [[ "$OLD_DSC_VERSION" == "$CHANGELOG_VERSION" ]] && [[ "$IS_MANUAL" == true ]]; then
|
||||
echo "==> Detected rebuild of same version $CHANGELOG_VERSION, incrementing version"
|
||||
|
||||
if [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)\+git$ ]]; then
|
||||
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||
NEW_VERSION="${BASE_VERSION}+git1"
|
||||
echo " Incrementing git number: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
elif [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)\+git([0-9]+)$ ]]; then
|
||||
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||
GIT_NUM="${BASH_REMATCH[2]}"
|
||||
NEW_GIT_NUM=$((GIT_NUM + 1))
|
||||
NEW_VERSION="${BASE_VERSION}+git${NEW_GIT_NUM}"
|
||||
echo " Incrementing git number: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
elif [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)ppa([0-9]+)$ ]]; then
|
||||
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||
PPA_NUM="${BASH_REMATCH[2]}"
|
||||
NEW_PPA_NUM=$((PPA_NUM + 1))
|
||||
NEW_VERSION="${BASE_VERSION}ppa${NEW_PPA_NUM}"
|
||||
echo " Incrementing PPA number: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
elif [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)\+git([0-9]+)(\.[a-f0-9]+)?(ppa([0-9]+))?$ ]]; then
|
||||
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||
GIT_NUM="${BASH_REMATCH[2]}"
|
||||
GIT_HASH="${BASH_REMATCH[3]}"
|
||||
PPA_NUM="${BASH_REMATCH[5]}"
|
||||
if [[ -n "$PPA_NUM" ]]; then
|
||||
NEW_PPA_NUM=$((PPA_NUM + 1))
|
||||
NEW_VERSION="${BASE_VERSION}+git${GIT_NUM}${GIT_HASH}ppa${NEW_PPA_NUM}"
|
||||
echo " Incrementing PPA number: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
else
|
||||
NEW_VERSION="${BASE_VERSION}+git${GIT_NUM}${GIT_HASH}ppa1"
|
||||
echo " Adding PPA number: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
fi
|
||||
elif [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)(-([0-9]+))?$ ]]; then
|
||||
BASE_VERSION="${BASH_REMATCH[1]}"
|
||||
NEW_VERSION="${BASE_VERSION}ppa1"
|
||||
echo " Warning: Native format cannot have Debian revision, converting to PPA format: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
else
|
||||
NEW_VERSION="${CHANGELOG_VERSION}ppa1"
|
||||
echo " Warning: Could not parse version format, appending ppa1: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
fi
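# Summary of the rebuild bumps above (illustrative inputs):
#   1.2.3+git                -> 1.2.3+git1
#   1.2.3+git4               -> 1.2.3+git5
#   1.2.3ppa2                -> 1.2.3ppa3
#   1.2.3+git4.abc1234ppa1   -> 1.2.3+git4.abc1234ppa2
#   1.2.3-1                  -> 1.2.3ppa1  (native format drops the revision)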
|
||||
|
||||
if [[ -z "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR/debian" ]]; then
|
||||
echo " Error: Source directory with debian/ not found for version increment"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
SOURCE_CHANGELOG="$SOURCE_DIR/debian/changelog"
|
||||
if [[ ! -f "$SOURCE_CHANGELOG" ]]; then
|
||||
echo " Error: Changelog not found in source directory: $SOURCE_CHANGELOG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
REPO_CHANGELOG="$REPO_ROOT/distro/debian/$PACKAGE/debian/changelog"
|
||||
TEMP_CHANGELOG=$(mktemp)
|
||||
{
|
||||
echo "$PACKAGE ($NEW_VERSION) unstable; urgency=medium"
|
||||
echo ""
|
||||
echo " * Rebuild to fix repository metadata issues"
|
||||
echo ""
|
||||
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
echo ""
|
||||
if [[ -f "$REPO_CHANGELOG" ]]; then
|
||||
OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1)
|
||||
if [[ -n "$OLD_ENTRY_START" ]]; then
|
||||
tail -n +$OLD_ENTRY_START "$REPO_CHANGELOG"
|
||||
fi
|
||||
fi
|
||||
} > "$TEMP_CHANGELOG"
|
||||
cp "$TEMP_CHANGELOG" "$SOURCE_CHANGELOG"
|
||||
rm -f "$TEMP_CHANGELOG"
|
||||
|
||||
CHANGELOG_VERSION="$NEW_VERSION"
|
||||
VERSION="$NEW_VERSION"
|
||||
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
|
||||
|
||||
for old_tarball in "${PACKAGE}"_*.tar.gz; do
|
||||
if [[ -f "$old_tarball" ]] && [[ "$old_tarball" != "${PACKAGE}_${NEW_VERSION}.tar.gz" ]]; then
|
||||
echo " Removing old tarball from OBS: $old_tarball"
|
||||
osc rm -f "$old_tarball" 2>/dev/null || rm -f "$old_tarball"
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ "$PACKAGE" == "dms" ]] && [[ -f "$WORK_DIR/dms-source.tar.gz" ]]; then
|
||||
echo " Recreating dms-source.tar.gz with new directory name for incremented version"
|
||||
EXPECTED_SOURCE_DIR="DankMaterialShell-${NEW_VERSION}"
|
||||
TEMP_SOURCE_DIR=$(mktemp -d)
|
||||
cd "$TEMP_SOURCE_DIR"
|
||||
tar -xzf "$WORK_DIR/dms-source.tar.gz" 2>/dev/null || tar -xJf "$WORK_DIR/dms-source.tar.gz" 2>/dev/null || tar -xjf "$WORK_DIR/dms-source.tar.gz" 2>/dev/null
|
||||
EXTRACTED=$(find . -maxdepth 1 -type d -name "DankMaterialShell-*" | head -1)
|
||||
if [[ -n "$EXTRACTED" ]] && [[ "$EXTRACTED" != "./$EXPECTED_SOURCE_DIR" ]]; then
|
||||
echo " Renaming $EXTRACTED to $EXPECTED_SOURCE_DIR"
|
||||
mv "$EXTRACTED" "$EXPECTED_SOURCE_DIR"
|
||||
rm -f "$WORK_DIR/dms-source.tar.gz"
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/dms-source.tar.gz" "$EXPECTED_SOURCE_DIR"
|
||||
ROOT_DIR=$(tar -tf "$WORK_DIR/dms-source.tar.gz" | head -1 | cut -d/ -f1)
|
||||
if [[ "$ROOT_DIR" != "$EXPECTED_SOURCE_DIR" ]]; then
|
||||
echo " Error: Recreated tarball has wrong root directory: $ROOT_DIR (expected $EXPECTED_SOURCE_DIR)"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$TEMP_SOURCE_DIR"
|
||||
fi
|
||||
|
||||
echo " Recreating tarball with new version: $COMBINED_TARBALL"
|
||||
if [[ -n "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR/debian" ]]; then
|
||||
if [[ "$PACKAGE" == "dms" ]]; then
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
CURRENT_DIR=$(basename "$SOURCE_DIR")
|
||||
EXPECTED_DIR="DankMaterialShell-${NEW_VERSION}"
|
||||
if [[ "$CURRENT_DIR" != "$EXPECTED_DIR" ]]; then
|
||||
echo " Renaming directory from $CURRENT_DIR to $EXPECTED_DIR to match debian/rules"
|
||||
if [[ -d "$CURRENT_DIR" ]]; then
|
||||
mv "$CURRENT_DIR" "$EXPECTED_DIR"
|
||||
SOURCE_DIR="$(pwd)/$EXPECTED_DIR"
|
||||
else
|
||||
echo " Warning: Source directory $CURRENT_DIR not found, extracting from existing tarball"
|
||||
OLD_TARBALL=$(ls "${PACKAGE}"_*.tar.gz 2>/dev/null | head -1)
|
||||
if [[ -f "$OLD_TARBALL" ]]; then
|
||||
EXTRACT_DIR=$(mktemp -d)
|
||||
cd "$EXTRACT_DIR"
|
||||
tar -xzf "$WORK_DIR/$OLD_TARBALL"
|
||||
EXTRACTED_DIR=$(find . -maxdepth 1 -type d -name "DankMaterialShell-*" | head -1)
|
||||
if [[ -n "$EXTRACTED_DIR" ]] && [[ "$EXTRACTED_DIR" != "./$EXPECTED_DIR" ]]; then
|
||||
mv "$EXTRACTED_DIR" "$EXPECTED_DIR"
|
||||
if [[ -f "$EXPECTED_DIR/debian/changelog" ]]; then
|
||||
ACTUAL_VER=$(grep -m1 "^$PACKAGE" "$EXPECTED_DIR/debian/changelog" 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/')
|
||||
if [[ "$ACTUAL_VER" != "$NEW_VERSION" ]]; then
|
||||
echo " Updating changelog version in extracted directory"
|
||||
REPO_CHANGELOG="$REPO_ROOT/distro/debian/$PACKAGE/debian/changelog"
|
||||
TEMP_CHANGELOG=$(mktemp)
|
||||
{
|
||||
echo "$PACKAGE ($NEW_VERSION) unstable; urgency=medium"
|
||||
echo ""
|
||||
echo " * Rebuild to fix repository metadata issues"
|
||||
echo ""
|
||||
echo " -- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
echo ""
|
||||
if [[ -f "$REPO_CHANGELOG" ]]; then
|
||||
OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1)
|
||||
if [[ -n "$OLD_ENTRY_START" ]]; then
|
||||
tail -n +$OLD_ENTRY_START "$REPO_CHANGELOG"
|
||||
fi
|
||||
fi
|
||||
} > "$TEMP_CHANGELOG"
|
||||
cp "$TEMP_CHANGELOG" "$EXPECTED_DIR/debian/changelog"
|
||||
rm -f "$TEMP_CHANGELOG"
|
||||
fi
|
||||
fi
|
||||
SOURCE_DIR="$(pwd)/$EXPECTED_DIR"
|
||||
cd "$REPO_ROOT"
|
||||
else
|
||||
echo " Error: Could not extract or find source directory"
|
||||
rm -rf "$EXTRACT_DIR"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo " Error: No existing tarball found to extract"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
TARBALL_BASE=$(basename "$SOURCE_DIR")
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
|
||||
cd "$WORK_DIR"
|
||||
|
||||
TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
|
||||
TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
|
||||
|
||||
BUILD_DEPS="debhelper-compat (= 13)"
|
||||
if [[ -f "distro/debian/$PACKAGE/debian/control" ]]; then
|
||||
CONTROL_DEPS=$(sed -n '/^Build-Depends:/,/^[A-Z]/p' "distro/debian/$PACKAGE/debian/control" | \
|
||||
sed '/^Build-Depends:/s/^Build-Depends: *//' | \
|
||||
sed '/^[A-Z]/d' | \
|
||||
tr '\n' ' ' | \
|
||||
sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/[[:space:]]\+/ /g')
|
||||
if [[ -n "$CONTROL_DEPS" && "$CONTROL_DEPS" != "" ]]; then
|
||||
BUILD_DEPS="$CONTROL_DEPS"
|
||||
fi
|
||||
fi
|
||||
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
Format: 3.0 (native)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
Architecture: any
|
||||
Version: $VERSION
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: $BUILD_DEPS
|
||||
Files:
|
||||
$TARBALL_MD5 $TARBALL_SIZE $COMBINED_TARBALL
|
||||
EOF
|
||||
echo " - Updated changelog and recreated tarball with version $NEW_VERSION"
|
||||
else
|
||||
echo " Error: Source directory not found, cannot recreate tarball"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
find . -maxdepth 1 -type f \( -name "*.dsc" -o -name "*.spec" \) -exec grep -l "^<<<<<<< " {} \; 2>/dev/null | while read -r conflicted_file; do
|
||||
echo " Removing conflicted text file: $conflicted_file"
|
||||
rm -f "$conflicted_file"
|
||||
done
|
||||
|
||||
echo "==> Staging changes"
|
||||
echo "Files to upload:"
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
ls -lh *.tar.gz *.tar.xz *.tar *.spec *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
elif [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
ls -lh *.tar.gz *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
ls -lh *.tar.gz *.tar.xz *.tar *.spec _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Capture the addremove pipeline status immediately; anything run in between
# (such as the osc add calls below) would overwrite PIPESTATUS.
set +e
osc addremove 2>&1 | grep -v "Git SCM package"
ADDREMOVE_EXIT=${PIPESTATUS[0]}
set -e
if [[ $ADDREMOVE_EXIT -ne 0 ]] && [[ $ADDREMOVE_EXIT -ne 1 ]]; then
echo "Warning: osc addremove returned exit code $ADDREMOVE_EXIT"
fi

SOURCE_TARBALL="${PACKAGE}-source.tar.gz"
if [[ -f "$SOURCE_TARBALL" ]]; then
echo "==> Ensuring $SOURCE_TARBALL is tracked by OBS"
osc add "$SOURCE_TARBALL" 2>&1 | grep -v "already added\|already tracked\|Git SCM package" || true
elif [[ -f "$WORK_DIR/$SOURCE_TARBALL" ]]; then
echo "==> Copying $SOURCE_TARBALL from WORK_DIR and adding to OBS"
cp "$WORK_DIR/$SOURCE_TARBALL" "$SOURCE_TARBALL"
osc add "$SOURCE_TARBALL" 2>&1 | grep -v "already added\|already tracked\|Git SCM package" || true
fi
|
||||
|
||||
if osc status | grep -q '^C'; then
|
||||
echo "==> Resolving conflicts"
|
||||
osc status | grep '^C' | awk '{print $2}' | xargs -r osc resolved
|
||||
fi
|
||||
|
||||
if ! osc status 2>/dev/null | grep -qE '^[MAD]|^[?]'; then
|
||||
echo "==> No changes to commit (package already up to date)"
|
||||
else
|
||||
echo "==> Committing to OBS"
|
||||
set +e
|
||||
osc commit -m "$MESSAGE" 2>&1 | grep -v "Git SCM package" | grep -v "apiurl\|project\|_ObsPrj\|_manifest\|git-obs"
|
||||
COMMIT_EXIT=${PIPESTATUS[0]}
|
||||
set -e
|
||||
if [[ $COMMIT_EXIT -ne 0 ]]; then
|
||||
echo "Error: Upload failed with exit code $COMMIT_EXIT"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
osc results
|
||||
|
||||
echo ""
|
||||
echo "✅ Upload complete!"
|
||||
cd "$WORK_DIR"
|
||||
osc results 2>&1 | head -10
|
||||
cd "$REPO_ROOT"
|
||||
echo ""
|
||||
echo "Check build status with:"
|
||||
echo " ./distro/scripts/obs-status.sh $PACKAGE"
|
||||
566
distro/scripts/ppa-build.sh
Executable file
@@ -0,0 +1,566 @@
|
||||
#!/bin/bash
|
||||
# Generic source package builder for DMS PPA packages
# Usage: ./ppa-build.sh <package-dir> [ubuntu-series]
#
# Example:
#   ./ppa-build.sh ../dms questing
#   ./ppa-build.sh ../dms-git questing
|
||||
|
||||
set -e
|
||||
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
if [ $# -lt 1 ]; then
|
||||
error "Usage: $0 <package-dir> [ubuntu-series]"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " package-dir : Path to package directory (e.g., ../dms)"
|
||||
echo " ubuntu-series : Ubuntu series (optional, default: noble)"
|
||||
echo " Options: noble, jammy, oracular, mantic"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " $0 ../dms questing"
|
||||
echo " $0 ../dms-git questing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PACKAGE_DIR="$1"
|
||||
UBUNTU_SERIES="${2:-noble}"
|
||||
|
||||
# Validate package directory
|
||||
if [ ! -d "$PACKAGE_DIR" ]; then
|
||||
error "Package directory not found: $PACKAGE_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d "$PACKAGE_DIR/debian" ]; then
|
||||
error "No debian/ directory found in $PACKAGE_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get absolute path
|
||||
PACKAGE_DIR=$(cd "$PACKAGE_DIR" && pwd)
|
||||
PACKAGE_NAME=$(basename "$PACKAGE_DIR")
|
||||
|
||||
info "Building source package for: $PACKAGE_NAME"
|
||||
info "Package directory: $PACKAGE_DIR"
|
||||
info "Target Ubuntu series: $UBUNTU_SERIES"
|
||||
|
||||
# Check for required files
|
||||
REQUIRED_FILES=(
|
||||
"debian/control"
|
||||
"debian/rules"
|
||||
"debian/changelog"
|
||||
"debian/copyright"
|
||||
"debian/source/format"
|
||||
)
|
||||
|
||||
for file in "${REQUIRED_FILES[@]}"; do
|
||||
if [ ! -f "$PACKAGE_DIR/$file" ]; then
|
||||
error "Required file missing: $file"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Verify GPG key is set up
|
||||
info "Checking GPG key setup..."
|
||||
if ! gpg --list-secret-keys &> /dev/null; then
|
||||
error "No GPG secret keys found. Please set up GPG first!"
|
||||
error "See GPG_SETUP.md for instructions"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
success "GPG key found"
|
||||
|
||||
# Check if debuild is installed
|
||||
if ! command -v debuild &> /dev/null; then
|
||||
error "debuild not found. Install devscripts:"
|
||||
error " sudo dnf install devscripts"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract package info from changelog
|
||||
cd "$PACKAGE_DIR"
|
||||
CHANGELOG_VERSION=$(dpkg-parsechangelog -S Version)
|
||||
SOURCE_NAME=$(dpkg-parsechangelog -S Source)
|
||||
|
||||
info "Source package: $SOURCE_NAME"
|
||||
info "Version: $CHANGELOG_VERSION"
|
||||
|
||||
# Check if version targets correct Ubuntu series
|
||||
CHANGELOG_SERIES=$(dpkg-parsechangelog -S Distribution)
|
||||
if [ "$CHANGELOG_SERIES" != "$UBUNTU_SERIES" ] && [ "$CHANGELOG_SERIES" != "UNRELEASED" ]; then
|
||||
warn "Changelog targets '$CHANGELOG_SERIES' but building for '$UBUNTU_SERIES'"
|
||||
warn "Consider updating changelog with: dch -r '' -D $UBUNTU_SERIES"
|
||||
fi
|
||||
|
||||
# Detect package type and update version automatically
|
||||
cd "$PACKAGE_DIR"
|
||||
|
||||
# Function to get latest tag from GitHub
|
||||
get_latest_tag() {
|
||||
local repo="$1"
|
||||
# Try GitHub API first (faster)
|
||||
if command -v curl &> /dev/null; then
|
||||
LATEST_TAG=$(curl -s "https://api.github.com/repos/$repo/releases/latest" 2>/dev/null | grep '"tag_name":' | sed 's/.*"tag_name": "\(.*\)".*/\1/' | head -1)
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
echo "$LATEST_TAG" | sed 's/^v//'
|
||||
return
|
||||
fi
|
||||
fi
|
||||
# Fallback: clone and get latest tag
|
||||
TEMP_REPO=$(mktemp -d)
|
||||
if git clone --depth=1 --quiet "https://github.com/$repo.git" "$TEMP_REPO" 2>/dev/null; then
|
||||
LATEST_TAG=$(cd "$TEMP_REPO" && git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "")
|
||||
rm -rf "$TEMP_REPO"
|
||||
echo "$LATEST_TAG"
|
||||
fi
|
||||
}
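# Example (hypothetical): get_latest_tag "AvengeMedia/dgop" prints something
# like "0.4.1": the latest release tag with any leading "v" stripped. When the
# GitHub API is unreachable it falls back to a shallow clone plus git describe.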
|
||||
|
||||
# Detect if package is git-based
|
||||
IS_GIT_PACKAGE=false
|
||||
GIT_REPO=""
|
||||
SOURCE_DIR=""
|
||||
|
||||
# Check package name for -git suffix
|
||||
if [[ "$PACKAGE_NAME" == *"-git" ]]; then
|
||||
IS_GIT_PACKAGE=true
|
||||
fi
|
||||
|
||||
# Check rules file for git clone patterns and extract repo
|
||||
if grep -q "git clone" debian/rules 2>/dev/null; then
|
||||
IS_GIT_PACKAGE=true
|
||||
# Extract GitHub repo URL from rules
|
||||
GIT_URL=$(grep -o "git clone.*https://github.com/[^/]*/[^/]*\.git" debian/rules 2>/dev/null | head -1 | sed 's/.*github\.com\///' | sed 's/\.git.*//' || echo "")
|
||||
if [ -n "$GIT_URL" ]; then
|
||||
GIT_REPO="$GIT_URL"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Special handling for known packages
|
||||
case "$PACKAGE_NAME" in
|
||||
dms-git)
|
||||
IS_GIT_PACKAGE=true
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
SOURCE_DIR="dms-git-repo"
|
||||
;;
|
||||
dms)
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
info "Downloading pre-built binaries and source for dms..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
# Download amd64 binary (will be included in source package)
|
||||
if [ ! -f "dms-distropkg-amd64.gz" ]; then
|
||||
info "Downloading dms binary for amd64..."
|
||||
if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-distropkg-amd64.gz"; then
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dms-distropkg-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Download source tarball for QML files
|
||||
if [ ! -f "dms-source.tar.gz" ]; then
|
||||
info "Downloading dms source for QML files..."
|
||||
if wget -O dms-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
|
||||
success "source tarball downloaded"
|
||||
else
|
||||
error "Failed to download dms-source.tar.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
danksearch)
|
||||
# danksearch uses pre-built binary from releases, like dgop
|
||||
GIT_REPO="AvengeMedia/danksearch"
|
||||
;;
|
||||
dgop)
|
||||
# dgop uses pre-built binary from releases
|
||||
GIT_REPO="AvengeMedia/dgop"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Handle git packages
|
||||
if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
info "Detected git package: $PACKAGE_NAME"
|
||||
|
||||
# Determine source directory name
|
||||
if [ -z "$SOURCE_DIR" ]; then
|
||||
# Default: use package name without -git suffix + -source or -repo
|
||||
BASE_NAME=$(echo "$PACKAGE_NAME" | sed 's/-git$//')
|
||||
if [ -d "${BASE_NAME}-source" ] 2>/dev/null; then
|
||||
SOURCE_DIR="${BASE_NAME}-source"
|
||||
elif [ -d "${BASE_NAME}-repo" ] 2>/dev/null; then
|
||||
SOURCE_DIR="${BASE_NAME}-repo"
|
||||
elif [ -d "$BASE_NAME" ] 2>/dev/null; then
|
||||
SOURCE_DIR="$BASE_NAME"
|
||||
else
|
||||
SOURCE_DIR="${BASE_NAME}-source"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Always clone fresh source to get latest commit info
|
||||
info "Cloning $GIT_REPO from GitHub (getting latest commit info)..."
|
||||
TEMP_CLONE=$(mktemp -d)
|
||||
if git clone "https://github.com/$GIT_REPO.git" "$TEMP_CLONE"; then
|
||||
# Get git commit info from fresh clone
|
||||
GIT_COMMIT_HASH=$(cd "$TEMP_CLONE" && git rev-parse --short HEAD)
|
||||
GIT_COMMIT_COUNT=$(cd "$TEMP_CLONE" && git rev-list --count HEAD)
|
||||
|
||||
# Get upstream version from latest git tag (e.g., 0.2.1)
|
||||
# Sort all tags by version and get the latest one (not just the one reachable from HEAD)
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git tag -l "v*" | sed 's/^v//' | sort -V | tail -1)
|
||||
if [ -z "$UPSTREAM_VERSION" ]; then
|
||||
# Fallback: try without v prefix
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git tag -l | grep -E '^[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -1)
|
||||
fi
|
||||
if [ -z "$UPSTREAM_VERSION" ]; then
|
||||
# Last resort: use git describe
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "0.0.1")
|
||||
fi
|
||||
|
||||
# Verify we got valid commit info
|
||||
if [ -z "$GIT_COMMIT_COUNT" ] || [ "$GIT_COMMIT_COUNT" = "0" ]; then
|
||||
error "Failed to get commit count from $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$GIT_COMMIT_HASH" ]; then
|
||||
error "Failed to get commit hash from $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
success "Got commit info: $GIT_COMMIT_COUNT ($GIT_COMMIT_HASH), upstream: $UPSTREAM_VERSION"
|
||||
|
||||
# Update changelog with git commit info
|
||||
info "Updating changelog with git commit info..."
|
||||
# Format: 0.2.1+git705.fdbb86appa1
|
||||
# Check if we're rebuilding the same commit (increment PPA number if so)
|
||||
BASE_VERSION="${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}"
|
||||
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
|
||||
PPA_NUM=1
|
||||
|
||||
# If current version matches the base version, increment PPA number
|
||||
# Escape special regex characters in BASE_VERSION for pattern matching
|
||||
ESCAPED_BASE=$(echo "$BASE_VERSION" | sed 's/\./\\./g' | sed 's/+/\\+/g')
|
||||
if [[ "$CURRENT_VERSION" =~ ^${ESCAPED_BASE}ppa([0-9]+)$ ]]; then
|
||||
PPA_NUM=$((BASH_REMATCH[1] + 1))
|
||||
info "Detected rebuild of same commit (current: $CURRENT_VERSION), incrementing PPA number to $PPA_NUM"
|
||||
else
|
||||
info "New commit or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
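# Illustrative result: upstream tag 0.2.1 at commit count 705, hash fdbb86a,
# first build -> 0.2.1+git705.fdbb86appa1; rebuilding the same commit only
# bumps the trailing ppa number (ppa2, ppa3, ...).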
|
||||
|
||||
# Use sed to update changelog (non-interactive, faster)
|
||||
# Get current changelog content - find the next package header line (starts with package name)
|
||||
# Skip the first entry entirely by finding the second occurrence of the package name at start of line
|
||||
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
|
||||
if [ -n "$OLD_ENTRY_START" ]; then
|
||||
# Found second entry, use everything from there
|
||||
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog)
|
||||
else
|
||||
# No second entry found, changelog will only have new entry
|
||||
CHANGELOG_CONTENT=""
|
||||
fi
|
||||
|
||||
# Create new changelog entry with proper format
|
||||
CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium
|
||||
|
||||
* Git snapshot (commit ${GIT_COMMIT_COUNT}: ${GIT_COMMIT_HASH})
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
|
||||
# Write new changelog (new entry, blank line, then old entries)
|
||||
echo "$CHANGELOG_ENTRY" > debian/changelog
|
||||
if [ -n "$CHANGELOG_CONTENT" ]; then
|
||||
echo "" >> debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >> debian/changelog
|
||||
fi
|
||||
success "Version updated to $NEW_VERSION"
|
||||
|
||||
# Now clone to source directory (without .git for inclusion in package)
|
||||
rm -rf "$SOURCE_DIR"
|
||||
cp -r "$TEMP_CLONE" "$SOURCE_DIR"
|
||||
rm -rf "$SOURCE_DIR/.git"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
|
||||
# Vendor Rust dependencies for packages that need it
|
||||
if false; then
|
||||
# No current packages need Rust vendoring
|
||||
if [ -f "$SOURCE_DIR/Cargo.toml" ]; then
|
||||
info "Vendoring Rust dependencies (Launchpad has no internet access)..."
|
||||
cd "$SOURCE_DIR"
|
||||
|
||||
# Clean up any existing vendor directory and .orig files
|
||||
# (prevents cargo from including .orig files in checksums)
|
||||
rm -rf vendor .cargo
|
||||
find . -type f -name "*.orig" -exec rm -f {} + || true
|
||||
|
||||
# Download all dependencies (crates.io + git repos) to vendor/
|
||||
# cargo vendor outputs the config to stderr, capture it
|
||||
mkdir -p .cargo
|
||||
cargo vendor 2>&1 | awk '
|
||||
/^\[source\.crates-io\]/ { printing=1 }
|
||||
printing { print }
|
||||
/^directory = "vendor"$/ { exit }
|
||||
' > .cargo/config.toml
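# The captured .cargo/config.toml normally looks like the following (typical
# cargo vendor output; exact formatting may vary by cargo version):
#   [source.crates-io]
#   replace-with = "vendored-sources"
#   [source.vendored-sources]
#   directory = "vendor"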
|
||||
|
||||
# Verify vendor directory was created
|
||||
if [ ! -d "vendor" ]; then
|
||||
error "Failed to vendor dependencies"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify config was created
|
||||
if [ ! -s .cargo/config.toml ]; then
|
||||
error "Failed to create cargo config"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# CRITICAL: Remove ALL .orig files from vendor directory
|
||||
# These break cargo checksums when dh_clean tries to use them
|
||||
info "Cleaning .orig files from vendor directory..."
|
||||
find vendor -type f -name "*.orig" -exec rm -fv {} + || true
|
||||
find vendor -type f -name "*.rej" -exec rm -fv {} + || true
|
||||
|
||||
# Verify no .orig files remain
|
||||
ORIG_COUNT=$(find vendor -type f -name "*.orig" | wc -l)
|
||||
if [ "$ORIG_COUNT" -gt 0 ]; then
|
||||
warn "Found $ORIG_COUNT .orig files still in vendor directory"
|
||||
fi
|
||||
|
||||
success "Rust dependencies vendored (including git dependencies)"
|
||||
cd "$PACKAGE_DIR"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Download pre-built binary for dms-git
|
||||
# dms-git uses latest release binary with git master QML files
|
||||
if [ "$PACKAGE_NAME" = "dms-git" ]; then
|
||||
info "Downloading latest release binary for dms-git..."
|
||||
if [ ! -f "dms-distropkg-amd64.gz" ]; then
|
||||
if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/latest/download/dms-distropkg-amd64.gz"; then
|
||||
success "Latest release binary downloaded"
|
||||
else
|
||||
error "Failed to download dms-distropkg-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
info "Release binary already downloaded"
|
||||
fi
|
||||
fi
|
||||
|
||||
success "Source prepared for packaging"
|
||||
else
|
||||
error "Failed to clone $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
# Handle stable packages - get latest tag
|
||||
elif [ -n "$GIT_REPO" ]; then
|
||||
info "Detected stable package: $PACKAGE_NAME"
|
||||
info "Fetching latest tag from $GIT_REPO..."
|
||||
|
||||
LATEST_TAG=$(get_latest_tag "$GIT_REPO")
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
# Check source format - native packages can't use dashes
|
||||
SOURCE_FORMAT=$(cat debian/source/format 2>/dev/null | head -1 || echo "3.0 (quilt)")
|
||||
|
||||
# Get current version to check if we need to increment PPA number
|
||||
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
|
||||
PPA_NUM=1
|
||||
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]]; then
|
||||
# Native format: 0.2.1ppa1 (no dash, no revision)
|
||||
BASE_VERSION="${LATEST_TAG}"
|
||||
# Check if we're rebuilding the same version (increment PPA number if so)
|
||||
if [[ "$CURRENT_VERSION" =~ ^${LATEST_TAG}ppa([0-9]+)$ ]]; then
|
||||
PPA_NUM=$((BASH_REMATCH[1] + 1))
|
||||
info "Detected rebuild of same version (current: $CURRENT_VERSION), incrementing PPA number to $PPA_NUM"
|
||||
else
|
||||
info "New version or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
|
||||
else
|
||||
# Quilt format: 0.2.1-1ppa1 (with revision)
|
||||
BASE_VERSION="${LATEST_TAG}-1"
|
||||
# Check if we're rebuilding the same version (increment PPA number if so)
|
||||
ESCAPED_BASE=$(echo "$BASE_VERSION" | sed 's/\./\\./g' | sed 's/-/\\-/g')
|
||||
if [[ "$CURRENT_VERSION" =~ ^${ESCAPED_BASE}ppa([0-9]+)$ ]]; then
|
||||
PPA_NUM=$((BASH_REMATCH[1] + 1))
|
||||
info "Detected rebuild of same version (current: $CURRENT_VERSION), incrementing PPA number to $PPA_NUM"
|
||||
else
|
||||
info "New version or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
|
||||
fi
|
||||
|
||||
# Check if version needs updating (either new version or PPA number changed)
|
||||
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
|
||||
if [ "$PPA_NUM" -gt 1 ]; then
|
||||
info "Updating changelog for rebuild (PPA number incremented to $PPA_NUM)"
|
||||
else
|
||||
info "Updating changelog to latest tag: $LATEST_TAG"
|
||||
fi
|
||||
# Use sed to update changelog (non-interactive)
|
||||
# Get current changelog content - find the next package header line
|
||||
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
|
||||
if [ -n "$OLD_ENTRY_START" ]; then
|
||||
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog)
|
||||
else
|
||||
CHANGELOG_CONTENT=""
|
||||
fi
|
||||
|
||||
# Create appropriate changelog message
|
||||
if [ "$PPA_NUM" -gt 1 ]; then
|
||||
CHANGELOG_MSG="Rebuild for packaging fixes (ppa${PPA_NUM})"
|
||||
else
|
||||
CHANGELOG_MSG="Upstream release ${LATEST_TAG}"
|
||||
fi
|
||||
|
||||
CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium
|
||||
|
||||
* ${CHANGELOG_MSG}
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
echo "$CHANGELOG_ENTRY" > debian/changelog
|
||||
if [ -n "$CHANGELOG_CONTENT" ]; then
|
||||
echo "" >> debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >> debian/changelog
|
||||
fi
|
||||
success "Version updated to $NEW_VERSION"
|
||||
else
|
||||
info "Version already at latest tag: $LATEST_TAG"
|
||||
fi
|
||||
else
|
||||
warn "Could not determine latest tag for $GIT_REPO, using existing version"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Handle packages that need pre-built binaries downloaded
|
||||
cd "$PACKAGE_DIR"
|
||||
case "$PACKAGE_NAME" in
|
||||
danksearch)
|
||||
info "Downloading pre-built binaries for danksearch..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
# Download both amd64 and arm64 binaries (will be included in source package)
|
||||
# Launchpad can't download during build, so we include both architectures
|
||||
if [ ! -f "dsearch-amd64" ]; then
|
||||
info "Downloading dsearch binary for amd64..."
|
||||
if wget -O dsearch-amd64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-amd64.gz"; then
|
||||
gunzip dsearch-amd64.gz
|
||||
chmod +x dsearch-amd64
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -f "dsearch-arm64" ]; then
|
||||
info "Downloading dsearch binary for arm64..."
|
||||
if wget -O dsearch-arm64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-arm64.gz"; then
|
||||
gunzip dsearch-arm64.gz
|
||||
chmod +x dsearch-arm64
|
||||
success "arm64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-arm64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
dgop)
|
||||
# dgop binary should already be committed in the repo
|
||||
if [ ! -f "dgop" ]; then
|
||||
warn "dgop binary not found - should be committed to repo"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
cd - > /dev/null
|
||||
|
||||
# Check if this version already exists on PPA (only when rmadison is available)
|
||||
if command -v rmadison >/dev/null 2>&1; then
|
||||
info "Checking if version already exists on PPA..."
|
||||
PPA_VERSION_CHECK=$(rmadison -u ppa:avengemedia/dms "$PACKAGE_NAME" 2>/dev/null | grep "$VERSION" || true)
|
||||
if [ -n "$PPA_VERSION_CHECK" ]; then
|
||||
warn "Version $VERSION already exists on PPA:"
|
||||
echo "$PPA_VERSION_CHECK"
|
||||
echo
|
||||
warn "Skipping upload to avoid duplicate. If this is a rebuild, increment the ppa number."
|
||||
cd "$PACKAGE_DIR"
|
||||
# Still clean up extracted sources
|
||||
case "$PACKAGE_NAME" in
|
||||
dms-git)
|
||||
rm -rf DankMaterialShell-*
|
||||
success "Cleaned up DankMaterialShell-*/ directory"
|
||||
;;
|
||||
esac
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# Build source package
|
||||
info "Building source package..."
|
||||
echo
|
||||
|
||||
# Determine if we need to include orig tarball (-sa) or just debian changes (-sd)
|
||||
# Check if .orig.tar.xz already exists in parent directory (previous build)
|
||||
ORIG_TARBALL="${PACKAGE_NAME}_${VERSION%.ppa*}.orig.tar.xz"
|
||||
if [ -f "../$ORIG_TARBALL" ]; then
|
||||
info "Found existing orig tarball, using -sd (debian changes only)"
|
||||
DEBUILD_SOURCE_FLAG="-sd"
|
||||
else
|
||||
info "No existing orig tarball found, using -sa (include original source)"
|
||||
DEBUILD_SOURCE_FLAG="-sa"
|
||||
fi
|
||||
|
||||
# Use -S for source only, -sa/-sd for source inclusion
|
||||
# -d skips dependency checking (we're building on Fedora, not Ubuntu)
|
||||
# Pipe yes to automatically answer prompts (e.g., "continue anyway?")
|
||||
if yes | DEBIAN_FRONTEND=noninteractive debuild -S $DEBUILD_SOURCE_FLAG -d; then
|
||||
echo
|
||||
success "Source package built successfully!"
|
||||
|
||||
# List generated files
|
||||
info "Generated files in $(dirname "$PACKAGE_DIR"):"
|
||||
ls -lh "$(dirname "$PACKAGE_DIR")"/${SOURCE_NAME}_${CHANGELOG_VERSION}* 2>/dev/null || true
|
||||
|
||||
# Show what to do next
|
||||
echo
|
||||
info "Next steps:"
|
||||
echo " 1. Review the source package:"
|
||||
echo " cd $(dirname "$PACKAGE_DIR")"
|
||||
echo " ls -lh ${SOURCE_NAME}_${CHANGELOG_VERSION}*"
|
||||
echo
|
||||
echo " 2. Upload to PPA (stable):"
|
||||
echo " dput ppa:avengemedia/dms ${SOURCE_NAME}_${CHANGELOG_VERSION}_source.changes"
|
||||
echo
|
||||
echo " 3. Or upload to PPA (nightly):"
|
||||
echo " dput ppa:avengemedia/dms-git ${SOURCE_NAME}_${CHANGELOG_VERSION}_source.changes"
|
||||
echo
|
||||
echo " 4. Or use the upload script:"
|
||||
echo " ./upload-ppa.sh $(dirname "$PACKAGE_DIR")/${SOURCE_NAME}_${CHANGELOG_VERSION}_source.changes dms"
|
||||
|
||||
else
|
||||
error "Source package build failed!"
|
||||
exit 1
|
||||
fi
|
||||
179
distro/scripts/ppa-dput.sh
Executable file
@@ -0,0 +1,179 @@
|
||||
#!/bin/bash
|
||||
# Ubuntu PPA uploader for DMS packages
# Usage: ./ppa-dput.sh <changes-file> <ppa-name>
#
# Example:
#   ./ppa-dput.sh ../dms_0.5.2ppa1_source.changes dms
#   ./ppa-dput.sh ../dms_0.5.2+git705.fdbb86appa1_source.changes dms-git
|
||||
|
||||
set -e
|
||||
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
if [ $# -lt 2 ]; then
|
||||
error "Usage: $0 <changes-file> <ppa-name>"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " changes-file : Path to .changes file (e.g., ../dms_0.5.2ppa1_source.changes)"
|
||||
echo " ppa-name : PPA to upload to (dms or dms-git)"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " $0 ../dms_0.5.2ppa1_source.changes dms"
|
||||
echo " $0 ../dms_0.5.2+git705.fdbb86appa1_source.changes dms-git"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CHANGES_FILE="$1"
|
||||
PPA_NAME="$2"
|
||||
|
||||
# Validate changes file
|
||||
if [ ! -f "$CHANGES_FILE" ]; then
|
||||
error "Changes file not found: $CHANGES_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "$CHANGES_FILE" =~ \.changes$ ]]; then
|
||||
error "File must be a .changes file"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate PPA name
|
||||
if [ "$PPA_NAME" != "dms" ] && [ "$PPA_NAME" != "dms-git" ] && [ "$PPA_NAME" != "danklinux" ]; then
|
||||
error "PPA name must be 'dms', 'dms-git', or 'danklinux'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get absolute path
|
||||
CHANGES_FILE=$(realpath "$CHANGES_FILE")
|
||||
|
||||
info "Uploading to PPA: ppa:avengemedia/$PPA_NAME"
|
||||
info "Changes file: $CHANGES_FILE"
|
||||
|
||||
# Check if dput or lftp is installed
|
||||
UPLOAD_METHOD=""
|
||||
if command -v dput &> /dev/null; then
|
||||
UPLOAD_METHOD="dput"
|
||||
elif command -v lftp &> /dev/null; then
|
||||
UPLOAD_METHOD="lftp"
|
||||
warn "dput not found, using lftp as fallback"
|
||||
else
|
||||
error "Neither dput nor lftp found. Install one with:"
|
||||
error " sudo dnf install dput-ng # Preferred but broken on Fedora"
|
||||
error " sudo dnf install lftp # Alternative upload method"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if ~/.dput.cf exists
|
||||
if [ ! -f "$HOME/.dput.cf" ]; then
|
||||
error "~/.dput.cf not found!"
|
||||
echo
|
||||
info "Create it from template:"
|
||||
echo " cp $(dirname "$0")/../dput.cf.template ~/.dput.cf"
|
||||
echo
|
||||
info "Or create it manually with:"
|
||||
cat <<'EOF'
|
||||
[ppa:avengemedia/dms]
|
||||
fqdn = ppa.launchpad.net
|
||||
method = ftp
|
||||
incoming = ~avengemedia/ubuntu/dms/
|
||||
login = anonymous
|
||||
allow_unsigned_uploads = 0
|
||||
|
||||
[ppa:avengemedia/dms-git]
|
||||
fqdn = ppa.launchpad.net
|
||||
method = ftp
|
||||
incoming = ~avengemedia/ubuntu/dms-git/
|
||||
login = anonymous
|
||||
allow_unsigned_uploads = 0
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if PPA is configured in dput.cf
|
||||
if ! grep -q "^\[ppa:avengemedia/$PPA_NAME\]" "$HOME/.dput.cf"; then
|
||||
error "PPA 'ppa:avengemedia/$PPA_NAME' not found in ~/.dput.cf"
|
||||
echo
|
||||
info "Add this to ~/.dput.cf:"
|
||||
cat <<EOF
|
||||
[ppa:avengemedia/$PPA_NAME]
|
||||
fqdn = ppa.launchpad.net
|
||||
method = ftp
|
||||
incoming = ~avengemedia/ubuntu/$PPA_NAME/
|
||||
login = anonymous
|
||||
allow_unsigned_uploads = 0
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract package info from changes file
|
||||
PACKAGE_NAME=$(grep "^Source:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
VERSION=$(grep "^Version:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
|
||||
info "Package: $PACKAGE_NAME"
|
||||
info "Version: $VERSION"
|
||||
|
||||
# Show files that will be uploaded
|
||||
echo
|
||||
info "Files to be uploaded:"
|
||||
grep "^ [a-f0-9]" "$CHANGES_FILE" | awk '{print " - " $5}' || true
|
||||
|
||||
# Verify GPG signature
|
||||
info "Verifying GPG signature..."
|
||||
if gpg --verify "$CHANGES_FILE" 2>/dev/null; then
|
||||
success "GPG signature valid"
|
||||
else
|
||||
error "GPG signature verification failed!"
|
||||
error "The .changes file must be signed with your GPG key"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Ask for confirmation
|
||||
echo
|
||||
warn "About to upload to: ppa:avengemedia/$PPA_NAME"
|
||||
read -p "Continue? (y/N) " -n 1 -r
|
||||
echo
|
||||
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
|
||||
info "Upload cancelled"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Upload to PPA
|
||||
info "Uploading to Launchpad..."
|
||||
echo
|
||||
|
||||
if dput "ppa:avengemedia/$PPA_NAME" "$CHANGES_FILE"; then
|
||||
echo
|
||||
success "Upload successful!"
|
||||
echo
|
||||
info "Monitor build progress at:"
|
||||
echo " https://launchpad.net/~avengemedia/+archive/ubuntu/$PPA_NAME/+packages"
|
||||
echo
|
||||
info "Builds typically take 5-30 minutes depending on:"
|
||||
echo " - Build queue length"
|
||||
echo " - Package complexity"
|
||||
echo " - Number of target Ubuntu series"
|
||||
echo
|
||||
info "Once built, users can install with:"
|
||||
echo " sudo add-apt-repository ppa:avengemedia/$PPA_NAME"
|
||||
echo " sudo apt update"
|
||||
echo " sudo apt install $PACKAGE_NAME"
|
||||
|
||||
else
|
||||
error "Upload failed!"
|
||||
echo
|
||||
info "Common issues:"
|
||||
echo " - GPG key not verified on Launchpad (check https://launchpad.net/~/+editpgpkeys)"
|
||||
echo " - Version already uploaded (must increment version number)"
|
||||
echo " - Network/firewall blocking FTP (try HTTPS method in dput.cf)"
|
||||
echo " - Email in changelog doesn't match GPG key email"
|
||||
exit 1
|
||||
fi
|
||||
246
distro/scripts/ppa-upload.sh
Executable file
@@ -0,0 +1,246 @@
|
||||
#!/bin/bash
|
||||
# Build and upload PPA package with automatic cleanup
# Usage: ./ppa-upload.sh <package-dir> <ppa-name> [ubuntu-series] [--keep-builds]
#
# Example:
#   ./ppa-upload.sh ../dms dms questing
#   ./ppa-upload.sh ../danklinux/dgop danklinux questing --keep-builds
|
||||
|
||||
set -e
|
||||
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
# Parse arguments
|
||||
KEEP_BUILDS=false
|
||||
ARGS=()
|
||||
for arg in "$@"; do
|
||||
if [ "$arg" = "--keep-builds" ]; then
|
||||
KEEP_BUILDS=true
|
||||
else
|
||||
ARGS+=("$arg")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#ARGS[@]} -lt 2 ]; then
|
||||
error "Usage: $0 <package-dir> <ppa-name> [ubuntu-series] [--keep-builds]"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " package-dir : Path to package directory (e.g., ../dms, ../danklinux/dgop)"
|
||||
echo " ppa-name : PPA name (danklinux, dms, dms-git)"
|
||||
echo " ubuntu-series : Ubuntu series (optional, default: questing)"
|
||||
echo " Supported: questing (25.10) and newer only"
|
||||
echo " Note: Requires Qt 6.6+ (quickshell requirement)"
|
||||
echo " --keep-builds : Keep build artifacts after upload (optional)"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " $0 ../dms dms questing"
|
||||
echo " $0 ../danklinux/dgop danklinux questing --keep-builds"
|
||||
echo " $0 ../dms-git dms-git # Defaults to questing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PACKAGE_DIR="${ARGS[0]}"
|
||||
PPA_NAME="${ARGS[1]}"
|
||||
UBUNTU_SERIES="${ARGS[2]:-questing}"
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
BUILD_SCRIPT="$SCRIPT_DIR/ppa-build.sh"
|
||||
UPLOAD_SCRIPT="$SCRIPT_DIR/ppa-dput.sh"
|
||||
|
||||
# Validate scripts exist
|
||||
if [ ! -f "$BUILD_SCRIPT" ]; then
|
||||
error "Build script not found: $BUILD_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get absolute path
|
||||
PACKAGE_DIR=$(cd "$PACKAGE_DIR" && pwd)
|
||||
PACKAGE_NAME=$(basename "$PACKAGE_DIR")
|
||||
PARENT_DIR=$(dirname "$PACKAGE_DIR")
|
||||
|
||||
info "Building and uploading: $PACKAGE_NAME"
|
||||
info "Package directory: $PACKAGE_DIR"
|
||||
info "PPA: ppa:avengemedia/$PPA_NAME"
|
||||
info "Ubuntu series: $UBUNTU_SERIES"
|
||||
echo
|
||||
|
||||
# Step 1: Build source package
|
||||
info "Step 1: Building source package..."
|
||||
if ! "$BUILD_SCRIPT" "$PACKAGE_DIR" "$UBUNTU_SERIES"; then
|
||||
error "Build failed!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Find the changes file
|
||||
CHANGES_FILE=$(find "$PARENT_DIR" -maxdepth 1 -name "${PACKAGE_NAME}_*_source.changes" -type f | sort -V | tail -1)
|
||||
|
||||
if [ -z "$CHANGES_FILE" ]; then
|
||||
error "Changes file not found in $PARENT_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
info "Found changes file: $CHANGES_FILE"
|
||||
echo
|
||||
|
||||
# Step 2: Upload to PPA
|
||||
info "Step 2: Uploading to PPA..."
|
||||
|
||||
# Check if using lftp (for all PPAs) or dput
|
||||
if [ "$PPA_NAME" = "danklinux" ] || [ "$PPA_NAME" = "dms" ] || [ "$PPA_NAME" = "dms-git" ]; then
|
||||
warn "Using lftp for upload"
|
||||
|
||||
# Extract version from changes file
|
||||
VERSION=$(grep "^Version:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
SOURCE_NAME=$(grep "^Source:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
|
||||
# Find all files to upload
|
||||
BUILD_DIR=$(dirname "$CHANGES_FILE")
|
||||
CHANGES_BASENAME=$(basename "$CHANGES_FILE")
|
||||
DSC_FILE="${CHANGES_BASENAME/_source.changes/.dsc}"
|
||||
TARBALL="${CHANGES_BASENAME/_source.changes/.tar.xz}"
|
||||
BUILDINFO="${CHANGES_BASENAME/_source.changes/_source.buildinfo}"
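# Illustrative mapping (hypothetical version): for dms_0.6.2ppa1_source.changes
# the derived names are dms_0.6.2ppa1.dsc, dms_0.6.2ppa1.tar.xz and
# dms_0.6.2ppa1_source.buildinfo.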
|
||||
|
||||
# Check all files exist
|
||||
MISSING_FILES=()
|
||||
[ ! -f "$BUILD_DIR/$DSC_FILE" ] && MISSING_FILES+=("$DSC_FILE")
|
||||
[ ! -f "$BUILD_DIR/$TARBALL" ] && MISSING_FILES+=("$TARBALL")
|
||||
[ ! -f "$BUILD_DIR/$BUILDINFO" ] && MISSING_FILES+=("$BUILDINFO")
|
||||
|
||||
if [ ${#MISSING_FILES[@]} -gt 0 ]; then
|
||||
error "Missing required files:"
|
||||
for file in "${MISSING_FILES[@]}"; do
|
||||
error " - $file"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
info "Uploading files:"
|
||||
info " - $CHANGES_BASENAME"
|
||||
info " - $DSC_FILE"
|
||||
info " - $TARBALL"
|
||||
info " - $BUILDINFO"
|
||||
echo
|
||||
|
||||
# Build an lftp batch script: change into the PPA incoming directory and upload the source package files
|
||||
LFTP_SCRIPT=$(mktemp)
|
||||
cat > "$LFTP_SCRIPT" <<EOF
|
||||
cd ~avengemedia/ubuntu/$PPA_NAME/
|
||||
lcd $BUILD_DIR
|
||||
mput $CHANGES_BASENAME
|
||||
mput $DSC_FILE
|
||||
mput $TARBALL
|
||||
mput $BUILDINFO
|
||||
bye
|
||||
EOF
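# Anonymous FTP is sufficient here: Launchpad authenticates PPA uploads via
# the GPG signature embedded in the .changes file, not via FTP credentials.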
|
||||
|
||||
if lftp -d ftp://anonymous:@ppa.launchpad.net < "$LFTP_SCRIPT"; then
|
||||
success "Upload successful!"
|
||||
rm -f "$LFTP_SCRIPT"
|
||||
else
|
||||
error "Upload failed!"
|
||||
rm -f "$LFTP_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# Use dput for other PPAs
|
||||
if [ ! -f "$UPLOAD_SCRIPT" ]; then
|
||||
error "Upload script not found: $UPLOAD_SCRIPT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Auto-confirm upload (pipe 'y' to the confirmation prompt)
|
||||
if ! echo "y" | "$UPLOAD_SCRIPT" "$CHANGES_FILE" "$PPA_NAME"; then
|
||||
error "Upload failed!"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo
|
||||
success "Package uploaded successfully!"
|
||||
info "Monitor build progress at:"
|
||||
echo " https://launchpad.net/~avengemedia/+archive/ubuntu/$PPA_NAME/+packages"
|
||||
echo
|
||||
|
||||
# Step 3: Cleanup (unless --keep-builds is specified)
|
||||
if [ "$KEEP_BUILDS" = "false" ]; then
|
||||
info "Step 3: Cleaning up build artifacts..."
|
||||
|
||||
# Find all build artifacts in parent directory
|
||||
ARTIFACTS=(
|
||||
"${PACKAGE_NAME}_*.dsc"
|
||||
"${PACKAGE_NAME}_*.tar.xz"
|
||||
"${PACKAGE_NAME}_*.tar.gz"
|
||||
"${PACKAGE_NAME}_*_source.changes"
|
||||
"${PACKAGE_NAME}_*_source.buildinfo"
|
||||
"${PACKAGE_NAME}_*_source.build"
|
||||
)
|
||||
|
||||
REMOVED=0
|
||||
for pattern in "${ARTIFACTS[@]}"; do
|
||||
for file in "$PARENT_DIR"/$pattern; do
|
||||
if [ -f "$file" ]; then
|
||||
rm -f "$file"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
# Clean up downloaded binaries in package directory
|
||||
case "$PACKAGE_NAME" in
|
||||
danksearch)
|
||||
if [ -f "$PACKAGE_DIR/dsearch-amd64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-amd64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dsearch-arm64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-arm64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms)
|
||||
# Remove downloaded binaries and source
|
||||
if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dms-source.tar.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-source.tar.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms-git)
|
||||
# Remove downloaded binary
|
||||
if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
# Remove git source directory
|
||||
if [ -d "$PACKAGE_DIR/dms-git-repo" ]; then
|
||||
rm -rf "$PACKAGE_DIR/dms-git-repo"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ $REMOVED -gt 0 ]; then
|
||||
success "Removed $REMOVED build artifact(s)"
|
||||
else
|
||||
info "No build artifacts to clean up"
|
||||
fi
|
||||
else
|
||||
info "Keeping build artifacts (--keep-builds specified)"
|
||||
info "Build artifacts in: $PARENT_DIR"
|
||||
fi
|
||||
|
||||
echo
|
||||
success "Done!"
|
||||
|
||||
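For orientation, this is roughly how the upload helper above would be driven by hand. The script path and argument order below are assumptions for illustration; only the --keep-builds flag and the PPA names appear in the script itself:

# Hypothetical local invocation of the PPA upload flow above (script path is an assumption).
# Assumes a signed *_source.changes file already exists in the parent build directory.
cd distro/ubuntu
bash scripts/ppa-upload.sh dms                 # upload to the "dms" PPA, then clean up artifacts
bash scripts/ppa-upload.sh dms --keep-builds   # same upload, but keep .dsc/.changes for inspection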
5
distro/ubuntu/danklinux/danksearch/debian/changelog
Normal file
@@ -0,0 +1,5 @@
danksearch (0.0.7ppa3) questing; urgency=medium

  * Rebuild for packaging fixes (ppa3)

 -- Avenge Media <AvengeMedia.US@gmail.com>  Fri, 21 Nov 2025 14:19:58 -0500
24
distro/ubuntu/danklinux/danksearch/debian/control
Normal file
@@ -0,0 +1,24 @@
|
||||
Source: danksearch
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/danksearch
|
||||
Vcs-Browser: https://github.com/AvengeMedia/danksearch
|
||||
Vcs-Git: https://github.com/AvengeMedia/danksearch.git
|
||||
|
||||
Package: danksearch
|
||||
Architecture: amd64 arm64
|
||||
Depends: ${misc:Depends}
|
||||
Description: Fast file search utility for DMS
|
||||
DankSearch is a fast file search utility designed for DankMaterialShell.
|
||||
It provides efficient file and content search capabilities with minimal
|
||||
dependencies. This package contains the pre-built binary from the official
|
||||
GitHub release.
|
||||
.
|
||||
Features include:
|
||||
- Fast file searching
|
||||
- Lightweight and efficient
|
||||
- Designed for DMS integration
|
||||
- Minimal resource usage
|
||||
24
distro/ubuntu/danklinux/danksearch/debian/copyright
Normal file
@@ -0,0 +1,24 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: danksearch
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/danksearch
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: GPL-3.0-only
|
||||
|
||||
License: GPL-3.0-only
|
||||
This package is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License version 3 as
|
||||
published by the Free Software Foundation.
|
||||
.
|
||||
This package is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
.
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>
|
||||
.
|
||||
On Debian systems, the complete text of the GNU General
|
||||
Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
|
||||
1
distro/ubuntu/danklinux/danksearch/debian/files
Normal file
@@ -0,0 +1 @@
danksearch_0.0.7ppa3_source.buildinfo utils optional
33
distro/ubuntu/danklinux/danksearch/debian/rules
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
export DH_VERBOSE = 1
|
||||
|
||||
# Detect architecture for selecting correct binary
|
||||
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)
|
||||
|
||||
# Map Debian arch to binary filename
|
||||
ifeq ($(DEB_HOST_ARCH),amd64)
|
||||
BINARY_FILE := dsearch-amd64
|
||||
else ifeq ($(DEB_HOST_ARCH),arm64)
|
||||
BINARY_FILE := dsearch-arm64
|
||||
else
|
||||
$(error Unsupported architecture: $(DEB_HOST_ARCH))
|
||||
endif
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
# Binary is already included in source package (native format)
|
||||
# Downloaded by build-source.sh before upload
|
||||
# Just verify it exists and is executable
|
||||
test -f $(BINARY_FILE) || (echo "ERROR: $(BINARY_FILE) not found!" && exit 1)
|
||||
chmod +x $(BINARY_FILE)
|
||||
|
||||
override_dh_auto_install:
|
||||
# Install binary as danksearch
|
||||
install -Dm755 $(BINARY_FILE) debian/danksearch/usr/bin/danksearch
|
||||
|
||||
override_dh_auto_clean:
|
||||
# Don't delete binaries - they're part of the source package (native format)
|
||||
dh_auto_clean
|
||||
1
distro/ubuntu/danklinux/danksearch/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
BIN
distro/ubuntu/danklinux/danksearch/dsearch-amd64
Executable file
Binary file not shown.
BIN
distro/ubuntu/danklinux/danksearch/dsearch-arm64
Executable file
Binary file not shown.
9
distro/ubuntu/danklinux/dgop/debian/changelog
Normal file
@@ -0,0 +1,9 @@
dgop (0.1.11ppa2) questing; urgency=medium

  * Rebuild for Questing (25.10) - Ubuntu 25.10+ only
  * Stateless CPU/GPU monitoring tool
  * Support for NVIDIA and AMD GPUs
  * JSON output for integration
  * Pre-built binary package for amd64 and arm64

 -- Avenge Media <AvengeMedia.US@gmail.com>  Sun, 16 Nov 2025 22:50:00 -0500
27
distro/ubuntu/danklinux/dgop/debian/control
Normal file
@@ -0,0 +1,27 @@
|
||||
Source: dgop
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13),
|
||||
wget,
|
||||
gzip
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/dgop
|
||||
Vcs-Browser: https://github.com/AvengeMedia/dgop
|
||||
Vcs-Git: https://github.com/AvengeMedia/dgop.git
|
||||
|
||||
Package: dgop
|
||||
Architecture: amd64 arm64
|
||||
Depends: ${misc:Depends}
|
||||
Description: Stateless CPU/GPU monitor for DankMaterialShell
|
||||
DGOP is a stateless system monitoring tool that provides CPU, GPU,
|
||||
memory, and network statistics. Designed for integration with
|
||||
DankMaterialShell but can be used standalone.
|
||||
.
|
||||
Features:
|
||||
- CPU usage monitoring
|
||||
- GPU usage and temperature (NVIDIA, AMD)
|
||||
- Memory and swap statistics
|
||||
- Network traffic monitoring
|
||||
- Zero-state design (no background processes)
|
||||
- JSON output for easy integration
|
||||
27
distro/ubuntu/danklinux/dgop/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dgop
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/dgop
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/ubuntu/danklinux/dgop/debian/files
Normal file
@@ -0,0 +1 @@
dgop_0.1.11ppa2_source.buildinfo utils optional
38
distro/ubuntu/danklinux/dgop/debian/rules
Executable file
@@ -0,0 +1,38 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
export DH_VERBOSE = 1
|
||||
|
||||
# Extract version from debian/changelog
|
||||
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
|
||||
# Get upstream version (strip -1ppa1 suffix)
|
||||
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
|
||||
|
||||
# Detect architecture for downloading correct binary
|
||||
DEB_HOST_ARCH := $(shell dpkg-architecture -qDEB_HOST_ARCH)
|
||||
|
||||
# Map Debian arch to GitHub release arch names
|
||||
ifeq ($(DEB_HOST_ARCH),amd64)
|
||||
GITHUB_ARCH := amd64
|
||||
else ifeq ($(DEB_HOST_ARCH),arm64)
|
||||
GITHUB_ARCH := arm64
|
||||
else
|
||||
$(error Unsupported architecture: $(DEB_HOST_ARCH))
|
||||
endif
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
# Binary is already included in source package (native format)
|
||||
# Just verify it exists and is executable
|
||||
test -f dgop || (echo "ERROR: dgop binary not found!" && exit 1)
|
||||
chmod +x dgop
|
||||
|
||||
override_dh_auto_install:
|
||||
# Install binary
|
||||
install -Dm755 dgop debian/dgop/usr/bin/dgop
|
||||
|
||||
override_dh_auto_clean:
|
||||
# Don't delete dgop binary - it's part of the source package (native format)
|
||||
rm -f dgop.gz
|
||||
dh_auto_clean
|
||||
1
distro/ubuntu/danklinux/dgop/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
5
distro/ubuntu/dms-git/debian/changelog
Normal file
@@ -0,0 +1,5 @@
dms-git (0.6.2+git2094.6cc6e7c8ppa1) questing; urgency=medium

  * Git snapshot (commit 2094: 6cc6e7c8)

 -- Avenge Media <AvengeMedia.US@gmail.com>  Sun, 23 Nov 2025 00:43:28 -0500
50
distro/ubuntu/dms-git/debian/control
Normal file
@@ -0,0 +1,50 @@
|
||||
Source: dms-git
|
||||
Section: x11
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git
|
||||
|
||||
Package: dms-git
|
||||
Architecture: amd64
|
||||
Depends: ${misc:Depends},
|
||||
quickshell-git | quickshell,
|
||||
accountsservice,
|
||||
cava,
|
||||
cliphist,
|
||||
danksearch,
|
||||
dgop,
|
||||
matugen,
|
||||
qml6-module-qtcore,
|
||||
qml6-module-qtmultimedia,
|
||||
qml6-module-qtqml,
|
||||
qml6-module-qtquick,
|
||||
qml6-module-qtquick-controls,
|
||||
qml6-module-qtquick-dialogs,
|
||||
qml6-module-qtquick-effects,
|
||||
qml6-module-qtquick-layouts,
|
||||
qml6-module-qtquick-templates,
|
||||
qml6-module-qtquick-window,
|
||||
qt6ct,
|
||||
wl-clipboard
|
||||
Provides: dms
|
||||
Conflicts: dms
|
||||
Replaces: dms
|
||||
Description: DankMaterialShell - Modern Wayland Desktop Shell (git nightly)
|
||||
DMS (DankMaterialShell) is a feature-rich desktop shell built on
|
||||
Quickshell, providing a modern and customizable user interface for
|
||||
Wayland compositors like niri, hyprland, and sway.
|
||||
.
|
||||
This is the nightly/git version built from the latest master branch.
|
||||
.
|
||||
Features include:
|
||||
- Material Design inspired UI
|
||||
- Customizable themes and appearance
|
||||
- Built-in application launcher
|
||||
- System tray and notifications
|
||||
- Network and Bluetooth management
|
||||
- Audio controls
|
||||
- Systemd integration
|
||||
27
distro/ubuntu/dms-git/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dms
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/DankMaterialShell
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/ubuntu/dms-git/debian/files
Normal file
@@ -0,0 +1 @@
dms-git_0.6.2+git2094.6cc6e7c8ppa1_source.buildinfo x11 optional
45
distro/ubuntu/dms-git/debian/rules
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
export DH_VERBOSE = 1
|
||||
|
||||
# Get git commit date for version
|
||||
GIT_DATE := $(shell date +%Y%m%d)
|
||||
GIT_COMMIT := HEAD
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
# Git source is already included in source package (cloned by build-source.sh)
|
||||
# Launchpad build environment has no internet access
|
||||
test -d dms-git-repo || (echo "ERROR: dms-git-repo directory not found!" && exit 1)
|
||||
test -f dms-distropkg-amd64.gz || (echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1)
|
||||
|
||||
# Extract pre-built binary from latest release
|
||||
# Note: For git versions, we use the latest release binary
|
||||
# The QML files come from git master
|
||||
gunzip -c dms-distropkg-amd64.gz > dms
|
||||
chmod +x dms
|
||||
|
||||
override_dh_auto_install:
|
||||
# Install binary
|
||||
install -Dm755 dms debian/dms-git/usr/bin/dms
|
||||
|
||||
# Install QML files from git clone
|
||||
mkdir -p debian/dms-git/usr/share/quickshell/dms
|
||||
cp -r dms-git-repo/* debian/dms-git/usr/share/quickshell/dms/
|
||||
|
||||
# Remove unnecessary directories
|
||||
rm -rf debian/dms-git/usr/share/quickshell/dms/core
|
||||
rm -rf debian/dms-git/usr/share/quickshell/dms/distro
|
||||
|
||||
# Install systemd user service
|
||||
install -Dm644 dms-git-repo/quickshell/assets/systemd/dms.service \
|
||||
debian/dms-git/usr/lib/systemd/user/dms.service
|
||||
|
||||
override_dh_auto_clean:
|
||||
# Don't delete dms-git-repo directory - it's part of the source package (native format)
|
||||
# Clean up build artifacts (but keep dms-distropkg-amd64.gz for Launchpad)
|
||||
rm -f dms
|
||||
# Don't remove dms-distropkg-amd64.gz - it needs to be included in the source package for Launchpad builds
|
||||
dh_auto_clean
|
||||
1
distro/ubuntu/dms-git/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
1
distro/ubuntu/dms-git/debian/source/include-binaries
Normal file
@@ -0,0 +1 @@
dms-distropkg-amd64.gz
4
distro/ubuntu/dms-git/debian/source/options
Normal file
@@ -0,0 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad (which has no internet access)
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-git-repo
5
distro/ubuntu/dms/debian/changelog
Normal file
@@ -0,0 +1,5 @@
dms (0.6.2ppa3) questing; urgency=medium

  * Rebuild for packaging fixes (ppa3)

 -- Avenge Media <AvengeMedia.US@gmail.com>  Sun, 23 Nov 2025 00:40:41 -0500
47
distro/ubuntu/dms/debian/control
Normal file
@@ -0,0 +1,47 @@
|
||||
Source: dms
|
||||
Section: x11
|
||||
Priority: optional
|
||||
Maintainer: Avenge Media <AvengeMedia.US@gmail.com>
|
||||
Build-Depends: debhelper-compat (= 13)
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Browser: https://github.com/AvengeMedia/DankMaterialShell
|
||||
Vcs-Git: https://github.com/AvengeMedia/DankMaterialShell.git
|
||||
|
||||
Package: dms
|
||||
Architecture: amd64
|
||||
Depends: ${misc:Depends},
|
||||
quickshell-git | quickshell,
|
||||
accountsservice,
|
||||
cava,
|
||||
cliphist,
|
||||
danksearch,
|
||||
dgop,
|
||||
matugen,
|
||||
qml6-module-qtcore,
|
||||
qml6-module-qtmultimedia,
|
||||
qml6-module-qtqml,
|
||||
qml6-module-qtquick,
|
||||
qml6-module-qtquick-controls,
|
||||
qml6-module-qtquick-dialogs,
|
||||
qml6-module-qtquick-effects,
|
||||
qml6-module-qtquick-layouts,
|
||||
qml6-module-qtquick-templates,
|
||||
qml6-module-qtquick-window,
|
||||
qt6ct,
|
||||
wl-clipboard
|
||||
Conflicts: dms-git
|
||||
Replaces: dms-git
|
||||
Description: DankMaterialShell - Modern Wayland Desktop Shell
|
||||
DMS (DankMaterialShell) is a feature-rich desktop shell built on
|
||||
Quickshell, providing a modern and customizable user interface for
|
||||
Wayland compositors like niri, hyprland, and sway.
|
||||
.
|
||||
Features include:
|
||||
- Material Design inspired UI
|
||||
- Customizable themes and appearance
|
||||
- Built-in application launcher
|
||||
- System tray and notifications
|
||||
- Network and Bluetooth management
|
||||
- Audio controls
|
||||
- Systemd integration
|
||||
27
distro/ubuntu/dms/debian/copyright
Normal file
@@ -0,0 +1,27 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: dms
|
||||
Upstream-Contact: Avenge Media LLC <AvengeMedia.US@gmail.com>
|
||||
Source: https://github.com/AvengeMedia/DankMaterialShell
|
||||
|
||||
Files: *
|
||||
Copyright: 2025 Avenge Media LLC
|
||||
License: MIT
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
1
distro/ubuntu/dms/debian/files
Normal file
@@ -0,0 +1 @@
dms_0.6.2ppa3_source.buildinfo x11 optional
63
distro/ubuntu/dms/debian/rules
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
export DH_VERBOSE = 1
|
||||
|
||||
# Extract version from debian/changelog
|
||||
DEB_VERSION := $(shell dpkg-parsechangelog -S Version)
|
||||
# Get upstream version (strip -1ppa1 suffix)
|
||||
UPSTREAM_VERSION := $(shell echo $(DEB_VERSION) | sed 's/-[^-]*$$//')
|
||||
# Strip ppa suffix and handle git versions
|
||||
# Examples: 0.5.2ppa9 -> 0.5.2, 0.5.2+git20251116 -> 0.5.2
|
||||
BASE_VERSION := $(shell echo $(UPSTREAM_VERSION) | sed 's/ppa[0-9]*$$//' | sed 's/+git.*//')
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
override_dh_auto_build:
|
||||
# All files are included in source package (downloaded by build-source.sh)
|
||||
# Launchpad build environment has no internet access
|
||||
test -f dms-distropkg-amd64.gz || (echo "ERROR: dms-distropkg-amd64.gz not found!" && exit 1)
|
||||
test -f dms-source.tar.gz || (echo "ERROR: dms-source.tar.gz not found!" && exit 1)
|
||||
|
||||
# Extract pre-built binary
|
||||
gunzip -c dms-distropkg-amd64.gz > dms
|
||||
chmod +x dms
|
||||
|
||||
# Extract source tarball for QML files
|
||||
tar -xzf dms-source.tar.gz
|
||||
# Find the extracted directory (it might have various names)
|
||||
# and create a symlink to expected name for consistent install
|
||||
SOURCE_DIR=$$(find . -maxdepth 1 -type d -name "DankMaterialShell*" | head -n1); \
|
||||
if [ -n "$$SOURCE_DIR" ]; then \
|
||||
ln -sf $$SOURCE_DIR DankMaterialShell-$(BASE_VERSION); \
|
||||
fi
|
||||
|
||||
override_dh_auto_install:
|
||||
# Install binary
|
||||
install -Dm755 dms debian/dms/usr/bin/dms
|
||||
|
||||
# Install QML files from source tarball
|
||||
mkdir -p debian/dms/usr/share/quickshell/dms
|
||||
cp -r DankMaterialShell-$(BASE_VERSION)/* debian/dms/usr/share/quickshell/dms/
|
||||
|
||||
# Remove unnecessary directories
|
||||
rm -rf debian/dms/usr/share/quickshell/dms/core
|
||||
rm -rf debian/dms/usr/share/quickshell/dms/distro
|
||||
|
||||
# Install systemd user service
|
||||
install -Dm644 DankMaterialShell-$(BASE_VERSION)/quickshell/assets/systemd/dms.service \
|
||||
debian/dms/usr/lib/systemd/user/dms.service
|
||||
|
||||
# Generate and install shell completions (if applicable)
|
||||
# Uncomment if dms supports completion generation
|
||||
# ./dms completion bash > dms.bash
|
||||
# ./dms completion zsh > dms.zsh
|
||||
# install -Dm644 dms.bash debian/dms/usr/share/bash-completion/completions/dms
|
||||
# install -Dm644 dms.zsh debian/dms/usr/share/zsh/vendor-completions/_dms
|
||||
|
||||
override_dh_auto_clean:
|
||||
rm -f dms
|
||||
rm -rf DankMaterialShell-*
|
||||
# Don't remove dms-distropkg-amd64.gz and dms-source.tar.gz
|
||||
# They need to be included in the source package for Launchpad builds
|
||||
dh_auto_clean
|
||||
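The version handling in the rules file above can be sanity-checked in a plain shell. A minimal sketch of the same two sed expressions (using a single $ instead of the makefile's escaped $$), assuming the release version from debian/changelog:

# Illustration of the UPSTREAM_VERSION / BASE_VERSION derivation used in debian/rules.
DEB_VERSION="0.6.2ppa3"                                      # as reported by dpkg-parsechangelog -S Version
UPSTREAM_VERSION=$(echo "$DEB_VERSION" | sed 's/-[^-]*$//')  # no Debian revision present, stays 0.6.2ppa3
BASE_VERSION=$(echo "$UPSTREAM_VERSION" | sed 's/ppa[0-9]*$//' | sed 's/+git.*//')
echo "$BASE_VERSION"                                         # prints 0.6.2, matching DankMaterialShell-0.6.2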
1
distro/ubuntu/dms/debian/source/format
Normal file
@@ -0,0 +1 @@
3.0 (native)
2
distro/ubuntu/dms/debian/source/include-binaries
Normal file
@@ -0,0 +1,2 @@
dms-distropkg-amd64.gz
dms-source.tar.gz
4
distro/ubuntu/dms/debian/source/options
Normal file
@@ -0,0 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad (which has no internet access)
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-source.tar.gz
44
distro/ubuntu/dput.cf.template
Normal file
@@ -0,0 +1,44 @@
|
||||
# dput configuration for AvengeMedia DMS PPAs
|
||||
# Copy this to ~/.dput.cf (or merge with existing ~/.dput.cf)
|
||||
#
|
||||
# Usage:
|
||||
# dput ppa:avengemedia/dms ../package_version_source.changes
|
||||
# dput ppa:avengemedia/dms-git ../package_version_source.changes
|
||||
|
||||
# Stable DMS PPA - for release versions
|
||||
[ppa:avengemedia/dms]
|
||||
fqdn = ppa.launchpad.net
|
||||
method = ftp
|
||||
incoming = ~avengemedia/ubuntu/dms/
|
||||
login = anonymous
|
||||
allow_unsigned_uploads = 0
|
||||
|
||||
# Nightly/Git DMS PPA - for development builds
|
||||
[ppa:avengemedia/dms-git]
|
||||
fqdn = ppa.launchpad.net
|
||||
method = ftp
|
||||
incoming = ~avengemedia/ubuntu/dms-git/
|
||||
login = anonymous
|
||||
allow_unsigned_uploads = 0
|
||||
|
||||
# Alternative: Use HTTPS instead of FTP (more reliable through firewalls)
|
||||
# Uncomment these if FTP doesn't work:
|
||||
#
|
||||
# [ppa:avengemedia/dms-https]
|
||||
# fqdn = ppa.launchpad.net
|
||||
# method = https
|
||||
# incoming = ~avengemedia/ubuntu/dms/
|
||||
# login = anonymous
|
||||
# allow_unsigned_uploads = 0
|
||||
#
|
||||
# [ppa:avengemedia/dms-git-https]
|
||||
# fqdn = ppa.launchpad.net
|
||||
# method = https
|
||||
# incoming = ~avengemedia/ubuntu/dms-git/
|
||||
# login = anonymous
|
||||
# allow_unsigned_uploads = 0
|
||||
|
||||
# Notes:
|
||||
# - allow_unsigned_uploads = 0 enforces GPG signing (required by Launchpad)
|
||||
# - anonymous login is standard for PPA uploads
|
||||
# - The incoming path must match your Launchpad username and PPA name
|
||||
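As a usage illustration of this template (the artifact name below is a placeholder derived from the dms 0.6.2ppa3 changelog, and GPG_KEY_ID stands in for whatever signing key is configured):

# Hypothetical signed source upload using the dput configuration above.
cp distro/ubuntu/dput.cf.template ~/.dput.cf                 # or merge into an existing ~/.dput.cf
debuild -S -sa -k"$GPG_KEY_ID"                               # build and GPG-sign the source package
dput ppa:avengemedia/dms ../dms_0.6.2ppa3_source.changes     # push the signed .changes to the stable PPA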
@@ -1,5 +1,4 @@
|
||||
pragma Singleton
|
||||
|
||||
pragma ComponentBehavior: Bound
|
||||
|
||||
import QtCore
|
||||
@@ -23,79 +22,79 @@ Singleton {
|
||||
property string profileLastPath: ""
|
||||
|
||||
property var fileBrowserSettings: ({
|
||||
"wallpaper": {
|
||||
"lastPath": "",
|
||||
"viewMode": "grid",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"profile": {
|
||||
"lastPath": "",
|
||||
"viewMode": "grid",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"notepad_save": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"notepad_load": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"generic": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"default": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
}
|
||||
})
|
||||
"wallpaper": {
|
||||
"lastPath": "",
|
||||
"viewMode": "grid",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"profile": {
|
||||
"lastPath": "",
|
||||
"viewMode": "grid",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"notepad_save": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"notepad_load": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"generic": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
},
|
||||
"default": {
|
||||
"lastPath": "",
|
||||
"viewMode": "list",
|
||||
"sortBy": "name",
|
||||
"sortAscending": true,
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
}
|
||||
})
|
||||
|
||||
Component.onCompleted: {
|
||||
if (!isGreeterMode) {
|
||||
loadCache()
|
||||
loadCache();
|
||||
}
|
||||
}
|
||||
|
||||
function loadCache() {
|
||||
_loading = true
|
||||
parseCache(cacheFile.text())
|
||||
_loading = false
|
||||
_loading = true;
|
||||
parseCache(cacheFile.text());
|
||||
_loading = false;
|
||||
}
|
||||
|
||||
function parseCache(content) {
|
||||
_loading = true
|
||||
_loading = true;
|
||||
try {
|
||||
if (content && content.trim()) {
|
||||
const cache = JSON.parse(content)
|
||||
const cache = JSON.parse(content);
|
||||
|
||||
wallpaperLastPath = cache.wallpaperLastPath !== undefined ? cache.wallpaperLastPath : ""
|
||||
profileLastPath = cache.profileLastPath !== undefined ? cache.profileLastPath : ""
|
||||
wallpaperLastPath = cache.wallpaperLastPath !== undefined ? cache.wallpaperLastPath : "";
|
||||
profileLastPath = cache.profileLastPath !== undefined ? cache.profileLastPath : "";
|
||||
|
||||
if (cache.fileBrowserSettings !== undefined) {
|
||||
fileBrowserSettings = cache.fileBrowserSettings
|
||||
fileBrowserSettings = cache.fileBrowserSettings;
|
||||
} else if (cache.fileBrowserViewMode !== undefined) {
|
||||
fileBrowserSettings = {
|
||||
"wallpaper": {
|
||||
@@ -122,65 +121,60 @@ Singleton {
|
||||
"iconSizeIndex": 1,
|
||||
"showSidebar": true
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if (cache.configVersion === undefined) {
|
||||
migrateFromUndefinedToV1(cache)
|
||||
cleanupUnusedKeys()
|
||||
saveCache()
|
||||
migrateFromUndefinedToV1(cache);
|
||||
cleanupUnusedKeys();
|
||||
saveCache();
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn("CacheData: Failed to parse cache:", e.message)
|
||||
console.warn("CacheData: Failed to parse cache:", e.message);
|
||||
} finally {
|
||||
_loading = false
|
||||
_loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
function saveCache() {
|
||||
if (_loading)
|
||||
return
|
||||
return;
|
||||
cacheFile.setText(JSON.stringify({
|
||||
"wallpaperLastPath": wallpaperLastPath,
|
||||
"profileLastPath": profileLastPath,
|
||||
"fileBrowserSettings": fileBrowserSettings,
|
||||
"configVersion": cacheConfigVersion
|
||||
}, null, 2))
|
||||
"wallpaperLastPath": wallpaperLastPath,
|
||||
"profileLastPath": profileLastPath,
|
||||
"fileBrowserSettings": fileBrowserSettings,
|
||||
"configVersion": cacheConfigVersion
|
||||
}, null, 2));
|
||||
}
|
||||
|
||||
function migrateFromUndefinedToV1(cache) {
|
||||
console.info("CacheData: Migrating configuration from undefined to version 1")
|
||||
console.info("CacheData: Migrating configuration from undefined to version 1");
|
||||
}
|
||||
|
||||
function cleanupUnusedKeys() {
|
||||
const validKeys = [
|
||||
"wallpaperLastPath",
|
||||
"profileLastPath",
|
||||
"fileBrowserSettings",
|
||||
"configVersion"
|
||||
]
|
||||
const validKeys = ["wallpaperLastPath", "profileLastPath", "fileBrowserSettings", "configVersion"];
|
||||
|
||||
try {
|
||||
const content = cacheFile.text()
|
||||
if (!content || !content.trim()) return
|
||||
|
||||
const cache = JSON.parse(content)
|
||||
let needsSave = false
|
||||
const content = cacheFile.text();
|
||||
if (!content || !content.trim())
|
||||
return;
|
||||
const cache = JSON.parse(content);
|
||||
let needsSave = false;
|
||||
|
||||
for (const key in cache) {
|
||||
if (!validKeys.includes(key)) {
|
||||
console.log("CacheData: Removing unused key:", key)
|
||||
delete cache[key]
|
||||
needsSave = true
|
||||
console.log("CacheData: Removing unused key:", key);
|
||||
delete cache[key];
|
||||
needsSave = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (needsSave) {
|
||||
cacheFile.setText(JSON.stringify(cache, null, 2))
|
||||
cacheFile.setText(JSON.stringify(cache, null, 2));
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn("CacheData: Failed to cleanup unused keys:", e.message)
|
||||
console.warn("CacheData: Failed to cleanup unused keys:", e.message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -194,12 +188,12 @@ Singleton {
|
||||
watchChanges: !isGreeterMode
|
||||
onLoaded: {
|
||||
if (!isGreeterMode) {
|
||||
parseCache(cacheFile.text())
|
||||
parseCache(cacheFile.text());
|
||||
}
|
||||
}
|
||||
onLoadFailed: error => {
|
||||
if (!isGreeterMode) {
|
||||
console.info("CacheData: No cache file found, starting fresh")
|
||||
console.info("CacheData: No cache file found, starting fresh");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,28 +1,24 @@
|
||||
import Quickshell
|
||||
pragma Singleton
|
||||
pragma ComponentBehavior: Bound
|
||||
import Quickshell
|
||||
|
||||
Singleton {
|
||||
id: root
|
||||
|
||||
// Clear all image cache
|
||||
function clearImageCache() {
|
||||
Quickshell.execDetached(["rm", "-rf", Paths.stringify(
|
||||
Paths.imagecache)])
|
||||
Paths.mkdir(Paths.imagecache)
|
||||
Quickshell.execDetached(["rm", "-rf", Paths.stringify(Paths.imagecache)]);
|
||||
Paths.mkdir(Paths.imagecache);
|
||||
}
|
||||
|
||||
// Clear cache older than specified minutes
|
||||
function clearOldCache(ageInMinutes) {
|
||||
Quickshell.execDetached(
|
||||
["find", Paths.stringify(
|
||||
Paths.imagecache), "-name", "*.png", "-mmin", `+${ageInMinutes}`, "-delete"])
|
||||
Quickshell.execDetached(["find", Paths.stringify(Paths.imagecache), "-name", "*.png", "-mmin", `+${ageInMinutes}`, "-delete"]);
|
||||
}
|
||||
|
||||
// Clear cache for specific size
|
||||
function clearCacheForSize(size) {
|
||||
Quickshell.execDetached(
|
||||
["find", Paths.stringify(
|
||||
Paths.imagecache), "-name", `*@${size}x${size}.png`, "-delete"])
|
||||
Quickshell.execDetached(["find", Paths.stringify(Paths.imagecache), "-name", `*@${size}x${size}.png`, "-delete"]);
|
||||
}
|
||||
|
||||
// Get cache size in MB
|
||||
@@ -30,8 +26,7 @@ Singleton {
|
||||
var process = Qt.createQmlObject(`
|
||||
import Quickshell.Io
|
||||
Process {
|
||||
command: ["du", "-sm", "${Paths.stringify(
|
||||
Paths.imagecache)}"]
|
||||
command: ["du", "-sm", "${Paths.stringify(Paths.imagecache)}"]
|
||||
running: true
|
||||
stdout: StdioCollector {
|
||||
onStreamFinished: {
|
||||
@@ -40,6 +35,6 @@ Singleton {
|
||||
}
|
||||
}
|
||||
}
|
||||
`, root)
|
||||
`, root);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,27 +1,26 @@
|
||||
pragma Singleton
|
||||
pragma ComponentBehavior: Bound
|
||||
import QtQuick
|
||||
import Qt.labs.folderlistmodel
|
||||
import Quickshell
|
||||
import Quickshell.Io
|
||||
pragma Singleton
|
||||
pragma ComponentBehavior: Bound
|
||||
|
||||
Singleton {
|
||||
id: root
|
||||
|
||||
readonly property string _rawLocale: Qt.locale().name
|
||||
readonly property string _lang: _rawLocale.split(/[_-]/)[0]
|
||||
readonly property var _candidates: {
|
||||
readonly property var _candidates: {
|
||||
const fullUnderscore = _rawLocale;
|
||||
const fullHyphen = _rawLocale.replace("_", "-");
|
||||
const fullHyphen = _rawLocale.replace("_", "-");
|
||||
return [fullUnderscore, fullHyphen, _lang].filter(c => c && c !== "en");
|
||||
}
|
||||
|
||||
|
||||
readonly property url translationsFolder: Qt.resolvedUrl("../translations/poexports")
|
||||
|
||||
property string currentLocale: "en"
|
||||
property var translations: ({})
|
||||
property bool translationsLoaded: false
|
||||
property var translations: ({})
|
||||
property bool translationsLoaded: false
|
||||
|
||||
property url _selectedPath: ""
|
||||
|
||||
@@ -32,7 +31,8 @@ Singleton {
|
||||
showDirs: false
|
||||
showDotAndDotDot: false
|
||||
|
||||
onStatusChanged: if (status === FolderListModel.Ready) root._pickTranslation()
|
||||
onStatusChanged: if (status === FolderListModel.Ready)
|
||||
root._pickTranslation()
|
||||
}
|
||||
|
||||
FileView {
|
||||
@@ -41,73 +41,75 @@ Singleton {
|
||||
|
||||
onLoaded: {
|
||||
try {
|
||||
root.translations = JSON.parse(text())
|
||||
root.translationsLoaded = true
|
||||
console.info(`I18n: Loaded translations for '${root.currentLocale}' ` +
|
||||
`(${Object.keys(root.translations).length} contexts)`)
|
||||
root.translations = JSON.parse(text());
|
||||
root.translationsLoaded = true;
|
||||
console.info(`I18n: Loaded translations for '${root.currentLocale}' ` + `(${Object.keys(root.translations).length} contexts)`);
|
||||
} catch (e) {
|
||||
console.warn(`I18n: Error parsing '${root.currentLocale}':`, e,
|
||||
"- falling back to English")
|
||||
root._fallbackToEnglish()
|
||||
console.warn(`I18n: Error parsing '${root.currentLocale}':`, e, "- falling back to English");
|
||||
root._fallbackToEnglish();
|
||||
}
|
||||
}
|
||||
|
||||
onLoadFailed: (error) => {
|
||||
console.warn(`I18n: Failed to load '${root.currentLocale}' (${error}), ` +
|
||||
"falling back to English")
|
||||
root._fallbackToEnglish()
|
||||
onLoadFailed: error => {
|
||||
console.warn(`I18n: Failed to load '${root.currentLocale}' (${error}), ` + "falling back to English");
|
||||
root._fallbackToEnglish();
|
||||
}
|
||||
}
|
||||
|
||||
function _pickTranslation() {
|
||||
const present = new Set()
|
||||
const present = new Set();
|
||||
for (let i = 0; i < dir.count; i++) {
|
||||
const name = dir.get(i, "fileName") // e.g. "zh_CN.json"
|
||||
const name = dir.get(i, "fileName"); // e.g. "zh_CN.json"
|
||||
if (name && name.endsWith(".json")) {
|
||||
present.add(name.slice(0, -5))
|
||||
present.add(name.slice(0, -5));
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < _candidates.length; i++) {
|
||||
const cand = _candidates[i]
|
||||
const cand = _candidates[i];
|
||||
if (present.has(cand)) {
|
||||
_useLocale(cand, dir.folder + "/" + cand + ".json")
|
||||
return
|
||||
_useLocale(cand, dir.folder + "/" + cand + ".json");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
_fallbackToEnglish()
|
||||
_fallbackToEnglish();
|
||||
}
|
||||
|
||||
function _useLocale(localeTag, fileUrl) {
|
||||
currentLocale = localeTag
|
||||
_selectedPath = fileUrl
|
||||
translationsLoaded = false
|
||||
translations = ({})
|
||||
console.info(`I18n: Using locale '${localeTag}' from ${fileUrl}`)
|
||||
currentLocale = localeTag;
|
||||
_selectedPath = fileUrl;
|
||||
translationsLoaded = false;
|
||||
translations = ({});
|
||||
console.info(`I18n: Using locale '${localeTag}' from ${fileUrl}`);
|
||||
}
|
||||
|
||||
function _fallbackToEnglish() {
|
||||
currentLocale = "en"
|
||||
_selectedPath = ""
|
||||
translationsLoaded = false
|
||||
translations = ({})
|
||||
console.warn("I18n: Falling back to built-in English strings")
|
||||
currentLocale = "en";
|
||||
_selectedPath = "";
|
||||
translationsLoaded = false;
|
||||
translations = ({});
|
||||
console.warn("I18n: Falling back to built-in English strings");
|
||||
}
|
||||
|
||||
function tr(term, context) {
|
||||
if (!translationsLoaded || !translations) return term
|
||||
const ctx = context || term
|
||||
if (translations[ctx] && translations[ctx][term]) return translations[ctx][term]
|
||||
if (!translationsLoaded || !translations)
|
||||
return term;
|
||||
const ctx = context || term;
|
||||
if (translations[ctx] && translations[ctx][term])
|
||||
return translations[ctx][term];
|
||||
for (const c in translations) {
|
||||
if (translations[c] && translations[c][term]) return translations[c][term]
|
||||
if (translations[c] && translations[c][term])
|
||||
return translations[c][term];
|
||||
}
|
||||
return term
|
||||
return term;
|
||||
}
|
||||
|
||||
function trContext(context, term) {
|
||||
if (!translationsLoaded || !translations) return term
|
||||
if (translations[context] && translations[context][term]) return translations[context][term]
|
||||
return term
|
||||
if (!translationsLoaded || !translations)
|
||||
return term;
|
||||
if (translations[context] && translations[context][term])
|
||||
return translations[context][term];
|
||||
return term;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
pragma Singleton
|
||||
pragma ComponentBehavior: Bound
|
||||
|
||||
import Quickshell
|
||||
import QtQuick
|
||||
@@ -10,11 +11,11 @@ Singleton {
|
||||
|
||||
function openModal(modal) {
|
||||
if (!modal.allowStacking) {
|
||||
closeAllModalsExcept(modal)
|
||||
closeAllModalsExcept(modal);
|
||||
}
|
||||
if (!modal.keepPopoutsOpen) {
|
||||
PopoutManager.closeAllPopouts()
|
||||
PopoutManager.closeAllPopouts();
|
||||
}
|
||||
TrayMenuManager.closeAllMenus()
|
||||
TrayMenuManager.closeAllMenus();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
pragma Singleton
|
||||
pragma ComponentBehavior: Bound
|
||||
|
||||
import Quickshell
|
||||
import QtQuick
|
||||
@@ -10,15 +11,14 @@ Singleton {
|
||||
|
||||
function showOSD(osd) {
|
||||
if (!osd || !osd.screen)
|
||||
return
|
||||
|
||||
const screenName = osd.screen.name
|
||||
const currentOSD = currentOSDsByScreen[screenName]
|
||||
return;
|
||||
const screenName = osd.screen.name;
|
||||
const currentOSD = currentOSDsByScreen[screenName];
|
||||
|
||||
if (currentOSD && currentOSD !== osd) {
|
||||
currentOSD.hide()
|
||||
currentOSD.hide();
|
||||
}
|
||||
|
||||
currentOSDsByScreen[screenName] = osd
|
||||
currentOSDsByScreen[screenName] = osd;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
pragma Singleton
|
||||
pragma ComponentBehavior: Bound
|
||||
|
||||
import Quickshell
|
||||
import QtCore
|
||||
@@ -6,60 +7,74 @@ import QtCore
|
||||
Singleton {
|
||||
id: root
|
||||
|
||||
readonly property url home: StandardPaths.standardLocations(
|
||||
StandardPaths.HomeLocation)[0]
|
||||
readonly property url pictures: StandardPaths.standardLocations(
|
||||
StandardPaths.PicturesLocation)[0]
|
||||
readonly property url home: StandardPaths.standardLocations(StandardPaths.HomeLocation)[0]
|
||||
readonly property url pictures: StandardPaths.standardLocations(StandardPaths.PicturesLocation)[0]
|
||||
|
||||
readonly property url data: `${StandardPaths.standardLocations(
|
||||
StandardPaths.GenericDataLocation)[0]}/DankMaterialShell`
|
||||
readonly property url state: `${StandardPaths.standardLocations(
|
||||
StandardPaths.GenericStateLocation)[0]}/DankMaterialShell`
|
||||
readonly property url cache: `${StandardPaths.standardLocations(
|
||||
StandardPaths.GenericCacheLocation)[0]}/DankMaterialShell`
|
||||
readonly property url config: `${StandardPaths.standardLocations(
|
||||
StandardPaths.GenericConfigLocation)[0]}/DankMaterialShell`
|
||||
readonly property url data: `${StandardPaths.standardLocations(StandardPaths.GenericDataLocation)[0]}/DankMaterialShell`
|
||||
readonly property url state: `${StandardPaths.standardLocations(StandardPaths.GenericStateLocation)[0]}/DankMaterialShell`
|
||||
readonly property url cache: `${StandardPaths.standardLocations(StandardPaths.GenericCacheLocation)[0]}/DankMaterialShell`
|
||||
readonly property url config: `${StandardPaths.standardLocations(StandardPaths.GenericConfigLocation)[0]}/DankMaterialShell`
|
||||
|
||||
readonly property url imagecache: `${cache}/imagecache`
|
||||
|
||||
function stringify(path: url): string {
|
||||
return path.toString().replace(/%20/g, " ")
|
||||
return path.toString().replace(/%20/g, " ");
|
||||
}
|
||||
|
||||
function expandTilde(path: string): string {
|
||||
return strip(path.replace("~", stringify(root.home)))
|
||||
return strip(path.replace("~", stringify(root.home)));
|
||||
}
|
||||
|
||||
function shortenHome(path: string): string {
|
||||
return path.replace(strip(root.home), "~")
|
||||
return path.replace(strip(root.home), "~");
|
||||
}
|
||||
|
||||
function strip(path: url): string {
|
||||
return stringify(path).replace("file://", "")
|
||||
return stringify(path).replace("file://", "");
|
||||
}
|
||||
|
||||
function toFileUrl(path: string): string {
|
||||
return path.startsWith("file://") ? path : "file://" + path
|
||||
return path.startsWith("file://") ? path : "file://" + path;
|
||||
}
|
||||
|
||||
function mkdir(path: url): void {
|
||||
Quickshell.execDetached(["mkdir", "-p", strip(path)])
|
||||
Quickshell.execDetached(["mkdir", "-p", strip(path)]);
|
||||
}
|
||||
|
||||
function copy(from: url, to: url): void {
|
||||
Quickshell.execDetached(["cp", strip(from), strip(to)])
|
||||
Quickshell.execDetached(["cp", strip(from), strip(to)]);
|
||||
}
|
||||
|
||||
// ! Spotify and maybe some other apps report the wrong app id in toplevels, hardcode special case
|
||||
function moddedAppId(appId: string): string {
|
||||
if (appId === "Spotify")
|
||||
return "spotify"
|
||||
return "spotify";
|
||||
if (appId === "beepertexts")
|
||||
return "beeper"
|
||||
return "beeper";
|
||||
if (appId === "home assistant desktop")
|
||||
return "homeassistant-desktop"
|
||||
return "homeassistant-desktop";
|
||||
if (appId.includes("com.transmissionbt.transmission"))
|
||||
return "transmission-gtk"
|
||||
return appId
|
||||
return "transmission-gtk";
|
||||
return appId;
|
||||
}
|
||||
|
||||
function getAppIcon(appId: string, desktopEntry: var): string {
|
||||
if (appId === "org.quickshell") {
|
||||
return Qt.resolvedUrl("../assets/danklogo.svg");
|
||||
}
|
||||
|
||||
const moddedId = moddedAppId(appId);
|
||||
if (moddedId.toLowerCase().includes("steam_app")) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return desktopEntry && desktopEntry.icon ? Quickshell.iconPath(desktopEntry.icon, true) : "";
|
||||
}
|
||||
|
||||
function getAppName(appId: string, desktopEntry: var): string {
|
||||
if (appId === "org.quickshell") {
|
||||
return "dms";
|
||||
}
|
||||
|
||||
return desktopEntry && desktopEntry.name ? desktopEntry.name : appId;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,155 +9,167 @@ Singleton {
|
||||
property var currentPopoutsByScreen: ({})
|
||||
property var currentPopoutTriggers: ({})
|
||||
|
||||
function showPopout(popout) {
|
||||
if (!popout || !popout.screen) return
|
||||
signal popoutOpening
|
||||
signal popoutChanged
|
||||
|
||||
const screenName = popout.screen.name
|
||||
function showPopout(popout) {
|
||||
if (!popout || !popout.screen)
|
||||
return;
|
||||
popoutOpening();
|
||||
|
||||
const screenName = popout.screen.name;
|
||||
|
||||
for (const otherScreenName in currentPopoutsByScreen) {
|
||||
const otherPopout = currentPopoutsByScreen[otherScreenName]
|
||||
if (!otherPopout || otherPopout === popout) continue
|
||||
|
||||
const otherPopout = currentPopoutsByScreen[otherScreenName];
|
||||
if (!otherPopout || otherPopout === popout)
|
||||
continue;
|
||||
if (otherPopout.dashVisible !== undefined) {
|
||||
otherPopout.dashVisible = false
|
||||
otherPopout.dashVisible = false;
|
||||
} else if (otherPopout.notificationHistoryVisible !== undefined) {
|
||||
otherPopout.notificationHistoryVisible = false
|
||||
otherPopout.notificationHistoryVisible = false;
|
||||
} else {
|
||||
otherPopout.close()
|
||||
otherPopout.close();
|
||||
}
|
||||
}
|
||||
|
||||
currentPopoutsByScreen[screenName] = popout
|
||||
ModalManager.closeAllModalsExcept(null)
|
||||
TrayMenuManager.closeAllMenus()
|
||||
currentPopoutsByScreen[screenName] = popout;
|
||||
popoutChanged();
|
||||
ModalManager.closeAllModalsExcept(null);
|
||||
}
|
||||
|
||||
function hidePopout(popout) {
|
||||
if (!popout || !popout.screen) return
|
||||
|
||||
const screenName = popout.screen.name
|
||||
if (!popout || !popout.screen)
|
||||
return;
|
||||
const screenName = popout.screen.name;
|
||||
if (currentPopoutsByScreen[screenName] === popout) {
|
||||
currentPopoutsByScreen[screenName] = null
|
||||
currentPopoutTriggers[screenName] = null
|
||||
currentPopoutsByScreen[screenName] = null;
|
||||
currentPopoutTriggers[screenName] = null;
|
||||
popoutChanged();
|
||||
}
|
||||
}
|
||||
|
||||
function closeAllPopouts() {
|
||||
for (const screenName in currentPopoutsByScreen) {
|
||||
const popout = currentPopoutsByScreen[screenName]
|
||||
if (!popout) continue
|
||||
|
||||
const popout = currentPopoutsByScreen[screenName];
|
||||
if (!popout)
|
||||
continue;
|
||||
if (popout.dashVisible !== undefined) {
|
||||
popout.dashVisible = false
|
||||
popout.dashVisible = false;
|
||||
} else if (popout.notificationHistoryVisible !== undefined) {
|
||||
popout.notificationHistoryVisible = false
|
||||
popout.notificationHistoryVisible = false;
|
||||
} else {
|
||||
popout.close()
|
||||
popout.close();
|
||||
}
|
||||
}
|
||||
currentPopoutsByScreen = {}
|
||||
currentPopoutsByScreen = {};
|
||||
}
|
||||
|
||||
function getActivePopout(screen) {
|
||||
if (!screen) return null
|
||||
return currentPopoutsByScreen[screen.name] || null
|
||||
if (!screen)
|
||||
return null;
|
||||
return currentPopoutsByScreen[screen.name] || null;
|
||||
}
|
||||
|
||||
function requestPopout(popout, tabIndex, triggerSource) {
|
||||
if (!popout || !popout.screen) return
|
||||
if (!popout || !popout.screen)
|
||||
return;
|
||||
const screenName = popout.screen.name;
|
||||
const currentPopout = currentPopoutsByScreen[screenName];
|
||||
const triggerId = triggerSource !== undefined ? triggerSource : tabIndex;
|
||||
|
||||
const screenName = popout.screen.name
|
||||
const currentPopout = currentPopoutsByScreen[screenName]
|
||||
const triggerId = triggerSource !== undefined ? triggerSource : tabIndex
|
||||
const willOpen = !(currentPopout === popout && popout.shouldBeVisible && triggerId !== undefined && currentPopoutTriggers[screenName] === triggerId);
|
||||
if (willOpen) {
|
||||
popoutOpening();
|
||||
}
|
||||
|
||||
let justClosedSamePopout = false
|
||||
let justClosedSamePopout = false;
|
||||
for (const otherScreenName in currentPopoutsByScreen) {
|
||||
if (otherScreenName === screenName) continue
|
||||
const otherPopout = currentPopoutsByScreen[otherScreenName]
|
||||
if (!otherPopout) continue
|
||||
|
||||
if (otherScreenName === screenName)
|
||||
continue;
|
||||
const otherPopout = currentPopoutsByScreen[otherScreenName];
|
||||
if (!otherPopout)
|
||||
continue;
|
||||
if (otherPopout === popout) {
|
||||
justClosedSamePopout = true
|
||||
justClosedSamePopout = true;
|
||||
}
|
||||
|
||||
if (otherPopout.dashVisible !== undefined) {
|
||||
otherPopout.dashVisible = false
|
||||
otherPopout.dashVisible = false;
|
||||
} else if (otherPopout.notificationHistoryVisible !== undefined) {
|
||||
otherPopout.notificationHistoryVisible = false
|
||||
otherPopout.notificationHistoryVisible = false;
|
||||
} else {
|
||||
otherPopout.close()
|
||||
otherPopout.close();
|
||||
}
|
||||
}
|
||||
|
||||
if (currentPopout && currentPopout !== popout) {
|
||||
if (currentPopout.dashVisible !== undefined) {
|
||||
currentPopout.dashVisible = false
|
||||
currentPopout.dashVisible = false;
|
||||
} else if (currentPopout.notificationHistoryVisible !== undefined) {
|
||||
currentPopout.notificationHistoryVisible = false
|
||||
currentPopout.notificationHistoryVisible = false;
|
||||
} else {
|
||||
currentPopout.close()
|
||||
currentPopout.close();
|
||||
}
|
||||
}
|
||||
|
||||
if (currentPopout === popout && popout.shouldBeVisible) {
|
||||
if (triggerId !== undefined && currentPopoutTriggers[screenName] === triggerId) {
|
||||
if (popout.dashVisible !== undefined) {
|
||||
popout.dashVisible = false
|
||||
popout.dashVisible = false;
|
||||
} else if (popout.notificationHistoryVisible !== undefined) {
|
||||
popout.notificationHistoryVisible = false
|
||||
popout.notificationHistoryVisible = false;
|
||||
} else {
|
||||
popout.close()
|
||||
popout.close();
|
||||
}
|
||||
return
|
||||
return;
|
||||
}
|
||||
|
||||
if (triggerId === undefined) {
|
||||
if (popout.dashVisible !== undefined) {
|
||||
popout.dashVisible = false
|
||||
popout.dashVisible = false;
|
||||
} else if (popout.notificationHistoryVisible !== undefined) {
|
||||
popout.notificationHistoryVisible = false
|
||||
popout.notificationHistoryVisible = false;
|
||||
} else {
|
||||
popout.close()
|
||||
popout.close();
|
||||
}
|
||||
return
|
||||
return;
|
||||
}
|
||||
|
||||
if (tabIndex !== undefined && popout.currentTabIndex !== undefined) {
|
||||
popout.currentTabIndex = tabIndex
|
||||
popout.currentTabIndex = tabIndex;
|
||||
}
|
||||
currentPopoutTriggers[screenName] = triggerId
|
||||
return
|
||||
currentPopoutTriggers[screenName] = triggerId;
|
||||
}
|
||||
|
||||
currentPopoutTriggers[screenName] = triggerId
|
||||
currentPopoutsByScreen[screenName] = popout
|
||||
currentPopoutTriggers[screenName] = triggerId;
|
||||
currentPopoutsByScreen[screenName] = popout;
|
||||
popoutChanged();
|
||||
|
||||
if (tabIndex !== undefined && popout.currentTabIndex !== undefined) {
|
||||
popout.currentTabIndex = tabIndex
|
||||
popout.currentTabIndex = tabIndex;
|
||||
}
|
||||
|
||||
if (currentPopout !== popout) {
|
||||
ModalManager.closeAllModalsExcept(null)
|
||||
ModalManager.closeAllModalsExcept(null);
|
||||
}
|
||||
TrayMenuManager.closeAllMenus()
|
||||
|
||||
if (justClosedSamePopout) {
|
||||
Qt.callLater(() => {
|
||||
if (popout.dashVisible !== undefined) {
|
||||
popout.dashVisible = true
|
||||
popout.dashVisible = true;
|
||||
} else if (popout.notificationHistoryVisible !== undefined) {
|
||||
popout.notificationHistoryVisible = true
|
||||
popout.notificationHistoryVisible = true;
|
||||
} else {
|
||||
popout.open()
|
||||
popout.open();
|
||||
}
|
||||
})
|
||||
});
|
||||
} else {
|
||||
if (popout.dashVisible !== undefined) {
|
||||
popout.dashVisible = true
|
||||
popout.dashVisible = true;
|
||||
} else if (popout.notificationHistoryVisible !== undefined) {
|
||||
popout.notificationHistoryVisible = true
|
||||
popout.notificationHistoryVisible = true;
|
||||
} else {
|
||||
popout.open()
|
||||
popout.open();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,13 +67,21 @@ Singleton {
|
||||
})
|
||||
|
||||
out.streamFinished.connect(function() {
|
||||
capturedOut = out.text || ""
|
||||
try {
|
||||
capturedOut = out.text || ""
|
||||
} catch (e) {
|
||||
capturedOut = ""
|
||||
}
|
||||
outSeen = true
|
||||
maybeComplete()
|
||||
})
|
||||
|
||||
err.streamFinished.connect(function() {
|
||||
capturedErr = err.text || ""
|
||||
try {
|
||||
capturedErr = err.text || ""
|
||||
} catch (e) {
|
||||
capturedErr = ""
|
||||
}
|
||||
errSeen = true
|
||||
maybeComplete()
|
||||
})
|
||||
@@ -88,8 +96,14 @@ Singleton {
|
||||
function maybeComplete() {
|
||||
if (!exitSeen || !outSeen || !errSeen) return
|
||||
timeoutTimer.stop()
|
||||
if (typeof entry.callback === "function") {
|
||||
try { entry.callback(capturedOut, exitCodeValue) } catch (e) { console.warn("runCommand callback error:", e) }
|
||||
if (entry && entry.callback && typeof entry.callback === "function") {
|
||||
try {
|
||||
const safeOutput = capturedOut !== null && capturedOut !== undefined ? capturedOut : ""
|
||||
const safeExitCode = exitCodeValue !== null && exitCodeValue !== undefined ? exitCodeValue : -1
|
||||
entry.callback(safeOutput, safeExitCode)
|
||||
} catch (e) {
|
||||
console.warn("runCommand callback error for command:", entry.command, "Error:", e)
|
||||
}
|
||||
}
|
||||
try { proc.destroy() } catch (_) {}
|
||||
try { timeoutTimer.destroy() } catch (_) {}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,16 +2,16 @@
// Separated from Theme.qml to keep that file clean

const CatppuccinMocha = {
-    surface: "#313244",
+    surface: "#181825",
    surfaceText: "#cdd6f4",
-    surfaceVariant: "#313244",
+    surfaceVariant: "#1e1e2e",
    surfaceVariantText: "#a6adc8",
-    background: "#1e1e2e",
+    background: "#181825",
    backgroundText: "#cdd6f4",
    outline: "#6c7086",
-    surfaceContainer: "#45475a",
-    surfaceContainerHigh: "#585b70",
-    surfaceContainerHighest: "#6c7086"
+    surfaceContainer: "#1e1e2e",
+    surfaceContainerHigh: "#313244",
+    surfaceContainerHighest: "#45475a"
}

const CatppuccinLatte = {
File diff suppressed because it is too large
@@ -6,26 +6,30 @@ import QtQuick
Singleton {
    id: root

-    property var activeTrayBars: ({})
+    property var activeTrayMenus: ({})

-    function register(screenName, trayBar) {
-        if (!screenName || !trayBar) return
-        activeTrayBars[screenName] = trayBar
+    function registerMenu(screenName, menu) {
+        if (!screenName || !menu) return
+        const newMenus = Object.assign({}, activeTrayMenus)
+        newMenus[screenName] = menu
+        activeTrayMenus = newMenus
    }

-    function unregister(screenName) {
+    function unregisterMenu(screenName) {
        if (!screenName) return
-        delete activeTrayBars[screenName]
+        const newMenus = Object.assign({}, activeTrayMenus)
+        delete newMenus[screenName]
+        activeTrayMenus = newMenus
    }

    function closeAllMenus() {
-        for (const screenName in activeTrayBars) {
-            const trayBar = activeTrayBars[screenName]
-            if (!trayBar) continue
-
-            trayBar.menuOpen = false
-            if (trayBar.currentTrayMenu) {
-                trayBar.currentTrayMenu.showMenu = false
+        for (const screenName in activeTrayMenus) {
+            const menu = activeTrayMenus[screenName]
+            if (!menu) continue
+            if (typeof menu.close === "function") {
+                menu.close()
+            } else if (menu.showMenu !== undefined) {
+                menu.showMenu = false
            }
        }
    }
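The TrayMenuManager rewrite above stops mutating a registry object in place and instead rebuilds activeTrayMenus with Object.assign and reassigns the property. In QML, reassigning a property var emits its changed signal, while in-place mutation does not, so anything bound to the registry updates reliably. A small self-contained illustration of that copy-and-reassign pattern (not taken from the shell):

    import QtQuick

    Item {
        property var registry: ({})

        function add(key, value) {
            const next = Object.assign({}, registry)   // shallow copy of the map
            next[key] = value
            registry = next                            // reassignment fires registryChanged()
        }

        onRegistryChanged: console.log("registry entries:", Object.keys(registry).length)
    }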
@@ -12,12 +12,11 @@ var SPEC = {
    runUserMatugenTemplates: { def: true, onChange: "regenSystemThemes" },
    matugenTargetMonitor: { def: "", onChange: "regenSystemThemes" },

-    dankBarTransparency: { def: 1.0, coerce: percentToUnit, migrate: ["topBarTransparency"] },
-    dankBarWidgetTransparency: { def: 1.0, coerce: percentToUnit, migrate: ["topBarWidgetTransparency"] },
    popupTransparency: { def: 1.0, coerce: percentToUnit },
    dockTransparency: { def: 1.0, coerce: percentToUnit },

    widgetBackgroundColor: { def: "sch" },
    widgetColorMode: { def: "default" },
    cornerRadius: { def: 12, onChange: "updateNiriLayout" },

    use24HourClock: { def: true },
@@ -52,6 +51,11 @@ var SPEC = {
    controlCenterShowNetworkIcon: { def: true },
    controlCenterShowBluetoothIcon: { def: true },
    controlCenterShowAudioIcon: { def: true },
+    controlCenterShowVpnIcon: { def: false },
+    controlCenterShowBrightnessIcon: { def: false },
+    controlCenterShowMicIcon: { def: false },
+    controlCenterShowBatteryIcon: { def: false },
+    controlCenterShowPrinterIcon: { def: false },

    showPrivacyButton: { def: true },
    privacyShowMicIcon: { def: false },
@@ -75,6 +79,7 @@ var SPEC = {
    showWorkspaceApps: { def: false },
    maxWorkspaceIcons: { def: 3 },
    workspacesPerMonitor: { def: true },
    showOccupiedWorkspacesOnly: { def: false },
+    dwlShowAllTags: { def: false },
    workspaceNameIcons: { def: {} },
    waveProgressEnabled: { def: true },
@@ -88,15 +93,11 @@ var SPEC = {
    lockDateFormat: { def: "" },
    mediaSize: { def: 1 },

-    dankBarLeftWidgets: { def: ["launcherButton", "workspaceSwitcher", "focusedWindow"], migrate: ["topBarLeftWidgets"] },
-    dankBarCenterWidgets: { def: ["music", "clock", "weather"], migrate: ["topBarCenterWidgets"] },
-    dankBarRightWidgets: { def: ["systemTray", "clipboard", "cpuUsage", "memUsage", "notificationButton", "battery", "controlCenterButton"], migrate: ["topBarRightWidgets"] },
-    dankBarWidgetOrder: { def: [] },

    appLauncherViewMode: { def: "list" },
    spotlightModalViewMode: { def: "list" },
    sortAppsAlphabetically: { def: false },
    appLauncherGridColumns: { def: 4 },
    spotlightCloseNiriOverview: { def: true },

    weatherLocation: { def: "New York, NY" },
    weatherCoordinates: { def: "40.7128,-74.0060" },
@@ -125,7 +126,6 @@ var SPEC = {
    monoFontFamily: { def: "Fira Code" },
    fontWeight: { def: 400 },
    fontScale: { def: 1.0 },
-    dankBarFontScale: { def: 1.0 },

    notepadUseMonospace: { def: true },
    notepadFontFamily: { def: "" },
@@ -151,6 +151,8 @@ var SPEC = {
    lockBeforeSuspend: { def: false },
    preventIdleForMedia: { def: false },
    loginctlLockIntegration: { def: true },
+    fadeToLockEnabled: { def: false },
+    fadeToLockGracePeriod: { def: 5 },
    launchPrefix: { def: "" },
    brightnessDevicePins: { def: {} },
    wifiNetworkPins: { def: {} },
@@ -175,31 +177,9 @@ var SPEC = {
    dockIndicatorStyle: { def: "circle" },

    notificationOverlayEnabled: { def: false },
-    dankBarAutoHide: { def: false, migrate: ["topBarAutoHide"] },
-    dankBarAutoHideDelay: { def: 250 },
-    dankBarOpenOnOverview: { def: false, migrate: ["topBarOpenOnOverview"] },
-    dankBarVisible: { def: true, migrate: ["topBarVisible"] },
    overviewRows: { def: 2, persist: false },
    overviewColumns: { def: 5, persist: false },
    overviewScale: { def: 0.16, persist: false },
-    dankBarSpacing: { def: 4, migrate: ["topBarSpacing"], onChange: "updateNiriLayout" },
-    dankBarBottomGap: { def: 0, migrate: ["topBarBottomGap"] },
-    dankBarInnerPadding: { def: 4, migrate: ["topBarInnerPadding"] },
-    dankBarPosition: { def: 0, migrate: ["dankBarAtBottom", "topBarAtBottom"] },
-    dankBarIsVertical: { def: false, persist: false },
-
-    dankBarSquareCorners: { def: false, migrate: ["topBarSquareCorners"] },
-    dankBarNoBackground: { def: false, migrate: ["topBarNoBackground"] },
-    dankBarGothCornersEnabled: { def: false, migrate: ["topBarGothCornersEnabled"] },
-    dankBarGothCornerRadiusOverride: { def: false },
-    dankBarGothCornerRadiusValue: { def: 12 },
-    dankBarBorderEnabled: { def: false },
-    dankBarBorderColor: { def: "surfaceText" },
-    dankBarBorderOpacity: { def: 1.0 },
-    dankBarBorderThickness: { def: 1 },
-
-    popupGapsAuto: { def: true },
-    popupGapsManual: { def: 4 },

    modalDarkenBackground: { def: true },
@@ -217,11 +197,12 @@ var SPEC = {
    osdAlwaysShowValue: { def: false },
    osdPosition: { def: 5 },
    osdVolumeEnabled: { def: true },
+    osdMediaVolumeEnabled : { def: true },
    osdBrightnessEnabled: { def: true },
    osdIdleInhibitorEnabled: { def: true },
    osdMicMuteEnabled: { def: true },
    osdCapsLockEnabled: { def: true },
-    osdPowerProfileEnabled: { def: true },
+    osdPowerProfileEnabled: { def: false },

    powerActionConfirm: { def: true },
    powerMenuActions: { def: ["reboot", "logout", "poweroff", "lock", "suspend", "restart"] },
@@ -240,7 +221,44 @@ var SPEC = {

    displayNameMode: { def: "system" },
    screenPreferences: { def: {} },
-    showOnLastDisplay: { def: {} }
+    showOnLastDisplay: { def: {} },
+
+    barConfigs: { def: [{
+        id: "default",
+        name: "Main Bar",
+        enabled: true,
+        position: 0,
+        screenPreferences: ["all"],
+        showOnLastDisplay: true,
+        leftWidgets: ["launcherButton", "workspaceSwitcher", "focusedWindow"],
+        centerWidgets: ["music", "clock", "weather"],
+        rightWidgets: ["systemTray", "clipboard", "cpuUsage", "memUsage", "notificationButton", "battery", "controlCenterButton"],
+        spacing: 4,
+        innerPadding: 4,
+        bottomGap: 0,
+        transparency: 1.0,
+        widgetTransparency: 1.0,
+        squareCorners: false,
+        noBackground: false,
+        gothCornersEnabled: false,
+        gothCornerRadiusOverride: false,
+        gothCornerRadiusValue: 12,
+        borderEnabled: false,
+        borderColor: "surfaceText",
+        borderOpacity: 1.0,
+        borderThickness: 1,
+        widgetOutlineEnabled: false,
+        widgetOutlineColor: "primary",
+        widgetOutlineOpacity: 1.0,
+        widgetOutlineThickness: 1,
+        fontScale: 1.0,
+        autoHide: false,
+        autoHideDelay: 250,
+        openOnOverview: false,
+        visible: true,
+        popupGapsAuto: true,
+        popupGapsManual: 4
+    }], onChange: "updateBarConfigs" }
};

function getValidKeys() {
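The new barConfigs entry collapses the removed per-bar dankBar* scalars into an array of bar objects, each carrying its own widgets, geometry, and styling, so more than one bar can be described. Purely to illustrate the shape (the values and the second-bar scenario are hypothetical, not something this diff configures), an extra entry would reuse the same fields as the default above:

    // Hypothetical second bar using the fields from barConfigs' default entry
    var secondaryBar = {
        id: "secondary",
        name: "Bottom Bar",
        enabled: true,
        position: 1,                      // the migration code maps bottom bars to position 1
        screenPreferences: ["DP-2"],      // illustrative output name
        showOnLastDisplay: false,
        leftWidgets: ["workspaceSwitcher"],
        centerWidgets: ["clock"],
        rightWidgets: ["systemTray", "battery"],
        spacing: 4,
        innerPadding: 4,
        transparency: 1.0,
        autoHide: true,
        autoHideDelay: 250,
        visible: true
    };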
@@ -5,6 +5,7 @@
function parse(root, jsonObj) {
    var SPEC = SpecModule.SPEC;
    for (var k in SPEC) {
+        if (k === "pluginSettings") continue;
        var spec = SPEC[k];
        root[k] = spec.def;
    }
@@ -13,6 +14,7 @@ function parse(root, jsonObj) {

    for (var k in jsonObj) {
        if (!SPEC[k]) continue;
+        if (k === "pluginSettings") continue;
        var raw = jsonObj[k];
        var spec = SPEC[k];
        var coerce = spec.coerce;
@@ -25,72 +27,93 @@ function toJson(root) {
    var out = {};
    for (var k in SPEC) {
        if (SPEC[k].persist === false) continue;
        if (k === "pluginSettings") continue;
        out[k] = root[k];
    }
    out.configVersion = root.settingsConfigVersion;
    return out;
}

-function migrate(root, jsonObj) {
-    var SPEC = SpecModule.SPEC;
-    if (!jsonObj) return;
+function migrateToVersion(obj, targetVersion) {
+    if (!obj) return null;

-    if (jsonObj.themeIndex !== undefined || jsonObj.themeIsDynamic !== undefined) {
-        var themeNames = ["blue", "deepBlue", "purple", "green", "orange", "red", "cyan", "pink", "amber", "coral"];
-        if (jsonObj.themeIsDynamic) {
-            root.currentThemeName = "dynamic";
-        } else if (jsonObj.themeIndex >= 0 && jsonObj.themeIndex < themeNames.length) {
-            root.currentThemeName = themeNames[jsonObj.themeIndex];
-        }
-        console.info("Auto-migrated theme from index", jsonObj.themeIndex, "to", root.currentThemeName);
+    var settings = JSON.parse(JSON.stringify(obj));
+    var currentVersion = settings.configVersion || 0;
+
+    if (currentVersion >= targetVersion) {
+        return null;
    }

-    if ((jsonObj.dankBarWidgetOrder && jsonObj.dankBarWidgetOrder.length > 0) ||
-        (jsonObj.topBarWidgetOrder && jsonObj.topBarWidgetOrder.length > 0)) {
-        if (jsonObj.dankBarLeftWidgets === undefined && jsonObj.dankBarCenterWidgets === undefined && jsonObj.dankBarRightWidgets === undefined) {
-            var widgetOrder = jsonObj.dankBarWidgetOrder || jsonObj.topBarWidgetOrder;
-            root.dankBarLeftWidgets = widgetOrder.filter(function(w) { return ["launcherButton", "workspaceSwitcher", "focusedWindow"].indexOf(w) >= 0; });
-            root.dankBarCenterWidgets = widgetOrder.filter(function(w) { return ["clock", "music", "weather"].indexOf(w) >= 0; });
-            root.dankBarRightWidgets = widgetOrder.filter(function(w) { return ["systemTray", "clipboard", "systemResources", "notificationButton", "battery", "controlCenterButton"].indexOf(w) >= 0; });
-        }
-    }
+    if (currentVersion < 2) {
+        console.info("Migrating settings from version", currentVersion, "to version 2");

-    if (jsonObj.useOSLogo !== undefined) {
-        root.launcherLogoMode = jsonObj.useOSLogo ? "os" : "apps";
-        root.launcherLogoColorOverride = jsonObj.osLogoColorOverride !== undefined ? jsonObj.osLogoColorOverride : "";
-        root.launcherLogoBrightness = jsonObj.osLogoBrightness !== undefined ? jsonObj.osLogoBrightness : 0.5;
-        root.launcherLogoContrast = jsonObj.osLogoContrast !== undefined ? jsonObj.osLogoContrast : 1;
-    }
-
-    if (jsonObj.mediaCompactMode !== undefined && jsonObj.mediaSize === undefined) {
-        root.mediaSize = jsonObj.mediaCompactMode ? 0 : 1;
-    }
-
-    for (var k in SPEC) {
-        var spec = SPEC[k];
-        if (!spec.migrate) continue;
-        for (var i = 0; i < spec.migrate.length; i++) {
-            var oldKey = spec.migrate[i];
-            if (jsonObj[oldKey] !== undefined && jsonObj[k] === undefined) {
-                var raw = jsonObj[oldKey];
-                var coerce = spec.coerce;
-                root[k] = coerce ? (coerce(raw) !== undefined ? coerce(raw) : root[k]) : raw;
-                break;
+        if (settings.barConfigs === undefined) {
+            var position = 0;
+            if (settings.dankBarAtBottom !== undefined || settings.topBarAtBottom !== undefined) {
+                var atBottom = settings.dankBarAtBottom !== undefined ? settings.dankBarAtBottom : settings.topBarAtBottom;
+                position = atBottom ? 1 : 0;
+            } else if (settings.dankBarPosition !== undefined) {
+                position = settings.dankBarPosition;
            }

+            var defaultConfig = {
+                id: "default",
+                name: "Main Bar",
+                enabled: true,
+                position: position,
+                screenPreferences: ["all"],
+                showOnLastDisplay: true,
+                leftWidgets: settings.dankBarLeftWidgets || ["launcherButton", "workspaceSwitcher", "focusedWindow"],
+                centerWidgets: settings.dankBarCenterWidgets || ["music", "clock", "weather"],
+                rightWidgets: settings.dankBarRightWidgets || ["systemTray", "clipboard", "cpuUsage", "memUsage", "notificationButton", "battery", "controlCenterButton"],
+                spacing: settings.dankBarSpacing !== undefined ? settings.dankBarSpacing : 4,
+                innerPadding: settings.dankBarInnerPadding !== undefined ? settings.dankBarInnerPadding : 4,
+                bottomGap: settings.dankBarBottomGap !== undefined ? settings.dankBarBottomGap : 0,
+                transparency: settings.dankBarTransparency !== undefined ? settings.dankBarTransparency : 1.0,
+                widgetTransparency: settings.dankBarWidgetTransparency !== undefined ? settings.dankBarWidgetTransparency : 1.0,
+                squareCorners: settings.dankBarSquareCorners !== undefined ? settings.dankBarSquareCorners : false,
+                noBackground: settings.dankBarNoBackground !== undefined ? settings.dankBarNoBackground : false,
+                gothCornersEnabled: settings.dankBarGothCornersEnabled !== undefined ? settings.dankBarGothCornersEnabled : false,
+                gothCornerRadiusOverride: settings.dankBarGothCornerRadiusOverride !== undefined ? settings.dankBarGothCornerRadiusOverride : false,
+                gothCornerRadiusValue: settings.dankBarGothCornerRadiusValue !== undefined ? settings.dankBarGothCornerRadiusValue : 12,
+                borderEnabled: settings.dankBarBorderEnabled !== undefined ? settings.dankBarBorderEnabled : false,
+                borderColor: settings.dankBarBorderColor || "surfaceText",
+                borderOpacity: settings.dankBarBorderOpacity !== undefined ? settings.dankBarBorderOpacity : 1.0,
+                borderThickness: settings.dankBarBorderThickness !== undefined ? settings.dankBarBorderThickness : 1,
+                fontScale: settings.dankBarFontScale !== undefined ? settings.dankBarFontScale : 1.0,
+                autoHide: settings.dankBarAutoHide !== undefined ? settings.dankBarAutoHide : false,
+                autoHideDelay: settings.dankBarAutoHideDelay !== undefined ? settings.dankBarAutoHideDelay : 250,
+                openOnOverview: settings.dankBarOpenOnOverview !== undefined ? settings.dankBarOpenOnOverview : false,
+                visible: settings.dankBarVisible !== undefined ? settings.dankBarVisible : true,
+                popupGapsAuto: settings.popupGapsAuto !== undefined ? settings.popupGapsAuto : true,
+                popupGapsManual: settings.popupGapsManual !== undefined ? settings.popupGapsManual : 4
+            };
+
+            settings.barConfigs = [defaultConfig];
+
+            var legacyKeys = [
+                "dankBarLeftWidgets", "dankBarCenterWidgets", "dankBarRightWidgets",
+                "dankBarWidgetOrder", "dankBarAutoHide", "dankBarAutoHideDelay",
+                "dankBarOpenOnOverview", "dankBarVisible", "dankBarSpacing",
+                "dankBarBottomGap", "dankBarInnerPadding", "dankBarPosition",
+                "dankBarSquareCorners", "dankBarNoBackground", "dankBarGothCornersEnabled",
+                "dankBarGothCornerRadiusOverride", "dankBarGothCornerRadiusValue",
+                "dankBarBorderEnabled", "dankBarBorderColor", "dankBarBorderOpacity",
+                "dankBarBorderThickness", "popupGapsAuto", "popupGapsManual",
+                "dankBarAtBottom", "topBarAtBottom", "dankBarTransparency", "dankBarWidgetTransparency"
+            ];
+
+            for (var i = 0; i < legacyKeys.length; i++) {
+                delete settings[legacyKeys[i]];
+            }
+
+            console.info("Migrated single bar settings to barConfigs");
+        }
+
+        settings.configVersion = 2;
+    }

-    if (jsonObj.dankBarAtBottom !== undefined || jsonObj.topBarAtBottom !== undefined) {
-        var atBottom = jsonObj.dankBarAtBottom !== undefined ? jsonObj.dankBarAtBottom : jsonObj.topBarAtBottom;
-        root.dankBarPosition = atBottom ? 1 : 0;
-    }
-
-    if (jsonObj.pluginSettings !== undefined) {
-        root.pluginSettings = jsonObj.pluginSettings;
-        return true;
-    }
-
-    return false;
+    return settings;
}

function cleanup(fileText) {
@@ -104,7 +127,6 @@ function cleanup(fileText) {

    for (var key in settings) {
        if (validKeys.indexOf(key) < 0) {
-            console.log("SettingsData: Removing unused key:", key);
            delete settings[key];
            needsSave = true;
        }
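Taken together, these hunks replace the old in-place migrate(root, jsonObj) with a versioned migrateToVersion(obj, targetVersion): it deep-copies the saved settings, returns null when configVersion is already current, folds the legacy single-bar keys into barConfigs, deletes them, stamps configVersion = 2, and hands the upgraded object back to the caller. A sketch of how a caller might wire that in (the load/save helpers here are assumptions; only migrateToVersion and parse come from the diff):

    // Hypothetical caller: upgrade the on-disk JSON before parsing it into properties
    function loadSettings(root, jsonObj) {
        var migrated = migrateToVersion(jsonObj, 2)
        if (migrated) {
            jsonObj = migrated        // use the upgraded settings object
            saveToDisk(jsonObj)       // assumed persistence helper
        }
        parse(root, jsonObj)          // defaults from SPEC, overridden by saved values
    }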
Some files were not shown because too many files have changed in this diff.