mirror of https://github.com/AvengeMedia/DankMaterialShell.git synced 2026-01-24 13:32:50 -05:00

Refactor pre-commit hooks to use prek (#976)

* ci: change to prek for pre-commit

* refactor: fix shellcheck warnings for the scripts

* chore: unify whitespace formatting

* nix: add prek to dev shell
Author: Marcus Ramberg
Date: 2025-12-11 15:11:12 +01:00 (committed by GitHub)
Parent: c8cfe0cb5a
Commit: 7c88865d67
147 changed files with 805 additions and 860 deletions

.editorconfig (new file)

@@ -0,0 +1,8 @@
[*.sh]
# like -i=4
indent_style = space
indent_size = 4
[*.nix]
# like -i=4
indent_style = space
indent_size = 4
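The `# like -i=4` comments read like a reference to shfmt's indent flag. If that is the intent, a hedged, illustrative formatter invocation matching these settings (shfmt itself is not part of this commit, and the path is only an example) would be:

```bash
# Report, as a diff, any shell script not using 4-space indentation,
# mirroring the .editorconfig settings above: -i 4 sets indent width, -d prints a diff.
shfmt -i 4 -d distro/scripts/*.sh
```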


@@ -1,69 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$HOOK_DIR/.." && pwd)"
cd "$REPO_ROOT"
# =============================================================================
# Go CI checks (when core/ files are staged)
# =============================================================================
STAGED_CORE_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep '^core/' || true)
if [[ -n "$STAGED_CORE_FILES" ]]; then
echo "Go files staged in core/, running CI checks..."
cd "$REPO_ROOT/core"
# Format check
echo " Checking gofmt..."
UNFORMATTED=$(gofmt -s -l . 2>/dev/null || true)
if [[ -n "$UNFORMATTED" ]]; then
echo "The following files are not formatted:"
echo "$UNFORMATTED"
echo ""
echo "Run: cd core && gofmt -s -w ."
exit 1
fi
# golangci-lint
if command -v golangci-lint &>/dev/null; then
echo " Running golangci-lint..."
golangci-lint run ./...
else
echo " Warning: golangci-lint not installed, skipping lint"
echo " Install: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"
fi
# Tests
echo " Running tests..."
if ! go test ./... >/dev/null 2>&1; then
echo "Tests failed! Run 'go test ./...' for details."
exit 1
fi
# Build checks
echo " Building..."
mkdir -p bin
go build -buildvcs=false -o bin/dms ./cmd/dms
go build -buildvcs=false -o bin/dms-distro -tags distro_binary ./cmd/dms
go build -buildvcs=false -o bin/dankinstall ./cmd/dankinstall
echo "All Go CI checks passed!"
cd "$REPO_ROOT"
fi
# =============================================================================
# i18n sync check (DISABLED for now)
# =============================================================================
# if [[ -n "${POEDITOR_API_TOKEN:-}" ]] && [[ -n "${POEDITOR_PROJECT_ID:-}" ]]; then
# if command -v python3 &>/dev/null; then
# if ! python3 scripts/i18nsync.py check &>/dev/null; then
# echo "Translations out of sync"
# echo "Run: python3 scripts/i18nsync.py sync"
# exit 1
# fi
# fi
# fi
exit 0
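With this hook deleted, its Go checks are expected to come from the pre-commit configurations added elsewhere in this commit, which cover formatting and linting but not `go test`. A rough sketch of running the same checks by hand, using only the commands the old hook already invoked:

```bash
cd core
gofmt -s -l .             # list unformatted files; the old hook failed on any output
golangci-lint run ./...   # lint, as the hook did when golangci-lint was installed
go test ./...             # tests are no longer run by a hook, so run them manually
```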


@@ -6,7 +6,7 @@ labels: "bug"
assignees: ""
---
<!-- If your issue is related to ICONS
- Purple and black checkerboards are QT's way of signalling an icon doesn't exist
- FIX: Configure a QT6 or Icon Pack in DMS Settings that has the icon you want
- Follow the [THEMING](https://danklinux.com/docs/dankmaterialshell/icon-theming) section to ensure your QT environment variable is configured correctly for themes.
@@ -62,4 +62,4 @@ Paste error messages or logs here
## Screenshots/Recordings
<!-- If applicable, add screenshots or screen recordings -->


@@ -30,4 +30,4 @@ Is this feature specific to one compositor?
## Alternatives/Existing Solutions
<!-- Include any similar/pre-existing products that solve this problem -->


@@ -37,4 +37,4 @@ assignees: ""
## Screenshots/Recordings
<!-- If applicable, add screenshots or screen recordings -->


@@ -33,20 +33,6 @@ jobs:
with:
go-version-file: ./core/go.mod
- name: Format check
run: |
if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
echo "The following files are not formatted:"
gofmt -s -l .
exit 1
fi
- name: Run golangci-lint
uses: golangci/golangci-lint-action@v9
with:
version: v2.6
working-directory: core
- name: Test
run: go test -v ./...

.github/workflows/prek.yml (new file)

@@ -0,0 +1,15 @@
name: Pre-commit Checks
on:
push:
pull_request:
branches: [master, main]
jobs:
pre-commit-check:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: run pre-commit hooks
uses: j178/prek-action@v1
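The j178/prek-action step runs the repository's pre-commit hooks in CI. Assuming prek mirrors the pre-commit CLI (it is positioned as a drop-in replacement), the same check can be reproduced locally with:

```bash
# Run every configured hook against the whole tree, not just staged files.
prek run --all-files
```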


@@ -272,7 +272,7 @@ jobs:
# Create QML source package (exclude build artifacts and git files)
# Copy root LICENSE and CONTRIBUTING.md to quickshell/ for packaging
cp LICENSE CONTRIBUTING.md quickshell/
# Copy root assets directory to quickshell for systemd service and desktop file
cp -r assets quickshell/


@@ -19,7 +19,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Determine version
id: version
run: |
@@ -40,31 +40,31 @@ jobs:
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "release=$RELEASE" >> $GITHUB_OUTPUT
echo "✅ Building DMS hotfix version: $VERSION-$RELEASE"
- name: Setup build environment
run: |
sudo apt-get update
sudo apt-get install -y rpm wget curl jq gzip
mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
echo "✅ RPM build environment ready"
- name: Download release assets
run: |
VERSION="${{ steps.version.outputs.version }}"
cd ~/rpmbuild/SOURCES
echo "📦 Downloading DMS QML source for v${VERSION}..."
# Download DMS QML source
wget "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-qml.tar.gz" || {
echo "❌ Failed to download dms-qml.tar.gz for v${VERSION}"
exit 1
}
echo "✅ Source downloaded"
echo "Note: dms-cli binary will be downloaded during build based on target architecture"
ls -lh
- name: Generate stable spec file
run: |
VERSION="${{ steps.version.outputs.version }}"
@@ -211,38 +211,38 @@ jobs:
echo ""
echo "=== Spec file preview ==="
head -40 ~/rpmbuild/SPECS/dms.spec
- name: Build SRPM
id: build
run: |
cd ~/rpmbuild/SPECS
echo "🔨 Building SRPM..."
rpmbuild -bs dms.spec
SRPM=$(ls ~/rpmbuild/SRPMS/*.src.rpm | tail -n 1)
SRPM_NAME=$(basename "$SRPM")
echo "srpm_path=$SRPM" >> $GITHUB_OUTPUT
echo "srpm_name=$SRPM_NAME" >> $GITHUB_OUTPUT
echo "✅ SRPM built: $SRPM_NAME"
echo ""
echo "=== SRPM Info ==="
rpm -qpi "$SRPM"
- name: Upload SRPM artifact
uses: actions/upload-artifact@v4
with:
name: dms-stable-srpm-${{ steps.version.outputs.version }}
path: ${{ steps.build.outputs.srpm_path }}
retention-days: 90
- name: Install Copr CLI
run: |
sudo apt-get install -y python3-pip
pip3 install copr-cli
mkdir -p ~/.config
cat > ~/.config/copr << EOF
[copr-cli]
@@ -252,30 +252,30 @@ jobs:
copr_url = https://copr.fedorainfracloud.org
EOF
chmod 600 ~/.config/copr
echo "✅ Copr CLI configured"
- name: Upload to Copr
run: |
SRPM="${{ steps.build.outputs.srpm_path }}"
VERSION="${{ steps.version.outputs.version }}"
echo "🚀 Uploading SRPM to avengemedia/dms..."
echo " SRPM: $(basename $SRPM)"
echo " Version: $VERSION"
BUILD_OUTPUT=$(copr-cli build avengemedia/dms "$SRPM" --nowait 2>&1)
echo "$BUILD_OUTPUT"
BUILD_ID=$(echo "$BUILD_OUTPUT" | grep -oP 'Build was added to.*\K[0-9]+' || echo "unknown")
if [ "$BUILD_ID" != "unknown" ]; then
echo "✅ Build submitted successfully!"
echo "🔗 https://copr.fedorainfracloud.org/coprs/avengemedia/dms/build/$BUILD_ID/"
else
echo "⚠️ Could not extract build ID, but upload may have succeeded"
fi
- name: Build summary
if: always()
run: |


@@ -21,34 +21,34 @@ jobs:
check-updates:
name: Check for updates
runs-on: ubuntu-latest
outputs:
has_updates: ${{ steps.check.outputs.has_updates }}
packages: ${{ steps.check.outputs.packages }}
version: ${{ steps.check.outputs.version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install OSC
run: |
sudo apt-get update
sudo apt-get install -y osc
mkdir -p ~/.config/osc
cat > ~/.config/osc/oscrc << EOF
[general]
apiurl = https://api.opensuse.org
[https://api.opensuse.org]
user = ${{ secrets.OBS_USERNAME }}
pass = ${{ secrets.OBS_PASSWORD }}
EOF
chmod 600 ~/.config/osc/oscrc
- name: Check for updates
id: check
run: |
@@ -116,13 +116,13 @@ jobs:
if: |
github.event_name == 'workflow_dispatch' ||
needs.check-updates.outputs.has_updates == 'true'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Determine packages to update
id: packages
run: |
@@ -140,7 +140,7 @@ jobs:
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Update dms-git spec version
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
run: |
@@ -148,13 +148,13 @@ jobs:
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
# Update version in spec
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
# Add changelog entry
DATE_STR=$(date "+%a %b %d %Y")
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
@@ -163,12 +163,12 @@ jobs:
- name: Update Debian dms-git changelog version
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
run: |
# Get commit info for dms-git versioning
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
COMMIT_COUNT=$(git rev-list --count HEAD)
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
# Debian version format: 0.6.2+git2256.9162e314
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
@@ -182,7 +182,7 @@ jobs:
echo "Current Debian version: $CURRENT_VERSION"
echo "New version: $NEW_VERSION"
# Only update if version changed
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
# Create new changelog entry at top
TEMP_CHANGELOG=$(mktemp)
@@ -211,10 +211,10 @@ jobs:
VERSION="${{ steps.packages.outputs.version }}"
VERSION_NO_V="${VERSION#v}"
echo "Updating packaging to version $VERSION_NO_V"
# Update openSUSE dms spec (stable only)
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
# Update Debian _service files
for service in distro/debian/*/_service; do
if [[ -f "$service" ]]; then
@@ -250,18 +250,18 @@ jobs:
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
MESSAGE="Automated update from GitHub Actions"
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
fi
if [[ "$PACKAGES" == "all" ]]; then
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
else
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
fi
- name: Summary
run: |
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY


@@ -84,12 +84,12 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: '1.24'
cache: false
- name: Install build dependencies
run: |
@@ -102,7 +102,7 @@ jobs:
build-essential \
fakeroot \
dpkg-dev
- name: Configure GPG
env:
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
@@ -110,7 +110,7 @@ jobs:
echo "$GPG_KEY" | gpg --import
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
- name: Determine packages to upload
id: packages
run: |
@@ -123,19 +123,19 @@ jobs:
else
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
fi
- name: Upload to PPA
env:
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
run: |
PACKAGES="${{ steps.packages.outputs.packages }}"
# Export to ensure it's available to subprocesses
if [ -n "$REBUILD_RELEASE" ]; then
export REBUILD_RELEASE
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
fi
if [[ "$PACKAGES" == "all" ]]; then
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms to PPA..."
@@ -144,13 +144,13 @@ jobs:
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
REBUILD_RELEASE="$REBUILD_RELEASE" bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms" dms questing
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-git to PPA..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
REBUILD_RELEASE="$REBUILD_RELEASE" bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-git" dms-git questing
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Uploading dms-greeter to PPA..."
@@ -163,7 +163,7 @@ jobs:
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
REBUILD_RELEASE="$REBUILD_RELEASE" bash distro/scripts/ppa-upload.sh "distro/ubuntu/$PACKAGES" "$PPA_NAME" questing
fi
- name: Summary
run: |
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
@@ -186,7 +186,7 @@ jobs:
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
fi
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
fi

.gitignore

@@ -104,7 +104,7 @@ go.work.sum
bin/
# Extracted source trees in Ubuntu package directories
distro/ubuntu/*/dms-git-repo/
distro/ubuntu/*/DankMaterialShell-*/
distro/ubuntu/danklinux/*/dsearch-*/

.pre-commit-config.yaml (new file)

@@ -0,0 +1,12 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: check-yaml
- id: end-of-file-fixer
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.10.0.1
hooks:
- id: shellcheck
args: [-e, SC2164, -e, SC2001, -e, SC2012, -e, SC2317]
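The `args` line disables four ShellCheck findings (SC2164, SC2001, SC2012, SC2317) for every script the hook touches. For a one-off check outside the hook, an equivalent direct invocation would look roughly like this (the script path is only illustrative):

```bash
# Same exclusions the hook passes; -e accepts a comma-separated list of codes.
shellcheck -e SC2164,SC2001,SC2012,SC2317 distro/scripts/obs-upload.sh
```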


@@ -6,10 +6,10 @@ To contribute fork this repository, make your changes, and open a pull request.
## Setup
-Enable pre-commit hooks to catch CI failures before pushing:
+Install [prek](https://prek.j178.dev/) then activate pre-commit hooks:
```bash
-git config core.hooksPath .githooks
+prek install
```
### Nix Development Shell
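Existing clones that followed the old instructions may still have `core.hooksPath` pointing at the removed `.githooks` directory; a hedged cleanup sketch before switching over (only needed if the old setting was applied):

```bash
git config --unset core.hooksPath   # drop the old hook path if it was set
prek install                        # install the prek-managed pre-commit hook
```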


@@ -14,4 +14,4 @@ RestartSec=1.23
TimeoutStopSec=10
[Install]
WantedBy=graphical-session.target


@@ -0,0 +1,7 @@
repos:
- repo: https://github.com/golangci/golangci-lint
rev: v2.6.2
hooks:
- id: golangci-lint-full
- id: golangci-lint-fmt
- id: golangci-lint-config-verify
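These hook ids come from golangci-lint's own pre-commit integration. Assuming the v2 CLI that the pinned `rev` implies, they correspond roughly to the following commands run from `core/` (a sketch, not an exact reproduction of the hook entry points):

```bash
cd core
golangci-lint run            # golangci-lint-full: lint the whole module
golangci-lint fmt            # golangci-lint-fmt: apply the configured formatters
golangci-lint config verify  # golangci-lint-config-verify: validate the golangci config
```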


@@ -139,4 +139,4 @@ Most packages available in standard repos. Minimal building required.
**Gentoo**
Uses Portage with GURU overlay. Automatically configures USE flags. Variable success depending on system configuration.
See installer output for distribution-specific details during installation.


@@ -8,7 +8,7 @@
<rect x="0" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="20" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="0" y="37" width="24" height="8" fill="#CCBEFF"/>
<!-- A -->
<rect x="36" y="5" width="20" height="8" fill="#CCBEFF"/>
<rect x="32" y="13" width="8" height="8" fill="#CCBEFF"/>
@@ -18,7 +18,7 @@
<rect x="52" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="32" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="52" y="37" width="8" height="8" fill="#CCBEFF"/>
<!-- N -->
<rect x="64" y="5" width="12" height="8" fill="#CCBEFF"/>
<rect x="92" y="5" width="8" height="8" fill="#CCBEFF"/>
@@ -32,7 +32,7 @@
<rect x="92" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="64" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="84" y="37" width="16" height="8" fill="#CCBEFF"/>
<!-- K -->
<rect x="104" y="5" width="8" height="8" fill="#CCBEFF"/>
<rect x="124" y="5" width="8" height="8" fill="#CCBEFF"/>
@@ -43,4 +43,4 @@
<rect x="120" y="29" width="8" height="8" fill="#CCBEFF"/>
<rect x="104" y="37" width="8" height="8" fill="#CCBEFF"/>
<rect x="124" y="37" width="8" height="8" fill="#CCBEFF"/>
</svg>


@@ -17,8 +17,8 @@ func getThemedASCII() string {
logo := `
██████╗ █████╗ ███╗ ██╗██╗ ██╗
██╔══██╗██╔══██╗████╗ ██║██║ ██╔╝
██║ ██║███████║██╔██╗ ██║█████╔╝
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
██████╔╝██║ ██║██║ ╚████║██║ ██╗
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝`


@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
set -e
@@ -9,8 +9,8 @@ NC='\033[0m' # No Color
# Check for root privileges
if [ "$(id -u)" == "0" ]; then
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
exit 1
fi
# Check if running on Linux
@@ -22,17 +22,17 @@ fi
# Detect architecture
ARCH=$(uname -m)
case "$ARCH" in
x86_64)
ARCH="amd64"
;;
aarch64)
ARCH="arm64"
;;
*)
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
exit 1
;;
esac
# Get the latest release version
@@ -55,7 +55,7 @@ curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LAT
curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LATEST_VERSION/dankinstall-$ARCH.gz.sha256" -o "expected.sha256"
# Get the expected checksum
-EXPECTED_CHECKSUM=$(cat expected.sha256 | awk '{print $1}')
+EXPECTED_CHECKSUM=$(awk '{print $1}' expected.sha256)
# Calculate actual checksum
printf "%bVerifying checksum...%b\n" "$GREEN" "$NC"
@@ -67,7 +67,7 @@ if [ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]; then
printf "Expected: %s\n" "$EXPECTED_CHECKSUM"
printf "Got: %s\n" "$ACTUAL_CHECKSUM"
printf "The downloaded file may be corrupted or tampered with\n"
-cd - > /dev/null
+cd - >/dev/null
rm -rf "$TEMP_DIR"
exit 1
fi
@@ -82,5 +82,5 @@ printf "%bRunning installer...%b\n" "$GREEN" "$NC"
./installer
# Cleanup
-cd - > /dev/null
+cd - >/dev/null
rm -rf "$TEMP_DIR"
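The hunks above also show the checksum flow: the expected hash is read from the downloaded `.sha256` file and compared to a locally computed one before the installer runs. An equivalent, slightly more compact pattern (assuming GNU coreutils and that the filename recorded in `expected.sha256` matches the downloaded file) would be:

```bash
# sha256sum -c recomputes the hash for the file named in expected.sha256
# and exits non-zero on mismatch.
sha256sum -c expected.sha256 || { echo "Checksum mismatch" >&2; exit 1; }
```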


@@ -192,4 +192,4 @@ binds {
// === System Controls ===
Mod+Escape allow-inhibiting=false { toggle-keyboard-shortcuts-inhibit; }
Mod+Shift+P { power-off-monitors; }
}


@@ -33,4 +33,4 @@ recent-windows {
active-color "#124a73"
urgent-color "#ffb4ab"
}
}


@@ -73,7 +73,7 @@
</description>
<arg name="workspace" type="new_id" interface="ext_workspace_handle_v1"/>
</event>
<request name="commit">
<description summary="all requests about the workspaces have been sent">
The client must send this request after it has finished sending other
@@ -242,7 +242,7 @@
- a list of states, conveyed to the client with the state event
- and optionally a set of coordinates, conveyed to the client with the
coordinates event
The client may request that the compositor activate or deactivate the workspace.
Each workspace can belong to only a single workspace group.


@@ -6,8 +6,8 @@ func (m Model) renderBanner() string {
logo := `
██████╗ █████╗ ███╗ ██╗██╗ ██╗
██╔══██╗██╔══██╗████╗ ██║██║ ██╔╝
██║ ██║███████║██╔██╗ ██║█████╔╝
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
██████╔╝██║ ██║██║ ╚████║██║ ██╗
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝ `


@@ -1,9 +1,9 @@
-#!/bin/sh
+#!/usr/bin/env sh
# Runs go generate for each directory, but in parallel. Any arguments are appended to the
# go generate command.
# Usage: $ ./generatep [go generate arguments]
# Print all generate commands: $ ./generatep -x
-cd ./wayland
+cd ./wayland || exit 1
-find . -type f -name '*.go' -exec dirname {} \; | sort -u | parallel -j 0 go generate $1 {}/.
+find . -type f -name '*.go' -exec dirname {} \; | sort -u | parallel -j 0 go generate "$1" {}/.
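The script still fans `go generate` out across package directories with GNU parallel. On a machine without parallel, a serial xargs-based sketch (illustrative only) does the same walk:

```bash
cd ./wayland || exit 1
# One go generate invocation per unique directory containing .go files.
find . -type f -name '*.go' -exec dirname {} \; | sort -u |
    xargs -I{} go generate {}/.
```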


@@ -1,4 +1,4 @@
# Include files that are normally excluded by .gitignore
# These are needed for the build process on Launchpad
tar-ignore = !dms-distropkg-amd64.gz
tar-ignore = !dms-source.tar.gz


@@ -104,19 +104,19 @@ if [ -x /usr/sbin/semanage ] && [ -x /usr/sbin/restorecon ]; then
# Greeter launcher binary
semanage fcontext -a -t bin_t '%{_bindir}/dms-greeter' >/dev/null 2>&1 || true
restorecon %{_bindir}/dms-greeter >/dev/null 2>&1 || true
# Greeter home directory
semanage fcontext -a -t user_home_dir_t '%{_sharedstatedir}/greeter(/.*)?' >/dev/null 2>&1 || true
restorecon -R %{_sharedstatedir}/greeter >/dev/null 2>&1 || true
# Cache directory for greeter data
semanage fcontext -a -t cache_home_t '%{_localstatedir}/cache/dms-greeter(/.*)?' >/dev/null 2>&1 || true
restorecon -R %{_localstatedir}/cache/dms-greeter >/dev/null 2>&1 || true
# Shared data directory
semanage fcontext -a -t usr_t '%{_datadir}/quickshell/dms-greeter(/.*)?' >/dev/null 2>&1 || true
restorecon -R %{_datadir}/quickshell/dms-greeter >/dev/null 2>&1 || true
# PAM configuration
restorecon %{_sysconfdir}/pam.d/greetd >/dev/null 2>&1 || true
fi


@@ -134,4 +134,4 @@ pkill -USR1 -x dms >/dev/null 2>&1 || :
%{_datadir}/fish/vendor_completions.d/dms.fish
%changelog
{{{ git_repo_changelog }}}


@@ -45,7 +45,7 @@ rm -rf "$TEMP_DIR"
echo "Generating spec file..."
CHANGELOG_DATE="$(date '+%a %b %d %Y')"
-cat > ~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
+cat >~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
# Spec for DMS stable releases - Built locally
%global debug_package %{nil}
@@ -187,7 +187,7 @@ echo "Building SRPM..."
cd ~/rpmbuild/SPECS
rpmbuild -bs dms.spec
-SRPM=$(ls ~/rpmbuild/SRPMS/dms-${VERSION}-*.src.rpm | tail -n 1)
+SRPM=$(ls ~/rpmbuild/SRPMS/dms-"${VERSION}"-*.src.rpm | tail -n 1)
if [ ! -f "$SRPM" ]; then
echo "Error: SRPM not found!"
exit 1
@@ -196,7 +196,7 @@ fi
echo "SRPM built successfully: $SRPM"
# Check if copr-cli is installed
-if ! command -v copr-cli &> /dev/null; then
+if ! command -v copr-cli &>/dev/null; then
echo ""
echo "copr-cli is not installed. Install it with:"
echo " pip install copr-cli"


@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# Unified OBS status checker for dms packages
# Checks all platforms (Debian, OpenSUSE) and architectures (x86_64, aarch64)
# Only pulls logs if build failed
@@ -35,81 +35,81 @@ cd "$OBS_BASE" || {
for pkg in "${PACKAGES[@]}"; do
case "$pkg" in
dms)
PROJECT="$OBS_BASE_PROJECT:dms"
;;
dms-git)
PROJECT="$OBS_BASE_PROJECT:dms-git"
;;
*)
echo "Error: Unknown package '$pkg'"
continue
;;
esac
+(
echo "=========================================="
echo "=== $pkg ==="
echo "=========================================="
# Checkout if needed
if [[ ! -d "$PROJECT/$pkg" ]]; then
osc co "$PROJECT/$pkg" 2>&1 | tail -1
fi
cd "$PROJECT/$pkg"
ALL_RESULTS=$(osc results 2>&1)
# Check each repository and architecture
FAILED_BUILDS=()
for repo in "${REPOS[@]}"; do
for arch in "${ARCHES[@]}"; do
STATUS=$(echo "$ALL_RESULTS" | grep "$repo.*$arch" | awk '{print $NF}' | head -1)
if [[ -n "$STATUS" ]]; then
# Color code status
case "$STATUS" in
succeeded)
COLOR="\033[0;32m" # Green
SYMBOL="✅"
;;
failed)
COLOR="\033[0;31m" # Red
SYMBOL="❌"
FAILED_BUILDS+=("$repo $arch")
;;
unresolvable)
COLOR="\033[0;33m" # Yellow
SYMBOL="⚠️"
;;
*)
COLOR="\033[0;37m" # White
SYMBOL="⏳"
;;
esac
echo -e " $SYMBOL $repo $arch: ${COLOR}$STATUS\033[0m"
fi
done
done
# Pull logs for failed builds
if [[ ${#FAILED_BUILDS[@]} -gt 0 ]]; then
echo ""
echo " 📋 Fetching logs for failed builds..."
for build in "${FAILED_BUILDS[@]}"; do
read -r repo arch <<<"$build"
echo ""
echo " ────────────────────────────────────────────"
echo " Build log: $repo $arch"
echo " ────────────────────────────────────────────"
osc remotebuildlog "$PROJECT" "$pkg" "$repo" "$arch" 2>&1 | tail -100
done
fi
echo ""
-cd - > /dev/null
+)
done
echo "=========================================="
echo "Status check complete!"
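The reshuffle above wraps each package's processing in a subshell, so the `cd` into the package checkout no longer needs to be undone with `cd - > /dev/null`. The pattern in isolation (a sketch, with the script's variables used as placeholders):

```bash
# Directory changes inside ( ... ) are confined to the subshell;
# the caller's working directory is untouched when it exits.
(
    cd "$PROJECT/$pkg" || exit 1
    osc results
)
```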


@@ -17,21 +17,21 @@ MESSAGE=""
for arg in "$@"; do
case "$arg" in
debian)
UPLOAD_DEBIAN=true
UPLOAD_OPENSUSE=false
;;
opensuse)
UPLOAD_DEBIAN=false
UPLOAD_OPENSUSE=true
;;
*)
if [[ -z "$PACKAGE" ]]; then
PACKAGE="$arg"
elif [[ -z "$MESSAGE" ]]; then
MESSAGE="$arg"
fi
;;
esac
done
@@ -46,17 +46,17 @@ if [[ -z "$PACKAGE" ]]; then
echo " 2. dms-git - Nightly DMS"
echo " a. all"
echo ""
-read -p "Select package (1-${#AVAILABLE_PACKAGES[@]}, a): " selection
+read -r -p "Select package (1-${#AVAILABLE_PACKAGES[@]}, a): " selection
if [[ "$selection" == "a" ]] || [[ "$selection" == "all" ]]; then
PACKAGE="all"
elif [[ "$selection" =~ ^[0-9]+$ ]] && [[ "$selection" -ge 1 ]] && [[ "$selection" -le ${#AVAILABLE_PACKAGES[@]} ]]; then
-PACKAGE="${AVAILABLE_PACKAGES[$((selection-1))]}"
+PACKAGE="${AVAILABLE_PACKAGES[$((selection - 1))]}"
else
echo "Error: Invalid selection"
exit 1
fi
fi
if [[ -z "$MESSAGE" ]]; then
@@ -107,7 +107,7 @@ if [[ "$PACKAGE" == "all" ]]; then
echo "⚠️ Skipping $pkg (not found in distro/debian/)"
fi
done
if [[ ${#FAILED[@]} -eq 0 ]]; then
echo "✅ All packages uploaded successfully!"
exit 0
@@ -124,16 +124,16 @@ if [[ ! -d "distro/debian/$PACKAGE" ]]; then
fi
case "$PACKAGE" in
dms)
PROJECT="dms"
;;
dms-git)
PROJECT="dms-git"
;;
*)
echo "Error: Unknown package '$PACKAGE'"
exit 1
;;
esac
OBS_PROJECT="${OBS_BASE_PROJECT}:${PROJECT}"
@@ -216,8 +216,8 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]];
# However, we need to check if we are also updating Debian, or if this script is expected to continue.
# If this is OpenSUSE only run, we can exit.
if [[ "$UPLOAD_DEBIAN" == false ]]; then
echo "✅ No changes needed for OpenSUSE (not manual). Exiting."
exit 0
fi
fi
fi
@@ -235,7 +235,7 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f
echo " - OpenSUSE-only upload: creating source tarball"
TEMP_DIR=$(mktemp -d)
-trap "rm -rf $TEMP_DIR" EXIT
+trap 'rm -rf $TEMP_DIR' EXIT
if [[ -f "distro/debian/$PACKAGE/_service" ]] && grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
@@ -244,8 +244,8 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f
if [[ -n "$GIT_URL" ]]; then
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
SOURCE_DIR="$TEMP_DIR/dms-git-source"
-if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
+if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null ||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
cd "$SOURCE_DIR"
if [[ -n "$GIT_REVISION" ]]; then
git checkout "$GIT_REVISION" 2>/dev/null || true
@@ -265,16 +265,16 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f
cd "$OBS_TARBALL_DIR"
case "$PACKAGE" in
dms)
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
;;
dms-git)
EXPECTED_DIR="dms-git-source"
;;
*)
EXPECTED_DIR=$(basename "$SOURCE_DIR")
;;
esac
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
@@ -295,12 +295,12 @@ fi
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; then
# Use CHANGELOG_VERSION already set above, or get it if not set
if [[ -z "$CHANGELOG_VERSION" ]]; then
-CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" distro/debian/$PACKAGE/debian/changelog 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "0.1.11")
+CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" distro/debian/"$PACKAGE"/debian/changelog 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "0.1.11")
fi
# Determine source format
SOURCE_FORMAT=$(cat "distro/debian/$PACKAGE/debian/source/format" 2>/dev/null || echo "3.0 (quilt)")
# For native format, remove any Debian revision (-N) from version
# Native format cannot have revisions, so strip them if present
if [[ "$SOURCE_FORMAT" == *"native"* ]] && [[ "$CHANGELOG_VERSION" == *"-"* ]]; then
@@ -308,26 +308,26 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
CHANGELOG_VERSION=$(echo "$CHANGELOG_VERSION" | sed 's/-[0-9]*$//')
echo " Warning: Removed Debian revision from version for native format: $CHANGELOG_VERSION"
fi
if [[ "$SOURCE_FORMAT" == *"native"* ]]; then
echo " - Native format detected: creating combined tarball"
VERSION="$CHANGELOG_VERSION"
TEMP_DIR=$(mktemp -d)
-trap "rm -rf $TEMP_DIR" EXIT
+trap 'rm -rf $TEMP_DIR' EXIT
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
SOURCE_DIR=""
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
if grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
if [[ -n "$GIT_URL" ]]; then
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
SOURCE_DIR="$TEMP_DIR/dms-git-source"
-if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
+if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null ||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
cd "$SOURCE_DIR"
if [[ -n "$GIT_REVISION" ]]; then
git checkout "$GIT_REVISION" 2>/dev/null || true
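The `trap` rewrites above (double quotes to single quotes) are among the ShellCheck-driven fixes: with single quotes, `$TEMP_DIR` is expanded when the trap fires rather than when it is declared, so cleanup still targets the right path even if the variable changes later. A minimal illustration:

```bash
TEMP_DIR=$(mktemp -d)
# Single quotes: the command is stored verbatim and $TEMP_DIR is
# re-evaluated when the EXIT trap actually runs.
trap 'rm -rf "$TEMP_DIR"' EXIT
```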
@@ -341,19 +341,19 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
fi fi
fi fi
elif grep -q "download_url" "distro/debian/$PACKAGE/_service" && [[ "$PACKAGE" != "dms-git" ]]; then elif grep -q "download_url" "distro/debian/$PACKAGE/_service" && [[ "$PACKAGE" != "dms-git" ]]; then
ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" | \ ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" |
grep '<param name="path">' | \ grep '<param name="path">' |
sed 's/.*<param name="path">\(.*\)<\/param>.*/\1/') sed 's/.*<param name="path">\(.*\)<\/param>.*/\1/')
SOURCE_PATH="" SOURCE_PATH=""
for path in $ALL_PATHS; do for path in $ALL_PATHS; do
if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" && \ if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" &&
! echo "$path" | grep -qE "(distropkg|binary)"; then ! echo "$path" | grep -qE "(distropkg|binary)"; then
SOURCE_PATH="$path" SOURCE_PATH="$path"
break break
fi fi
done done
if [[ -z "$SOURCE_PATH" ]]; then if [[ -z "$SOURCE_PATH" ]]; then
for path in $ALL_PATHS; do for path in $ALL_PATHS; do
if echo "$path" | grep -qE "\.tar\.(gz|xz|bz2)$"; then if echo "$path" | grep -qE "\.tar\.(gz|xz|bz2)$"; then
@@ -362,12 +362,12 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
fi fi
done done
fi fi
if [[ -n "$SOURCE_PATH" ]]; then if [[ -n "$SOURCE_PATH" ]]; then
SOURCE_BLOCK=$(awk -v target="$SOURCE_PATH" ' SOURCE_BLOCK=$(awk -v target="$SOURCE_PATH" '
/<service name="download_url">/ { in_block=1; block="" } /<service name="download_url">/ { in_block=1; block="" }
in_block { block=block"\n"$0 } in_block { block=block"\n"$0 }
/<\/service>/ { /<\/service>/ {
if (in_block && block ~ target) { if (in_block && block ~ target) {
print block print block
exit exit
@@ -375,18 +375,18 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
in_block=0 in_block=0
} }
' "distro/debian/$PACKAGE/_service") ' "distro/debian/$PACKAGE/_service")
URL_PROTOCOL=$(echo "$SOURCE_BLOCK" | grep "protocol" | sed 's/.*<param name="protocol">\(.*\)<\/param>.*/\1/' | head -1) URL_PROTOCOL=$(echo "$SOURCE_BLOCK" | grep "protocol" | sed 's/.*<param name="protocol">\(.*\)<\/param>.*/\1/' | head -1)
URL_HOST=$(echo "$SOURCE_BLOCK" | grep "host" | sed 's/.*<param name="host">\(.*\)<\/param>.*/\1/' | head -1) URL_HOST=$(echo "$SOURCE_BLOCK" | grep "host" | sed 's/.*<param name="host">\(.*\)<\/param>.*/\1/' | head -1)
URL_PATH="$SOURCE_PATH" URL_PATH="$SOURCE_PATH"
fi fi
if [[ -n "$URL_PROTOCOL" && -n "$URL_HOST" && -n "$URL_PATH" ]]; then if [[ -n "$URL_PROTOCOL" && -n "$URL_HOST" && -n "$URL_PATH" ]]; then
SOURCE_URL="${URL_PROTOCOL}://${URL_HOST}${URL_PATH}" SOURCE_URL="${URL_PROTOCOL}://${URL_HOST}${URL_PATH}"
echo " Downloading source from: $SOURCE_URL" echo " Downloading source from: $SOURCE_URL"
if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null || \ if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null ||
curl -L -f -s -o "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null; then curl -L -f -s -o "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null; then
cd "$TEMP_DIR" cd "$TEMP_DIR"
if [[ "$SOURCE_URL" == *.tar.xz ]]; then if [[ "$SOURCE_URL" == *.tar.xz ]]; then
tar -xJf source-archive tar -xJf source-archive
@@ -414,7 +414,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
fi fi
fi fi
fi fi
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
echo "Error: Could not determine or obtain source for $PACKAGE" echo "Error: Could not determine or obtain source for $PACKAGE"
echo "SOURCE_DIR: $SOURCE_DIR" echo "SOURCE_DIR: $SOURCE_DIR"
@@ -424,15 +424,15 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
fi fi
exit 1 exit 1
fi fi
echo " Found source directory: $SOURCE_DIR" echo " Found source directory: $SOURCE_DIR"
# Vendor Go dependencies for dms-git # Vendor Go dependencies for dms-git
if [[ "$PACKAGE" == "dms-git" ]] && [[ -d "$SOURCE_DIR/core" ]]; then if [[ "$PACKAGE" == "dms-git" ]] && [[ -d "$SOURCE_DIR/core" ]]; then
echo " - Vendoring Go dependencies for offline OBS build..." echo " - Vendoring Go dependencies for offline OBS build..."
cd "$SOURCE_DIR/core" cd "$SOURCE_DIR/core"
if ! command -v go &> /dev/null; then if ! command -v go &>/dev/null; then
echo "ERROR: Go not found. Install Go to vendor dependencies." echo "ERROR: Go not found. Install Go to vendor dependencies."
echo " Install: sudo apt-get install golang-go (Debian/Ubuntu)" echo " Install: sudo apt-get install golang-go (Debian/Ubuntu)"
echo " or: sudo dnf install golang (Fedora)" echo " or: sudo dnf install golang (Fedora)"
@@ -454,7 +454,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
# Create OpenSUSE-compatible source tarballs BEFORE adding debian/ directory # Create OpenSUSE-compatible source tarballs BEFORE adding debian/ directory
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
echo " - Creating OpenSUSE-compatible source tarballs" echo " - Creating OpenSUSE-compatible source tarballs"
SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1) SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1)
if [[ -z "$SOURCE0" && "$PACKAGE" == "dms-git" ]]; then if [[ -z "$SOURCE0" && "$PACKAGE" == "dms-git" ]]; then
SOURCE0="dms-git-source.tar.gz" SOURCE0="dms-git-source.tar.gz"
@@ -463,68 +463,68 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
if [[ -n "$SOURCE0" ]]; then if [[ -n "$SOURCE0" ]]; then
OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX) OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX)
cd "$OBS_TARBALL_DIR" cd "$OBS_TARBALL_DIR"
case "$PACKAGE" in case "$PACKAGE" in
dms) dms)
if [[ -n "$CHANGELOG_VERSION" ]]; then if [[ -n "$CHANGELOG_VERSION" ]]; then
DMS_VERSION="$CHANGELOG_VERSION" DMS_VERSION="$CHANGELOG_VERSION"
else else
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1) DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
fi fi
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}" EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)" echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
cp -r "$SOURCE_DIR" "$EXPECTED_DIR" cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
if [[ "$SOURCE0" == *.tar.xz ]]; then if [[ "$SOURCE0" == *.tar.xz ]]; then
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
else else
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
fi fi
rm -rf "$EXPECTED_DIR" rm -rf "$EXPECTED_DIR"
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)" echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
;; ;;
dms-git) dms-git)
EXPECTED_DIR="dms-git-source" EXPECTED_DIR="dms-git-source"
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)" echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
cp -r "$SOURCE_DIR" "$EXPECTED_DIR" cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
if [[ "$SOURCE0" == *.tar.xz ]]; then if [[ "$SOURCE0" == *.tar.xz ]]; then
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
else else
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
fi fi
rm -rf "$EXPECTED_DIR" rm -rf "$EXPECTED_DIR"
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)" echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
;; ;;
*) *)
DIR_NAME=$(basename "$SOURCE_DIR") DIR_NAME=$(basename "$SOURCE_DIR")
echo " Creating $SOURCE0 (directory: $DIR_NAME)" echo " Creating $SOURCE0 (directory: $DIR_NAME)"
cp -r "$SOURCE_DIR" "$DIR_NAME" cp -r "$SOURCE_DIR" "$DIR_NAME"
if [[ "$SOURCE0" == *.tar.xz ]]; then if [[ "$SOURCE0" == *.tar.xz ]]; then
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$DIR_NAME" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$DIR_NAME" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
else else
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$DIR_NAME" tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
fi fi
rm -rf "$DIR_NAME" rm -rf "$DIR_NAME"
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)" echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
;; ;;
esac esac
cd "$REPO_ROOT" cd "$REPO_ROOT"
rm -rf "$OBS_TARBALL_DIR" rm -rf "$OBS_TARBALL_DIR"
echo " - OpenSUSE source tarballs created" echo " - OpenSUSE source tarballs created"
fi fi
cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/" cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/"
fi fi
if [[ "$UPLOAD_DEBIAN" == true ]]; then if [[ "$UPLOAD_DEBIAN" == true ]]; then
echo " Copying debian/ directory into source" echo " Copying debian/ directory into source"
cp -r "distro/debian/$PACKAGE/debian" "$SOURCE_DIR/" cp -r "distro/debian/$PACKAGE/debian" "$SOURCE_DIR/"
# For dms, rename directory to match what debian/rules expects # For dms, rename directory to match what debian/rules expects
# debian/rules uses UPSTREAM_VERSION which is the full version from changelog # debian/rules uses UPSTREAM_VERSION which is the full version from changelog
if [[ "$PACKAGE" == "dms" ]]; then if [[ "$PACKAGE" == "dms" ]]; then
@@ -542,15 +542,15 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
            fi
        fi
    fi
    rm -f "$WORK_DIR/$COMBINED_TARBALL"
    echo " Creating combined tarball: $COMBINED_TARBALL"
    cd "$(dirname "$SOURCE_DIR")"
    TARBALL_BASE=$(basename "$SOURCE_DIR")
    tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
    cd "$REPO_ROOT"
    if [[ "$PACKAGE" == "dms" ]]; then
        TARBALL_DIR=$(tar -tzf "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null | head -1 | cut -d'/' -f1)
        EXPECTED_TARBALL_DIR="DankMaterialShell-${VERSION}"
@@ -563,10 +563,10 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
cd "$REPO_ROOT" cd "$REPO_ROOT"
fi fi
fi fi
TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null) TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1) TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
# Extract Build-Depends from debian/control using awk for proper multi-line parsing # Extract Build-Depends from debian/control using awk for proper multi-line parsing
if [[ -f "$REPO_ROOT/distro/debian/$PACKAGE/debian/control" ]]; then if [[ -f "$REPO_ROOT/distro/debian/$PACKAGE/debian/control" ]]; then
BUILD_DEPS=$(awk ' BUILD_DEPS=$(awk '
@@ -591,8 +591,8 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
    else
        BUILD_DEPS="debhelper-compat (= 13)"
    fi
    cat >"$WORK_DIR/$PACKAGE.dsc" <<EOF
Format: 3.0 (native)
Source: $PACKAGE
Binary: $PACKAGE
@@ -603,7 +603,7 @@ Build-Depends: $BUILD_DEPS
Files:
 $TARBALL_MD5 $TARBALL_SIZE $COMBINED_TARBALL
EOF
    echo " - Generated $PACKAGE.dsc for native format"
    fi
else
@@ -613,12 +613,12 @@ EOF
    else
        VERSION="${CHANGELOG_VERSION}-1"
    fi
    echo " - Quilt format detected: creating debian.tar.gz"
    tar -czf "$WORK_DIR/debian.tar.gz" -C "distro/debian/$PACKAGE" debian/
    echo " - Generating $PACKAGE.dsc for quilt format"
    cat >"$WORK_DIR/$PACKAGE.dsc" <<EOF
Format: 3.0 (quilt)
Source: $PACKAGE
Binary: $PACKAGE
@@ -671,7 +671,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
if [[ -n "$OLD_DSC_VERSION" ]] && [[ "$OLD_DSC_BASE" == "$CHANGELOG_BASE" ]]; then if [[ -n "$OLD_DSC_VERSION" ]] && [[ "$OLD_DSC_BASE" == "$CHANGELOG_BASE" ]]; then
if [[ "$IS_MANUAL" == true ]]; then if [[ "$IS_MANUAL" == true ]]; then
echo "==> Detected rebuild of same base version $CHANGELOG_BASE, incrementing version" echo "==> Detected rebuild of same base version $CHANGELOG_BASE, incrementing version"
# If REBUILD_RELEASE is set, use that number directly # If REBUILD_RELEASE is set, use that number directly
if [[ -n "${REBUILD_RELEASE:-}" ]]; then if [[ -n "${REBUILD_RELEASE:-}" ]]; then
if [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)\+git([0-9]+)(\.[a-f0-9]+)?$ ]]; then if [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)\+git([0-9]+)(\.[a-f0-9]+)?$ ]]; then
@@ -739,18 +739,18 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
echo " Warning: Could not parse version format, appending ppa1: $CHANGELOG_VERSION -> $NEW_VERSION" echo " Warning: Could not parse version format, appending ppa1: $CHANGELOG_VERSION -> $NEW_VERSION"
fi fi
fi fi
if [[ -z "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR/debian" ]]; then if [[ -z "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR/debian" ]]; then
echo " Error: Source directory with debian/ not found for version increment" echo " Error: Source directory with debian/ not found for version increment"
exit 1 exit 1
fi fi
SOURCE_CHANGELOG="$SOURCE_DIR/debian/changelog" SOURCE_CHANGELOG="$SOURCE_DIR/debian/changelog"
if [[ ! -f "$SOURCE_CHANGELOG" ]]; then if [[ ! -f "$SOURCE_CHANGELOG" ]]; then
echo " Error: Changelog not found in source directory: $SOURCE_CHANGELOG" echo " Error: Changelog not found in source directory: $SOURCE_CHANGELOG"
exit 1 exit 1
fi fi
REPO_CHANGELOG="$REPO_ROOT/distro/debian/$PACKAGE/debian/changelog" REPO_CHANGELOG="$REPO_ROOT/distro/debian/$PACKAGE/debian/changelog"
TEMP_CHANGELOG=$(mktemp) TEMP_CHANGELOG=$(mktemp)
{ {
@@ -763,24 +763,24 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
if [[ -f "$REPO_CHANGELOG" ]]; then if [[ -f "$REPO_CHANGELOG" ]]; then
OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1) OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1)
if [[ -n "$OLD_ENTRY_START" ]]; then if [[ -n "$OLD_ENTRY_START" ]]; then
tail -n +$OLD_ENTRY_START "$REPO_CHANGELOG" tail -n +"$OLD_ENTRY_START" "$REPO_CHANGELOG"
fi fi
fi fi
} > "$TEMP_CHANGELOG" } >"$TEMP_CHANGELOG"
cp "$TEMP_CHANGELOG" "$SOURCE_CHANGELOG" cp "$TEMP_CHANGELOG" "$SOURCE_CHANGELOG"
rm -f "$TEMP_CHANGELOG" rm -f "$TEMP_CHANGELOG"
CHANGELOG_VERSION="$NEW_VERSION" CHANGELOG_VERSION="$NEW_VERSION"
VERSION="$NEW_VERSION" VERSION="$NEW_VERSION"
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz" COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
for old_tarball in "${PACKAGE}"_*.tar.gz; do for old_tarball in "${PACKAGE}"_*.tar.gz; do
if [[ -f "$old_tarball" ]] && [[ "$old_tarball" != "${PACKAGE}_${NEW_VERSION}.tar.gz" ]]; then if [[ -f "$old_tarball" ]] && [[ "$old_tarball" != "${PACKAGE}_${NEW_VERSION}.tar.gz" ]]; then
echo " Removing old tarball from OBS: $old_tarball" echo " Removing old tarball from OBS: $old_tarball"
osc rm -f "$old_tarball" 2>/dev/null || rm -f "$old_tarball" osc rm -f "$old_tarball" 2>/dev/null || rm -f "$old_tarball"
fi fi
done done
if [[ "$PACKAGE" == "dms" ]] && [[ -f "$WORK_DIR/dms-source.tar.gz" ]]; then if [[ "$PACKAGE" == "dms" ]] && [[ -f "$WORK_DIR/dms-source.tar.gz" ]]; then
echo " Recreating dms-source.tar.gz with new directory name for incremented version" echo " Recreating dms-source.tar.gz with new directory name for incremented version"
EXPECTED_SOURCE_DIR="DankMaterialShell-${NEW_VERSION}" EXPECTED_SOURCE_DIR="DankMaterialShell-${NEW_VERSION}"
@@ -810,7 +810,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
cd "$REPO_ROOT" cd "$REPO_ROOT"
rm -rf "$TEMP_SOURCE_DIR" rm -rf "$TEMP_SOURCE_DIR"
fi fi
echo " Recreating tarball with new version: $COMBINED_TARBALL" echo " Recreating tarball with new version: $COMBINED_TARBALL"
if [[ -n "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR/debian" ]]; then if [[ -n "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR/debian" ]]; then
if [[ "$PACKAGE" == "dms" ]]; then if [[ "$PACKAGE" == "dms" ]]; then
@@ -848,10 +848,10 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
if [[ -f "$REPO_CHANGELOG" ]]; then if [[ -f "$REPO_CHANGELOG" ]]; then
OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1) OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1)
if [[ -n "$OLD_ENTRY_START" ]]; then if [[ -n "$OLD_ENTRY_START" ]]; then
tail -n +$OLD_ENTRY_START "$REPO_CHANGELOG" tail -n +"$OLD_ENTRY_START" "$REPO_CHANGELOG"
fi fi
fi fi
} > "$TEMP_CHANGELOG" } >"$TEMP_CHANGELOG"
cp "$TEMP_CHANGELOG" "$EXPECTED_DIR/debian/changelog" cp "$TEMP_CHANGELOG" "$EXPECTED_DIR/debian/changelog"
rm -f "$TEMP_CHANGELOG" rm -f "$TEMP_CHANGELOG"
fi fi
@@ -867,19 +867,19 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
            fi
        fi
    fi
    rm -f "$WORK_DIR/$COMBINED_TARBALL"
    echo " Creating combined tarball: $COMBINED_TARBALL"
    cd "$(dirname "$SOURCE_DIR")"
    TARBALL_BASE=$(basename "$SOURCE_DIR")
    tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
    cd "$REPO_ROOT"
    fi
    TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
    TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
    # Extract Build-Depends from debian/control using awk for proper multi-line parsing
    if [[ -f "$REPO_ROOT/distro/debian/$PACKAGE/debian/control" ]]; then
        BUILD_DEPS=$(awk '
@@ -904,8 +904,8 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
    else
        BUILD_DEPS="debhelper-compat (= 13)"
    fi
    cat >"$WORK_DIR/$PACKAGE.dsc" <<EOF
Format: 3.0 (native)
Source: $PACKAGE
Binary: $PACKAGE
@@ -954,11 +954,11 @@ ls -la 2>&1 | head -20
echo "==> Staging changes" echo "==> Staging changes"
echo "Files to upload:" echo "Files to upload:"
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$UPLOAD_OPENSUSE" == true ]]; then if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$UPLOAD_OPENSUSE" == true ]]; then
ls -lh *.tar.gz *.tar.xz *.tar *.spec *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}' ls -lh ./*.tar.gz ./*.tar.xz ./*.tar ./*.spec ./*.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
elif [[ "$UPLOAD_DEBIAN" == true ]]; then elif [[ "$UPLOAD_DEBIAN" == true ]]; then
ls -lh *.tar.gz *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}' ls -lh ./*.tar.gz ./*.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
ls -lh *.tar.gz *.tar.xz *.tar *.spec _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}' ls -lh ./*.tar.gz ./*.tar.xz ./*.tar ./*.spec _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
fi fi
echo "" echo ""

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
# Generic source package builder for DMS PPA packages
# Usage: ./create-source.sh <package-dir> [ubuntu-series]
#
@@ -54,7 +54,7 @@ PACKAGE_PARENT=$(dirname "$PACKAGE_DIR")
# Create temporary working directory (like OBS)
TEMP_WORK_DIR=$(mktemp -d -t ppa_build_work_XXXXXX)
trap 'rm -rf "$TEMP_WORK_DIR"' EXIT

info "Building source package for: $PACKAGE_NAME"
info "Package directory: $PACKAGE_DIR"
@@ -79,7 +79,7 @@ done
# Verify GPG key is set up
info "Checking GPG key setup..."
if ! gpg --list-secret-keys &>/dev/null; then
    error "No GPG secret keys found. Please set up GPG first!"
    error "See GPG_SETUP.md for instructions"
    exit 1
@@ -88,7 +88,7 @@ fi
success "GPG key found" success "GPG key found"
# Check if debuild is installed # Check if debuild is installed
if ! command -v debuild &> /dev/null; then if ! command -v debuild &>/dev/null; then
error "debuild not found. Install devscripts:" error "debuild not found. Install devscripts:"
error " sudo dnf install devscripts" error " sudo dnf install devscripts"
exit 1 exit 1
@@ -137,7 +137,7 @@ cd "$WORK_PACKAGE_DIR"
get_latest_tag() {
    local repo="$1"
    # Try GitHub API first (faster)
    if command -v curl &>/dev/null; then
        LATEST_TAG=$(curl -s "https://api.github.com/repos/$repo/releases/latest" 2>/dev/null | grep '"tag_name":' | sed 's/.*"tag_name": "\(.*\)".*/\1/' | head -1)
        if [ -n "$LATEST_TAG" ]; then
            echo "$LATEST_TAG" | sed 's/^v//'
@@ -175,69 +175,69 @@ fi
# Special handling for known packages
case "$PACKAGE_NAME" in
    dms-git)
        IS_GIT_PACKAGE=true
        GIT_REPO="AvengeMedia/DankMaterialShell"
        SOURCE_DIR="dms-git-repo"
        ;;
    dms)
        GIT_REPO="AvengeMedia/DankMaterialShell"
        info "Downloading pre-built binaries and source for dms..."
        # Get version from changelog (remove ppa suffix for both quilt and native formats)
        # Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
        VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
        # Download amd64 binary (will be included in source package)
        if [ ! -f "dms-distropkg-amd64.gz" ]; then
            info "Downloading dms binary for amd64..."
            if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-distropkg-amd64.gz"; then
                success "amd64 binary downloaded"
            else
                error "Failed to download dms-distropkg-amd64.gz"
                exit 1
            fi
        fi
        # Download source tarball for QML files
        if [ ! -f "dms-source.tar.gz" ]; then
            info "Downloading dms source for QML files..."
            if wget -O dms-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
                success "source tarball downloaded"
            else
                error "Failed to download dms-source.tar.gz"
                exit 1
            fi
        fi
        ;;
    dms-greeter)
        GIT_REPO="AvengeMedia/DankMaterialShell"
        info "Downloading source for dms-greeter..."
        VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
        if [ ! -f "dms-greeter-source.tar.gz" ]; then
            info "Downloading dms-greeter source..."
            if wget -O dms-greeter-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
                success "source tarball downloaded"
            else
                error "Failed to download dms-greeter-source.tar.gz"
                exit 1
            fi
        fi
        ;;
    danksearch)
        # danksearch uses pre-built binary from releases
        GIT_REPO="AvengeMedia/danksearch"
        ;;
    dgop)
        # dgop uses pre-built binary from releases
        GIT_REPO="AvengeMedia/dgop"
        ;;
esac

# Handle git packages
if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
    info "Detected git package: $PACKAGE_NAME"
    # Determine source directory name
    if [ -z "$SOURCE_DIR" ]; then
        # Default: use package name without -git suffix + -source or -repo
@@ -252,7 +252,7 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
SOURCE_DIR="${BASE_NAME}-source" SOURCE_DIR="${BASE_NAME}-source"
fi fi
fi fi
# Always clone fresh source to get latest commit info # Always clone fresh source to get latest commit info
info "Cloning $GIT_REPO from GitHub (getting latest commit info)..." info "Cloning $GIT_REPO from GitHub (getting latest commit info)..."
TEMP_CLONE=$(mktemp -d) TEMP_CLONE=$(mktemp -d)
@@ -260,7 +260,7 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
    # Get git commit info from fresh clone
    GIT_COMMIT_HASH=$(cd "$TEMP_CLONE" && git rev-parse --short HEAD)
    GIT_COMMIT_COUNT=$(cd "$TEMP_CLONE" && git rev-list --count HEAD)
    # Get upstream version from latest git tag (e.g., 0.2.1)
    # Sort all tags by version and get the latest one (not just the one reachable from HEAD)
    UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git tag -l "v*" | sed 's/^v//' | sort -V | tail -1)
@@ -272,36 +272,36 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
        # Last resort: use git describe
        UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "0.0.1")
    fi
    # Verify we got valid commit info
    if [ -z "$GIT_COMMIT_COUNT" ] || [ "$GIT_COMMIT_COUNT" = "0" ]; then
        error "Failed to get commit count from $GIT_REPO"
        rm -rf "$TEMP_CLONE"
        exit 1
    fi
    if [ -z "$GIT_COMMIT_HASH" ]; then
        error "Failed to get commit hash from $GIT_REPO"
        rm -rf "$TEMP_CLONE"
        exit 1
    fi
    success "Got commit info: $GIT_COMMIT_COUNT ($GIT_COMMIT_HASH), upstream: $UPSTREAM_VERSION"
    # Update changelog with git commit info
    info "Updating changelog with git commit info..."
    # Format: 0.2.1+git705.fdbb86appa1
    # Check if we're rebuilding the same commit (increment PPA number if so)
    BASE_VERSION="${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}"
    CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
    # Use REBUILD_RELEASE if provided, otherwise auto-increment
    if [[ -n "${REBUILD_RELEASE:-}" ]]; then
        PPA_NUM=$REBUILD_RELEASE
        info "Using REBUILD_RELEASE=$REBUILD_RELEASE for PPA number"
    else
        PPA_NUM=1
        # If current version matches the base version, increment PPA number
        # Escape special regex characters in BASE_VERSION for pattern matching
        ESCAPED_BASE=$(echo "$BASE_VERSION" | sed 's/\./\\./g' | sed 's/+/\\+/g')
@@ -318,36 +318,36 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
info "New commit or first build, using PPA number $PPA_NUM" info "New commit or first build, using PPA number $PPA_NUM"
fi fi
fi fi
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}" NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
# Use sed to update changelog (non-interactive, faster) # Use sed to update changelog (non-interactive, faster)
# Get current changelog content - find the next package header line (starts with package name) # Get current changelog content - find the next package header line (starts with package name)
# Skip the first entry entirely by finding the second occurrence of the package name at start of line # Skip the first entry entirely by finding the second occurrence of the package name at start of line
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1) OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
if [ -n "$OLD_ENTRY_START" ]; then if [ -n "$OLD_ENTRY_START" ]; then
# Found second entry, use everything from there # Found second entry, use everything from there
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog) CHANGELOG_CONTENT=$(tail -n +"$OLD_ENTRY_START" debian/changelog)
else else
# No second entry found, changelog will only have new entry # No second entry found, changelog will only have new entry
CHANGELOG_CONTENT="" CHANGELOG_CONTENT=""
fi fi
# Create new changelog entry with proper format # Create new changelog entry with proper format
CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium
* Git snapshot (commit ${GIT_COMMIT_COUNT}: ${GIT_COMMIT_HASH}) * Git snapshot (commit ${GIT_COMMIT_COUNT}: ${GIT_COMMIT_HASH})
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)" -- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
# Write new changelog (new entry, blank line, then old entries) # Write new changelog (new entry, blank line, then old entries)
echo "$CHANGELOG_ENTRY" > debian/changelog echo "$CHANGELOG_ENTRY" >debian/changelog
if [ -n "$CHANGELOG_CONTENT" ]; then if [ -n "$CHANGELOG_CONTENT" ]; then
echo "" >> debian/changelog echo "" >>debian/changelog
echo "$CHANGELOG_CONTENT" >> debian/changelog echo "$CHANGELOG_CONTENT" >>debian/changelog
fi fi
success "Version updated to $NEW_VERSION" success "Version updated to $NEW_VERSION"
# Now clone to source directory (without .git for inclusion in package) # Now clone to source directory (without .git for inclusion in package)
rm -rf "$SOURCE_DIR" rm -rf "$SOURCE_DIR"
cp -r "$TEMP_CLONE" "$SOURCE_DIR" cp -r "$TEMP_CLONE" "$SOURCE_DIR"
@@ -355,8 +355,8 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
    # Save version info for dms-git build process
    if [ "$PACKAGE_NAME" = "dms-git" ]; then
        info "Saving version info to .dms-version for build process..."
        echo "VERSION=${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}" >"$SOURCE_DIR/.dms-version"
        echo "COMMIT=${GIT_COMMIT_HASH}" >>"$SOURCE_DIR/.dms-version"
        success "Version info saved: ${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}"
        # Vendor Go dependencies (Launchpad has no internet access)
@@ -397,7 +397,7 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
            /^\[source\.crates-io\]/ { printing=1 }
            printing { print }
            /^directory = "vendor"$/ { exit }
        ' >.cargo/config.toml
        # Verify vendor directory was created
        if [ ! -d "vendor" ]; then
@@ -428,7 +428,6 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
            fi
        fi
        success "Source prepared for packaging"
    else
        error "Failed to clone $GIT_REPO"
@@ -439,15 +438,15 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
elif [ -n "$GIT_REPO" ]; then elif [ -n "$GIT_REPO" ]; then
info "Detected stable package: $PACKAGE_NAME" info "Detected stable package: $PACKAGE_NAME"
info "Fetching latest tag from $GIT_REPO..." info "Fetching latest tag from $GIT_REPO..."
LATEST_TAG=$(get_latest_tag "$GIT_REPO") LATEST_TAG=$(get_latest_tag "$GIT_REPO")
if [ -n "$LATEST_TAG" ]; then if [ -n "$LATEST_TAG" ]; then
# Check source format - native packages can't use dashes # Check source format - native packages can't use dashes
SOURCE_FORMAT=$(cat debian/source/format 2>/dev/null | head -1 || echo "3.0 (quilt)") SOURCE_FORMAT=$(head -1 debian/source/format 2>/dev/null || echo "3.0 (quilt)")
# Get current version to check if we need to increment PPA number # Get current version to check if we need to increment PPA number
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "") CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
# Use REBUILD_RELEASE if provided, otherwise auto-increment # Use REBUILD_RELEASE if provided, otherwise auto-increment
if [[ -n "${REBUILD_RELEASE:-}" ]]; then if [[ -n "${REBUILD_RELEASE:-}" ]]; then
PPA_NUM=$REBUILD_RELEASE PPA_NUM=$REBUILD_RELEASE
@@ -498,11 +497,11 @@ elif [ -n "$GIT_REPO" ]; then
        # Get current changelog content - find the next package header line
        OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
        if [ -n "$OLD_ENTRY_START" ]; then
            CHANGELOG_CONTENT=$(tail -n +"$OLD_ENTRY_START" debian/changelog)
        else
            CHANGELOG_CONTENT=""
        fi
        # Create appropriate changelog message
        if [ "$PPA_NUM" -gt 1 ]; then
            CHANGELOG_MSG="Rebuild for packaging fixes (ppa${PPA_NUM})"
@@ -515,10 +514,10 @@ elif [ -n "$GIT_REPO" ]; then
  * ${CHANGELOG_MSG}
 -- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
        echo "$CHANGELOG_ENTRY" >debian/changelog
        if [ -n "$CHANGELOG_CONTENT" ]; then
            echo "" >>debian/changelog
            echo "$CHANGELOG_CONTENT" >>debian/changelog
        fi
        success "Version updated to $NEW_VERSION"
    else
@@ -532,47 +531,47 @@ fi
# Handle packages that need pre-built binaries downloaded
cd "$PACKAGE_DIR"
case "$PACKAGE_NAME" in
    danksearch)
        info "Downloading pre-built binaries for danksearch..."
        # Get version from changelog (remove ppa suffix for both quilt and native formats)
        # Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
        VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
        # Download both amd64 and arm64 binaries (will be included in source package)
        # Launchpad can't download during build, so we include both architectures
        if [ ! -f "dsearch-amd64" ]; then
            info "Downloading dsearch binary for amd64..."
            if wget -O dsearch-amd64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-amd64.gz"; then
                gunzip dsearch-amd64.gz
                chmod +x dsearch-amd64
                success "amd64 binary downloaded"
            else
                error "Failed to download dsearch-amd64.gz"
                exit 1
            fi
        fi
        if [ ! -f "dsearch-arm64" ]; then
            info "Downloading dsearch binary for arm64..."
            if wget -O dsearch-arm64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-arm64.gz"; then
                gunzip dsearch-arm64.gz
                chmod +x dsearch-arm64
                success "arm64 binary downloaded"
            else
                error "Failed to download dsearch-arm64.gz"
                exit 1
            fi
        fi
        ;;
    dgop)
        # dgop binary should already be committed in the repo
        if [ ! -f "dgop" ]; then
            warn "dgop binary not found - should be committed to repo"
        fi
        ;;
esac
cd - >/dev/null

# Check if this version already exists on PPA (only in CI environment)
if command -v rmadison >/dev/null 2>&1; then
@@ -586,10 +585,10 @@ if command -v rmadison >/dev/null 2>&1; then
cd "$PACKAGE_DIR" cd "$PACKAGE_DIR"
# Still clean up extracted sources # Still clean up extracted sources
case "$PACKAGE_NAME" in case "$PACKAGE_NAME" in
dms-git) dms-git)
rm -rf DankMaterialShell-* rm -rf DankMaterialShell-*
success "Cleaned up DankMaterialShell-*/ directory" success "Cleaned up DankMaterialShell-*/ directory"
;; ;;
esac esac
exit 0 exit 0
fi fi
@@ -621,11 +620,11 @@ if yes | DEBIAN_FRONTEND=noninteractive debuild -S $DEBUILD_SOURCE_FLAG -d; then
    # Copy build artifacts back to parent directory
    info "Copying build artifacts to $PACKAGE_PARENT..."
    cp -v "$TEMP_WORK_DIR"/"${SOURCE_NAME}"_"${CHANGELOG_VERSION}"* "$PACKAGE_PARENT/" 2>/dev/null || true
    # List generated files
    info "Generated files in $PACKAGE_PARENT:"
    ls -lh "$PACKAGE_PARENT"/"${SOURCE_NAME}"_"${CHANGELOG_VERSION}"* 2>/dev/null || true
    # Show what to do next
    echo
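The git-snapshot versions this script generates (e.g. `0.2.1+git705.fdbb86appa1`) can be reproduced by hand with the same git commands it uses; a minimal sketch, assuming a checkout with `v*` tags:

```bash
upstream=$(git tag -l 'v*' | sed 's/^v//' | sort -V | tail -1)  # latest tagged release
count=$(git rev-list --count HEAD)                              # total commits on HEAD
hash=$(git rev-parse --short HEAD)                              # abbreviated commit id
echo "${upstream}+git${count}.${hash}ppa1"                      # ppa1 = first build of this commit
```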

View File

@@ -12,7 +12,7 @@ RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

info() { echo -e "${BLUE}[INFO]${NC} $1"; }
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
@@ -58,23 +58,18 @@ CHANGES_FILE=$(realpath "$CHANGES_FILE")
info "Uploading to PPA: ppa:avengemedia/$PPA_NAME" info "Uploading to PPA: ppa:avengemedia/$PPA_NAME"
info "Changes file: $CHANGES_FILE" info "Changes file: $CHANGES_FILE"
# Check if dput or lftp is installed # Check if dput is installed
UPLOAD_METHOD="" if command -v dput &>/dev/null; then
if command -v dput &> /dev/null; then info "dput found"
UPLOAD_METHOD="dput"
elif command -v lftp &> /dev/null; then
UPLOAD_METHOD="lftp"
warn "dput not found, using lftp as fallback"
else else
error "Neither dput nor lftp found. Install one with:" error "dput not found. Install with:"
error " sudo dnf install dput-ng # Preferred but broken on Fedora" error " sudo dnf install dput-ng"
error " sudo dnf install lftp # Alternative upload method"
exit 1 exit 1
fi fi
# Check if ~/.dput.cf exists # Check if ~/.dput.cf exists
if [ ! -f "$HOME/.dput.cf" ]; then if [ ! -f "$HOME/.dput.cf" ]; then
error "~/.dput.cf not found!" error "$HOME/.dput.cf not found!"
echo echo
info "Create it from template:" info "Create it from template:"
echo " cp $(dirname "$0")/../dput.cf.template ~/.dput.cf" echo " cp $(dirname "$0")/../dput.cf.template ~/.dput.cf"
@@ -160,7 +155,7 @@ elif [ "$UPLOAD_METHOD" = "lftp" ]; then
    # Use lftp to upload to Launchpad PPA
    CHANGES_DIR=$(dirname "$CHANGES_FILE")
    CHANGES_BASENAME=$(basename "$CHANGES_FILE")

    # Extract files to upload from .changes file
    FILES_TO_UPLOAD=("$CHANGES_BASENAME")
    while IFS= read -r line; do
@@ -168,14 +163,14 @@ elif [ "$UPLOAD_METHOD" = "lftp" ]; then
            FILES_TO_UPLOAD+=("${BASH_REMATCH[1]}")
        fi
    done < "$CHANGES_FILE"

    # Build lftp command to upload all files
    LFTP_COMMANDS="set ftp:ssl-allow no; open ftp://ppa.launchpad.net; user anonymous ''; cd ~avengemedia/ubuntu/$PPA_NAME/;"
    for file in "${FILES_TO_UPLOAD[@]}"; do
        LFTP_COMMANDS="$LFTP_COMMANDS put '$CHANGES_DIR/$file';"
    done
    LFTP_COMMANDS="$LFTP_COMMANDS bye"

    if echo "$LFTP_COMMANDS" | lftp; then
        UPLOAD_SUCCESS=true
    fi
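The loop above collects file names from the `.changes` `Files:` stanza, where each entry is `md5sum size section priority filename`. A standalone sketch of the same idea; the pattern shown here is illustrative, not copied from the script:

```bash
FILES=()
while IFS= read -r line; do
    # indented stanza line: 32-hex md5, size, section, priority, then the file name
    if [[ $line =~ ^[[:space:]][0-9a-f]{32}[[:space:]]+[0-9]+[[:space:]]+[^[:space:]]+[[:space:]]+[^[:space:]]+[[:space:]]+(.+)$ ]]; then
        FILES+=("${BASH_REMATCH[1]}")
    fi
done <"$CHANGES_FILE"
printf 'will upload: %s\n' "${FILES[@]}"
```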

View File

@@ -12,7 +12,7 @@ RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

info() { echo -e "${BLUE}[INFO]${NC} $1"; }
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
@@ -98,24 +98,20 @@ info "Step 2: Uploading to PPA..."
# Check if using lftp (for all PPAs) or dput
if [ "$PPA_NAME" = "danklinux" ] || [ "$PPA_NAME" = "dms" ] || [ "$PPA_NAME" = "dms-git" ]; then
    warn "Using lftp for upload"

    # Find all files to upload
    BUILD_DIR=$(dirname "$CHANGES_FILE")
    CHANGES_BASENAME=$(basename "$CHANGES_FILE")
    DSC_FILE="${CHANGES_BASENAME/_source.changes/.dsc}"
    TARBALL="${CHANGES_BASENAME/_source.changes/.tar.xz}"
    BUILDINFO="${CHANGES_BASENAME/_source.changes/_source.buildinfo}"

    # Check all files exist
    MISSING_FILES=()
    [ ! -f "$BUILD_DIR/$DSC_FILE" ] && MISSING_FILES+=("$DSC_FILE")
    [ ! -f "$BUILD_DIR/$TARBALL" ] && MISSING_FILES+=("$TARBALL")
    [ ! -f "$BUILD_DIR/$BUILDINFO" ] && MISSING_FILES+=("$BUILDINFO")

    if [ ${#MISSING_FILES[@]} -gt 0 ]; then
        error "Missing required files:"
        for file in "${MISSING_FILES[@]}"; do
@@ -123,17 +119,17 @@ if [ "$PPA_NAME" = "danklinux" ] || [ "$PPA_NAME" = "dms" ] || [ "$PPA_NAME" = "
        done
        exit 1
    fi

    info "Uploading files:"
    info " - $CHANGES_BASENAME"
    info " - $DSC_FILE"
    info " - $TARBALL"
    info " - $BUILDINFO"
    echo

    # lftp build dir change
    LFTP_SCRIPT=$(mktemp)
    cat >"$LFTP_SCRIPT" <<EOF
cd ~avengemedia/ubuntu/$PPA_NAME/
lcd $BUILD_DIR
mput $CHANGES_BASENAME
@@ -142,8 +138,8 @@ mput $TARBALL
mput $BUILDINFO
bye
EOF

    if lftp -d ftp://anonymous:@ppa.launchpad.net <"$LFTP_SCRIPT"; then
        success "Upload successful!"
        rm -f "$LFTP_SCRIPT"
    else
@@ -197,41 +193,41 @@ if [ "$KEEP_BUILDS" = "false" ]; then
    # Clean up downloaded binaries in package directory
    case "$PACKAGE_NAME" in
        danksearch)
            if [ -f "$PACKAGE_DIR/dsearch-amd64" ]; then
                rm -f "$PACKAGE_DIR/dsearch-amd64"
                REMOVED=$((REMOVED + 1))
            fi
            if [ -f "$PACKAGE_DIR/dsearch-arm64" ]; then
                rm -f "$PACKAGE_DIR/dsearch-arm64"
                REMOVED=$((REMOVED + 1))
            fi
            ;;
        dms)
            # Remove downloaded binaries and source
            if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
                rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
                REMOVED=$((REMOVED + 1))
            fi
            if [ -f "$PACKAGE_DIR/dms-source.tar.gz" ]; then
                rm -f "$PACKAGE_DIR/dms-source.tar.gz"
                REMOVED=$((REMOVED + 1))
            fi
            ;;
        dms-git)
            # Remove git source directory binary
            if [ -d "$PACKAGE_DIR/dms-git-repo" ]; then
                rm -rf "$PACKAGE_DIR/dms-git-repo"
                REMOVED=$((REMOVED + 1))
            fi
            ;;
        dms-greeter)
            # Remove downloaded source
            if [ -f "$PACKAGE_DIR/dms-greeter-source.tar.gz" ]; then
                rm -f "$PACKAGE_DIR/dms-greeter-source.tar.gz"
                REMOVED=$((REMOVED + 1))
            fi
            ;;
    esac

    if [ $REMOVED -gt 0 ]; then
@@ -246,4 +242,3 @@ fi
echo
success "Done!"

View File

@@ -7,21 +7,21 @@ case "$1" in
        if ! getent group greeter >/dev/null; then
            addgroup --system greeter
        fi
        if ! getent passwd greeter >/dev/null; then
            adduser --system --ingroup greeter --home /var/lib/greeter \
                --shell /bin/bash --gecos "System Greeter" greeter
        fi
        if [ -d /var/cache/dms-greeter ]; then
            chown -R greeter:greeter /var/cache/dms-greeter 2>/dev/null || true
        fi
        if [ -d /var/lib/greeter ]; then
            chown -R greeter:greeter /var/lib/greeter 2>/dev/null || true
        fi

        # Check and set graphical.target as default
        CURRENT_TARGET=$(systemctl get-default 2>/dev/null || echo "unknown")
        if [ "$CURRENT_TARGET" != "graphical.target" ]; then
            systemctl set-default graphical.target >/dev/null 2>&1 || true
@@ -29,10 +29,10 @@ case "$1" in
        else
            TARGET_STATUS="Already graphical.target ✓"
        fi

        GREETD_CONFIG="/etc/greetd/config.toml"
        CONFIG_STATUS="Not modified (already configured)"

        # Check if niri or hyprland exists
        COMPOSITOR="niri"
        if ! command -v niri >/dev/null 2>&1; then
@@ -40,7 +40,7 @@ case "$1" in
COMPOSITOR="hyprland" COMPOSITOR="hyprland"
fi fi
fi fi
# If config doesn't exist, create a default one # If config doesn't exist, create a default one
if [ ! -f "$GREETD_CONFIG" ]; then if [ ! -f "$GREETD_CONFIG" ]; then
mkdir -p /etc/greetd mkdir -p /etc/greetd
@@ -58,13 +58,13 @@ GREETD_EOF
            # Backup existing config
            BACKUP_FILE="${GREETD_CONFIG}.backup-$(date +%Y%m%d-%H%M%S)"
            cp "$GREETD_CONFIG" "$BACKUP_FILE" 2>/dev/null || true

            # Update command in default_session section
            sed -i "/^\[default_session\]/,/^\[/ s|^command =.*|command = \"/usr/bin/dms-greeter --command $COMPOSITOR\"|" "$GREETD_CONFIG"
            sed -i '/^\[default_session\]/,/^\[/ s|^user =.*|user = "greeter"|' "$GREETD_CONFIG"
            CONFIG_STATUS="Updated existing config (backed up) with $COMPOSITOR ✓"
        fi

        # Only show banner on initial install
        if [ -z "$2" ]; then
            cat << 'EOF'

View File

@@ -14,7 +14,7 @@ BASE_VERSION := $(shell echo $(UPSTREAM_VERSION) | sed 's/ppa[0-9]*$$//' | sed '
override_dh_auto_build:
	# All files are included in source package
	test -f dms-greeter-source.tar.gz || (echo "ERROR: dms-greeter-source.tar.gz not found!" && exit 1)
	# Extract source tarball
	tar -xzf dms-greeter-source.tar.gz
	# Find the extracted directory
@@ -27,22 +27,22 @@ override_dh_auto_install:
	# Install greeter files to shared data location
	mkdir -p debian/dms-greeter/usr/share/quickshell/dms-greeter
	cp -r DankMaterialShell-$(BASE_VERSION)/quickshell/* debian/dms-greeter/usr/share/quickshell/dms-greeter/
	# Install launcher script
	install -Dm755 DankMaterialShell-$(BASE_VERSION)/quickshell/Modules/Greetd/assets/dms-greeter \
		debian/dms-greeter/usr/bin/dms-greeter
	# Install documentation
	install -Dm644 DankMaterialShell-$(BASE_VERSION)/quickshell/Modules/Greetd/README.md \
		debian/dms-greeter/usr/share/doc/dms-greeter/README.md
	# Install LICENSE file
	install -Dm644 DankMaterialShell-$(BASE_VERSION)/LICENSE \
		debian/dms-greeter/usr/share/doc/dms-greeter/LICENSE
	# Create cache directory structure (will be created by postinst)
	mkdir -p debian/dms-greeter/var/cache/dms-greeter
	# Remove build and development files
	rm -rf debian/dms-greeter/usr/share/quickshell/dms-greeter/core
	rm -rf debian/dms-greeter/usr/share/quickshell/dms-greeter/distro

View File

@@ -77,7 +77,7 @@ There are example themes you can start from:
- [Cyberpunk Electric](theme_cyberpunk_electric.json) - Neon green and magenta cyberpunk aesthetic
- [Hotline Miami](theme_hotline_miami.json) - Retro 80s inspired hot pink and blue
- [Miami Vice](theme_miami_vice.json) - Classic teal and pink vice aesthetic
- [Synthwave Electric](theme_synthwave_electric.json) - Electric purple and cyan synthwave vibes

### Color Definitions
@@ -87,7 +87,7 @@ There are example themes you can start from:
- `primaryText` - Text color that contrasts well with primary background
- `primaryContainer` - Darker/lighter variant of primary for containers

**Secondary Colors**
- `secondary` - Supporting accent color for variety and hierarchy
- `surfaceTint` - Tint color applied to surfaces, usually derived from primary
@@ -115,7 +115,7 @@ While the core colors above are required, you can also customize these optional
```json
{
  "error": "#f44336",
  "warning": "#ff9800",
  "info": "#2196f3"
}
```
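A quick way to sanity-check a theme file before pointing the shell at it is to confirm the documented keys exist. This is only a convenience sketch: the file path is an example, `jq` is not required by DMS itself, and the recursive search is there so nested layouts pass too.

```bash
theme="$HOME/.config/DankMaterialShell/my-theme.json"   # example path
for key in primary primaryText primaryContainer secondary surfaceTint error warning info; do
    # look anywhere in the document for an object that defines this key
    jq -e --arg k "$key" '[.. | objects | has($k)] | any' "$theme" >/dev/null \
        || echo "missing: $key"
done
```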
@@ -158,4 +158,4 @@ You can also edit `~/.config/DankMaterialShell/settings.json` manually
### Reactivity

Editing the custom theme file will auto-update the shell if it's the current theme.

View File

@@ -22,7 +22,7 @@ Audio system control and information.
- Returns: Confirmation message

**`decrement <step>`**
- Decrease output volume by step amount
- Parameters: `step` - Volume decrease amount (default: 5)
- Returns: Confirmation message
@@ -72,7 +72,7 @@ Display brightness control for internal and external displays.
**`decrement <step> [device]`**
- Decrease brightness by step amount
- Parameters:
  - `step` - Brightness decrease amount
  - `device` - Optional device name (empty string for default)
- Returns: Confirmation with new brightness level
@@ -105,7 +105,7 @@ Night mode (gamma/color temperature) control.
- Enable night mode
- Returns: Confirmation message

**`disable`**
- Disable night mode
- Returns: Confirmation message
@@ -163,7 +163,7 @@ Media player control via MPRIS interface.
- Returns: Nothing

**`pause`**
- Pause playback on active player
- Returns: Nothing

**`playPause`**
@@ -272,7 +272,7 @@ Wallpaper management and retrieval with support for per-monitor configurations.
**`setFor <screenName> <path>`**
- Set wallpaper for specific monitor (automatically enables per-monitor mode)
- Parameters:
  - `screenName` - Monitor name (e.g., "DP-2", "eDP-1")
  - `path` - Absolute or relative path to image file
- Returns: Success confirmation with monitor and path info
@@ -365,7 +365,7 @@ Theme mode control (light/dark mode switching).
- Returns: "light" - Returns: "light"
**`dark`** **`dark`**
- Switch to dark theme mode - Switch to dark theme mode
- Returns: "dark" - Returns: "dark"
**`getMode`** **`getMode`**
@@ -445,7 +445,7 @@ Clipboard history modal control.
**Functions:**
- `open` - Show clipboard history
- `close` - Hide clipboard history
- `toggle` - Toggle clipboard history visibility

### Target: `notifications`
@@ -685,4 +685,4 @@ Most IPC functions return string messages indicating:
- Status information for query functions
- Empty/void return for simple action functions

Functions that return void (like media controls) execute the action but don't provide feedback. Check the application state through other means if needed.
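Putting a few of the calls documented above together; the `dms ipc call <target> <function> [args]` entry point shown here is an assumption about the CLI wrapper, but the target, function, and argument shapes follow this document:

```bash
dms ipc call audio decrement 5                            # lower output volume by 5
dms ipc call brightness decrement 10 ""                   # default device, -10 brightness
dms ipc call wallpaper setFor "DP-2" ~/Pictures/wall.jpg  # per-monitor wallpaper
dms ipc call theme dark                                   # returns "dark"
dms ipc call clipboard toggle                             # show/hide clipboard history
```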

View File

@@ -39,4 +39,4 @@
"warning": "#99CC00", "warning": "#99CC00",
"info": "#00B899" "info": "#00B899"
} }
} }

View File

@@ -39,4 +39,4 @@
"warning": "#B3B300", "warning": "#B3B300",
"info": "#00B359" "info": "#00B359"
} }
} }

@@ -39,4 +39,4 @@
"warning": "#CC9900", "warning": "#CC9900",
"info": "#0099CC" "info": "#0099CC"
} }
} }

@@ -39,4 +39,4 @@
"warning": "#CC9900", "warning": "#CC9900",
"info": "#0066CC" "info": "#0066CC"
} }
} }

@@ -171,11 +171,13 @@
delve
go-tools
gnumake
prek
]
++ devQmlPkgs;
shellHook = ''
touch quickshell/.qmlls.ini 2>/dev/null
if [ ! -f .git/hooks/pre-commit ]; then prek install; fi
'';
QML2_IMPORT_PATH = mkQmlImportPath pkgs devQmlPkgs;
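Because the shellHook only installs the hook when `.git/hooks/pre-commit` is missing, the checks can also be run by hand; a minimal sketch, assuming prek mirrors pre-commit's CLI (its stated goal):

```bash
# Enter the dev shell; the shellHook above installs the git hook on first entry.
nix develop
# Run the same hooks across the whole repo (flag name assumed from pre-commit).
prek run --all-files
```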

@@ -1 +1 @@
Spicy Miso

@@ -542,4 +542,4 @@ function getCatppuccinVariantNames() {
function getThemeCategories() {
return ThemeCategories
}

@@ -1304,4 +1304,4 @@ function byLengthAsc(a, b, selector) {
}
function byStartAsc(a, b) {
return a.start - b.start;
}

@@ -3,13 +3,13 @@
// May not be necessary if that's possible tbh.
function markdownToHtml(text) {
if (!text) return "";
// Store code blocks and inline code to protect them from further processing
const codeBlocks = [];
const inlineCode = [];
let blockIndex = 0;
let inlineIndex = 0;
// First, extract and replace code blocks with placeholders
let html = text.replace(/```([\s\S]*?)```/g, (match, code) => {
// Trim leading and trailing blank lines only
@@ -21,7 +21,7 @@ function markdownToHtml(text) {
codeBlocks.push(`<pre><code>${escapedCode}</code></pre>`);
return `\x00CODEBLOCK${blockIndex++}\x00`;
});
// Extract and replace inline code
html = html.replace(/`([^`]+)`/g, (match, code) => {
// Escape HTML entities in code
@@ -31,18 +31,18 @@ function markdownToHtml(text) {
inlineCode.push(`<code>${escapedCode}</code>`);
return `\x00INLINECODE${inlineIndex++}\x00`;
});
// Now process everything else
// Escape HTML entities (but not in code blocks)
html = html.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;');
// Headers
html = html.replace(/^### (.*?)$/gm, '<h3>$1</h3>');
html = html.replace(/^## (.*?)$/gm, '<h2>$1</h2>');
html = html.replace(/^# (.*?)$/gm, '<h1>$1</h1>');
// Bold and italic (order matters!)
html = html.replace(/\*\*\*(.*?)\*\*\*/g, '<b><i>$1</i></b>');
html = html.replace(/\*\*(.*?)\*\*/g, '<b>$1</b>');
@@ -50,15 +50,15 @@ function markdownToHtml(text) {
html = html.replace(/___(.*?)___/g, '<b><i>$1</i></b>');
html = html.replace(/__(.*?)__/g, '<b>$1</b>');
html = html.replace(/_(.*?)_/g, '<i>$1</i>');
// Links
html = html.replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2">$1</a>');
// Lists
html = html.replace(/^\* (.*?)$/gm, '<li>$1</li>');
html = html.replace(/^- (.*?)$/gm, '<li>$1</li>');
html = html.replace(/^\d+\. (.*?)$/gm, '<li>$1</li>');
// Wrap consecutive list items in ul/ol tags
html = html.replace(/(<li>[\s\S]*?<\/li>\s*)+/g, function(match) {
return '<ul>' + match + '</ul>';
@@ -71,36 +71,36 @@ function markdownToHtml(text) {
html = html.replace(/\x00CODEBLOCK(\d+)\x00/g, (match, index) => {
return codeBlocks[parseInt(index)];
});
html = html.replace(/\x00INLINECODE(\d+)\x00/g, (match, index) => {
return inlineCode[parseInt(index)];
});
// Line breaks (after code blocks are restored)
html = html.replace(/\n\n/g, '</p><p>');
html = html.replace(/\n/g, '<br/>');
// Wrap in paragraph tags if not already wrapped
if (!html.startsWith('<')) {
html = '<p>' + html + '</p>';
}
// Clean up the final HTML
// Remove <br/> tags immediately before block elements
html = html.replace(/<br\/>\s*<pre>/g, '<pre>');
html = html.replace(/<br\/>\s*<ul>/g, '<ul>');
html = html.replace(/<br\/>\s*<h[1-6]>/g, '<h$1>');
// Remove empty paragraphs
html = html.replace(/<p>\s*<\/p>/g, '');
html = html.replace(/<p>\s*<br\/>\s*<\/p>/g, '');
// Remove excessive line breaks
html = html.replace(/(<br\/>){3,}/g, '<br/><br/>'); // Max 2 consecutive line breaks
html = html.replace(/(<\/p>)\s*(<p>)/g, '$1$2'); // Remove whitespace between paragraphs
// Remove leading/trailing whitespace
html = html.trim();
return html;
}

@@ -18,18 +18,18 @@ PanelWindow {
property var parentModal: null
property real menuPositionX: 0
property real menuPositionY: 0
readonly property real shadowBuffer: 5
screen: parentModal?.effectiveScreen
function show(x, y, app, fromKeyboard) {
fromKeyboard = fromKeyboard || false;
menuContent.currentApp = app;
let screenX = x;
let screenY = y;
if (parentModal) {
if (fromKeyboard) {
screenX = x + parentModal.alignedX;
@@ -39,14 +39,14 @@ PanelWindow {
screenY = y + (parentModal.alignedY - shadowBuffer);
}
}
menuPositionX = screenX;
menuPositionY = screenY;
menuContent.selectedMenuIndex = fromKeyboard ? 0 : -1;
menuContent.keyboardNavigation = true;
visible = true;
if (parentHandler) {
parentHandler.enabled = false;
}

@@ -143,7 +143,7 @@ Item {
implicitWidth: Math.max(180, menuColumn.implicitWidth + Theme.spacingS * 2)
implicitHeight: menuColumn.implicitHeight + Theme.spacingS * 2
width: implicitWidth
height: implicitHeight

@@ -14,20 +14,20 @@ Popup {
function show(x, y, app, fromKeyboard) {
fromKeyboard = fromKeyboard || false;
menuContent.currentApp = app;
root.x = x + 4;
root.y = y + 4;
menuContent.selectedMenuIndex = fromKeyboard ? 0 : -1;
menuContent.keyboardNavigation = true;
if (parentHandler) {
parentHandler.enabled = false;
}
open();
}
onOpened: {
Qt.callLater(() => {
menuContent.keyboardHandler.forceActiveFocus();

@@ -117,4 +117,4 @@ Rectangle {
easing.type: Theme.standardEasing
}
}
}

@@ -88,4 +88,4 @@ Item {
}
}
}
}

@@ -286,4 +286,4 @@ Item {
ColorAnimation { duration: Theme.shortDuration }
}
}
}

@@ -237,4 +237,4 @@ Row {
onClicked: root.clearAll()
}
}
}

@@ -49,4 +49,4 @@ Rectangle {
cursorShape: Qt.PointingHandCursor
onPressed: root.pressed()
}
}

@@ -49,4 +49,4 @@ Row {
}
}
}
}

@@ -43,4 +43,4 @@ StyledText {
default: return Theme.surfaceText
}
}
}

@@ -161,7 +161,7 @@ Rectangle {
const pins = SettingsData.bluetoothDevicePins || {}
const pinnedAddr = pins["preferredDevice"]
let devices = [...BluetoothService.adapter.devices.values.filter(dev => dev && (dev.paired || dev.trusted))]
devices.sort((a, b) => {
// Pinned device first
@@ -337,13 +337,13 @@ Rectangle {
onClicked: {
const pins = JSON.parse(JSON.stringify(SettingsData.bluetoothDevicePins || {}))
const isCurrentlyPinned = pins["preferredDevice"] === modelData.address
if (isCurrentlyPinned) {
delete pins["preferredDevice"]
} else {
pins["preferredDevice"] = modelData.address
}
SettingsData.set("bluetoothDevicePins", pins)
}
}
@@ -642,4 +642,4 @@ Rectangle {
}
}
}
}

@@ -163,4 +163,4 @@ Rectangle {
}
}
}
}

@@ -45,4 +45,4 @@ CompoundPill {
onToggled: {
expandClicked()
}
}

@@ -30,4 +30,4 @@ CompoundPill {
colorPickerModal.show()
}
}
}

@@ -67,4 +67,4 @@ Rectangle {
onSliderValueChanged: root.sliderValueChanged(newValue / 100.0)
}
}
}

@@ -167,4 +167,4 @@ Rectangle {
if (ev.key === Qt.Key_Space || ev.key === Qt.Key_Return) { root.toggled(); ev.accepted = true }
else if (ev.key === Qt.Key_Right) { root.expandClicked(); ev.accepted = true }
}
}

@@ -26,4 +26,4 @@ Rectangle {
}
}

@@ -75,4 +75,4 @@ CompoundPill {
onToggled: {
expandClicked()
}
}

@@ -97,4 +97,4 @@ Rectangle {
easing.type: Theme.standardEasing
}
}
}

@@ -77,4 +77,4 @@ Rectangle {
easing.type: Theme.standardEasing
}
}
}

@@ -118,4 +118,4 @@ Rectangle {
easing.type: Theme.standardEasing
}
}
}

@@ -42,4 +42,4 @@ function calculateRowsAndWidgets(controlCenterColumn, expandedSection, expandedW
}
return { rows: rows, expandedRowIndex: expandedRow }
}

@@ -22,4 +22,4 @@ function toggleSection(root, section) {
} else {
root.expandedSection = section
}
}

@@ -87,4 +87,4 @@ function resetToDefault() {
function clearAll() {
SettingsData.set("controlCenterWidgets", [])
}

@@ -54,4 +54,4 @@ QtObject {
axisOrientationChanged()
changed()
}
}

@@ -22,4 +22,4 @@ BasePill {
}
}
}
}

@@ -60,4 +60,4 @@ BasePill {
}
}
}
}

@@ -21,4 +21,4 @@ BasePill {
}
}
}
}

@@ -444,4 +444,4 @@ Rectangle {
id: systemClock
precision: SystemClock.Hours
}
}

@@ -19,4 +19,4 @@ Rectangle {
anchors.fill: parent
anchors.margins: card.pad
}
}

@@ -110,4 +110,4 @@ Card {
id: systemClock
precision: SettingsData.showSeconds ? SystemClock.Seconds : SystemClock.Minutes
}
}

@@ -215,4 +215,4 @@ Card {
onClicked: root.clicked()
visible: activePlayer
}
}

@@ -175,4 +175,4 @@ Card {
}
}
}
}

@@ -86,4 +86,4 @@ Card {
cursorShape: Qt.PointingHandCursor
onClicked: root.clicked()
}
}

@@ -73,4 +73,4 @@ Item {
onClicked: root.switchToMediaTab()
}
}
}

@@ -1093,7 +1093,7 @@ Item {
command: {
var paths = [
"/usr/share/wayland-sessions",
"/usr/share/xsessions",
"/usr/local/share/wayland-sessions",
"/usr/local/share/xsessions"
]
@@ -1110,7 +1110,7 @@ Item {
}
})
}
// 1. Explicit system/user paths
var explicitFind = "find " + paths.join(" ") + " -maxdepth 1 -name '*.desktop' -type f -follow 2>/dev/null"
// 2. Scan all /home user directories for local session files
var homeScan = "find /home -maxdepth 5 \\( -path '*/wayland-sessions/*.desktop' -o -path '*/xsessions/*.desktop' \\) -type f -follow 2>/dev/null"

@@ -45,7 +45,7 @@ setfacl -m u:greeter:x ~ ~/.config ~/.local ~/.cache ~/.local/state
# Set group ownership on config directories
sudo chgrp -R greeter ~/.config/DankMaterialShell
sudo chgrp -R greeter ~/.local/state/DankMaterialShell
sudo chgrp -R greeter ~/.cache/DankMaterialShell
sudo chmod -R g+rX ~/.config/DankMaterialShell ~/.cache/DankMaterialShell ~/.cache/quickshell
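The access granted above can be spot-checked from the greeter's side; a minimal sketch (the `settings.json` filename is an assumption, not taken from this diff):

```bash
# Verify the ACL on $HOME and that the greeter user can read the DMS config.
getfacl ~ | grep greeter
sudo -u greeter test -r ~/.config/DankMaterialShell/settings.json \
    && echo "greeter can read DMS config" \
    || echo "greeter cannot read DMS config"
```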

@@ -20,4 +20,4 @@ gestures {
layout {
background-color "#000000"
}

@@ -5,4 +5,4 @@ export QT_QPA_PLATFORM=wayland
export QT_WAYLAND_DISABLE_WINDOWDECORATION=1
export EGL_PLATFORM=gbm
exec niri -c /etc/greetd/dms-niri.kdl
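A wrapper like this is typically installed on the greeter's PATH and referenced from greetd's config; a hedged sketch (install path, script name, and config layout are assumptions, not part of this diff):

```bash
# Install the wrapper and point greetd at it (paths are placeholders).
sudo install -Dm755 dms-greeter.sh /usr/local/bin/dms-greeter
# /etc/greetd/config.toml would then contain something like:
#   [default_session]
#   command = "/usr/local/bin/dms-greeter"
#   user = "greeter"
```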

@@ -616,4 +616,4 @@ Column {
autoSaveToSession()
}
}
}

@@ -241,4 +241,4 @@ FocusScope {
}
return defaultValue
}
}

@@ -73,7 +73,7 @@ Singleton {
}
const profileValue = BatteryService.isPluggedIn
? SettingsData.acProfileName
: SettingsData.batteryProfileName;
if (profileValue !== "") {

@@ -984,4 +984,4 @@ Singleton {
"bssid": network.bssid "bssid": network.bssid
} }
} }
} }

@@ -429,4 +429,4 @@ Singleton {
id: mkdirProcess
command: ["mkdir", "-p", root.baseDir, root.filesDir]
}
}

@@ -47,7 +47,7 @@ Singleton {
if (!node || !node.ready) {
continue
}
if (node.properties && node.properties["media.class"] === "Stream/Input/Video") {
if (node.properties["stream.is-live"] === "true") {
return true

@@ -1,4 +1,4 @@
layer-rule {
match namespace="dms:blurwallpaper"
place-within-backdrop true
}

@@ -15,7 +15,7 @@ layout(std140, binding = 0) uniform buf {
float centerY; // Y coordinate of disc center (0.0 to 1.0)
float smoothness; // Edge smoothness (0.0 to 1.0, 0=sharp, 1=very smooth)
float aspectRatio; // Width / Height of the screen
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
float imageWidth1;
float imageHeight1;
@@ -97,30 +97,30 @@ void main() {
// This makes distances circular instead of elliptical
vec2 adjustedUV = vec2(uv.x * ubuf.aspectRatio, uv.y);
vec2 adjustedCenter = vec2(ubuf.centerX * ubuf.aspectRatio, ubuf.centerY);
// Calculate distance in aspect-corrected space
float dist = distance(adjustedUV, adjustedCenter);
// Calculate the maximum possible distance (corner to corner)
// This ensures the disc can cover the entire screen
float maxDistX = max(ubuf.centerX * ubuf.aspectRatio,
(1.0 - ubuf.centerX) * ubuf.aspectRatio);
float maxDistY = max(ubuf.centerY, 1.0 - ubuf.centerY);
float maxDist = length(vec2(maxDistX, maxDistY));
// Scale progress to cover the maximum distance
// Add extra range for smoothness to ensure complete coverage
// Adjust smoothness for aspect ratio to maintain consistent visual appearance
float adjustedSmoothness = mappedSmoothness * max(1.0, ubuf.aspectRatio);
float radius = ubuf.progress * (maxDist + adjustedSmoothness);
// Use smoothstep for a smooth edge transition
float factor = smoothstep(radius - adjustedSmoothness, radius + adjustedSmoothness, dist);
// Mix the textures (factor = 0 inside disc, 1 outside)
fragColor = mix(color2, color1, factor);
if (ubuf.progress <= 0.0) fragColor = color1;
fragColor *= ubuf.qt_Opacity;
}

@@ -11,7 +11,7 @@ layout(std140, binding = 0) uniform buf {
mat4 qt_Matrix;
float qt_Opacity;
float progress;
// Fill mode parameters
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
float imageWidth1; // Width of source1 image
@@ -81,11 +81,11 @@ vec4 sampleWithFillMode(sampler2D tex, vec2 uv, float imgWidth, float imgHeight)
void main() {
vec2 uv = qt_TexCoord0;
// Sample textures with fill mode
vec4 color1 = sampleWithFillMode(source1, uv, ubuf.imageWidth1, ubuf.imageHeight1);
vec4 color2 = sampleWithFillMode(source2, uv, ubuf.imageWidth2, ubuf.imageHeight2);
// Mix the two textures based on progress value
fragColor = mix(color1, color2, ubuf.progress) * ubuf.qt_Opacity;
}

@@ -104,7 +104,7 @@ void main() {
float radius = p * maxDist;
// squash factor for the "eye" slit
float squash = mix(0.2, 1.0, p);
q.y /= squash;
float dist = length(q);

@@ -15,7 +15,7 @@ layout(std140, binding = 0) uniform buf {
float angle; // Angle of stripes in degrees (default 30.0)
float smoothness; // Edge smoothness (0.0 to 1.0, 0=sharp, 1=very smooth)
float aspectRatio; // Width / Height of the screen
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
float imageWidth1;
float imageHeight1;
@@ -84,58 +84,58 @@ vec4 sampleWithFillMode(sampler2D tex, vec2 uv, float imgWidth, float imgHeight)
void main() {
vec2 uv = qt_TexCoord0;
// Sample textures with fill mode
vec4 color1 = sampleWithFillMode(source1, uv, ubuf.imageWidth1, ubuf.imageHeight1);
vec4 color2 = sampleWithFillMode(source2, uv, ubuf.imageWidth2, ubuf.imageHeight2);
// Map smoothness from 0.0-1.0 to 0.001-0.3 range
// Using a non-linear mapping for better control at low values
float mappedSmoothness = mix(0.001, 0.3, ubuf.smoothness * ubuf.smoothness);
// Use values directly without forcing defaults
float stripes = (ubuf.stripeCount > 0.0) ? ubuf.stripeCount : 12.0;
float angleRad = radians(ubuf.angle);
float edgeSmooth = mappedSmoothness;
// Create a coordinate system for stripes based on angle
// At 0°: vertical stripes (divide by x)
// At 45°: diagonal stripes
// At 90°: horizontal stripes (divide by y)
// Transform coordinates based on angle
float cosA = cos(angleRad);
float sinA = sin(angleRad);
// Project the UV position onto the stripe direction
// This gives us the position along the stripe direction
float stripeCoord = uv.x * cosA + uv.y * sinA;
// Perpendicular coordinate (for edge movement)
float perpCoord = -uv.x * sinA + uv.y * cosA;
// Calculate the range of perpCoord based on angle
// This determines how far edges need to travel to fully cover the screen
float minPerp = min(min(0.0 * -sinA + 0.0 * cosA, 1.0 * -sinA + 0.0 * cosA),
min(0.0 * -sinA + 1.0 * cosA, 1.0 * -sinA + 1.0 * cosA));
float maxPerp = max(max(0.0 * -sinA + 0.0 * cosA, 1.0 * -sinA + 0.0 * cosA),
max(0.0 * -sinA + 1.0 * cosA, 1.0 * -sinA + 1.0 * cosA));
// Determine which stripe we're in
float stripePos = stripeCoord * stripes;
int stripeIndex = int(floor(stripePos));
// Determine if this is an odd or even stripe
bool isOddStripe = mod(float(stripeIndex), 2.0) != 0.0;
// Calculate the progress for this specific stripe with wave delay
// Use absolute stripe position for consistent delay across all stripes
float normalizedStripePos = clamp(stripePos / stripes, 0.0, 1.0);
// Increased delay and better distribution
float maxDelay = 0.1;
float stripeDelay = normalizedStripePos * maxDelay;
// Better progress mapping that uses the full 0.0-1.0 range
// Map progress so that:
// - First stripe starts at progress = 0.0
@@ -151,13 +151,13 @@ void main() {
float activeEnd = stripeDelay + (1.0 - maxDelay);
stripeProgress = (ubuf.progress - activeStart) / (activeEnd - activeStart);
}
// Use gentler easing curve
stripeProgress = stripeProgress * stripeProgress * (3.0 - 2.0 * stripeProgress); // Smootherstep instead of smoothstep
// Use the perpendicular coordinate for edge comparison
float yPos = perpCoord;
// Calculate edge position for this stripe
// Use the actual perpendicular coordinate range for this angle
float perpRange = maxPerp - minPerp;
@@ -170,7 +170,7 @@ void main() {
// Even stripes: edge moves from min to max
edgePosition = minPerp - margin + stripeProgress * (perpRange + margin * 2.0);
}
// Determine which wallpaper to show based on rotated position
float mask;
if (isOddStripe) {
@@ -180,10 +180,10 @@ void main() {
// Even stripes reveal new wallpaper from top
mask = 1.0 - smoothstep(edgePosition - edgeSmooth, edgePosition + edgeSmooth, yPos);
}
// Mix the wallpapers
fragColor = mix(color1, color2, mask);
// Force exact values at start and end to prevent any bleed-through
if (ubuf.progress <= 0.0) {
fragColor = color1; // Only show old wallpaper at start
@@ -195,11 +195,11 @@ void main() {
float shadowStrength = 1.0 - smoothstep(0.0, edgeSmooth * 2.5, edgeDist);
shadowStrength *= 0.2 * (1.0 - abs(stripeProgress - 0.5) * 2.0);
fragColor.rgb *= (1.0 - shadowStrength);
// Add slight vignette during transition for dramatic effect
float vignette = 1.0 - ubuf.progress * 0.1 * (1.0 - abs(stripeProgress - 0.5) * 2.0);
fragColor.rgb *= vignette;
}
fragColor *= ubuf.qt_Opacity;
}

@@ -13,7 +13,7 @@ layout(std140, binding = 0) uniform buf {
float progress; // Transition progress (0.0 to 1.0)
float direction; // 0=left, 1=right, 2=up, 3=down
float smoothness; // Edge smoothness (0.0 to 1.0, 0=sharp, 1=very smooth)
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
float imageWidth1;
float imageHeight1;
@@ -82,22 +82,22 @@ vec4 sampleWithFillMode(sampler2D tex, vec2 uv, float imgWidth, float imgHeight)
void main() {
vec2 uv = qt_TexCoord0;
// Sample textures with fill mode
vec4 color1 = sampleWithFillMode(source1, uv, ubuf.imageWidth1, ubuf.imageHeight1);
vec4 color2 = sampleWithFillMode(source2, uv, ubuf.imageWidth2, ubuf.imageHeight2);
// Map smoothness from 0.0-1.0 to 0.001-0.5 range
// Using a non-linear mapping for better control
float mappedSmoothness = mix(0.001, 0.5, ubuf.smoothness * ubuf.smoothness);
float edge = 0.0;
float factor = 0.0;
// Extend the progress range to account for smoothness
// This ensures the transition completes fully at the edges
float extendedProgress = ubuf.progress * (1.0 + 2.0 * mappedSmoothness) - mappedSmoothness;
// Calculate edge position based on direction
// As progress goes from 0 to 1, we reveal source2 (new wallpaper)
if (ubuf.direction < 0.5) {
@@ -105,7 +105,7 @@ void main() {
edge = 1.0 - extendedProgress;
factor = smoothstep(edge - mappedSmoothness, edge + mappedSmoothness, uv.x);
fragColor = mix(color1, color2, factor);
}
else if (ubuf.direction < 1.5) {
// Wipe from left to right (new image enters from left)
edge = extendedProgress;
@@ -124,6 +124,6 @@ void main() {
factor = smoothstep(edge - mappedSmoothness, edge + mappedSmoothness, uv.y);
fragColor = mix(color2, color1, factor);
}
fragColor *= ubuf.qt_Opacity;
}

@@ -210,4 +210,4 @@ Flow {
}
}
}
}

@@ -76,4 +76,4 @@ Rectangle {
font.weight: Font.Bold
color: Theme.surfaceVariantText
}
}

Some files were not shown because too many files have changed in this diff.